diff --git a/block-storage/src/main/java/org/lucares/pdb/blockstorage/BSFile.java b/block-storage/src/main/java/org/lucares/pdb/blockstorage/BSFile.java index 1f4f9d3..a284040 100644 --- a/block-storage/src/main/java/org/lucares/pdb/blockstorage/BSFile.java +++ b/block-storage/src/main/java/org/lucares/pdb/blockstorage/BSFile.java @@ -37,212 +37,213 @@ import org.slf4j.LoggerFactory; */ public class BSFile implements AutoCloseable { - private static final Logger LOGGER = LoggerFactory.getLogger(BSFile.class); + private static final Logger LOGGER = LoggerFactory.getLogger(BSFile.class); - public static final int BLOCK_SIZE = 512; + public static final int BLOCK_SIZE = 512; - /* - * The last disk block of this sequence. This is the block new values will be - * appended to. - */ - private BSFileDiskBlock buffer; + /* + * The last disk block of this sequence. This is the block new values will be + * appended to. + */ + private BSFileDiskBlock buffer; - private int offsetInBuffer = 0; + private int offsetInBuffer = 0; - private boolean dirty = false; + private boolean dirty = false; - private final long rootBlockOffset; + private final long rootBlockOffset; - private final DiskStorage diskStorage; + private final DiskStorage diskStorage; - private final BSFileDiskBlock rootDiskBlock; + private final BSFileDiskBlock rootDiskBlock; - private final BSFileCustomizer customizer; + private final BSFileCustomizer customizer; - BSFile(final long rootBlockOffset, final DiskStorage diskStorage, final BSFileCustomizer customizer) { + BSFile(final long rootBlockOffset, final DiskStorage diskStorage, final BSFileCustomizer customizer) { - this(new BSFileDiskBlock(diskStorage.getDiskBlock(rootBlockOffset, BLOCK_SIZE)), diskStorage, customizer); - } + this(new BSFileDiskBlock(diskStorage.getDiskBlock(rootBlockOffset, BLOCK_SIZE)), diskStorage, customizer); + } - BSFile(final BSFileDiskBlock rootDiskBlock, final DiskStorage diskStorage, final BSFileCustomizer customizer) { + 
BSFile(final BSFileDiskBlock rootDiskBlock, final DiskStorage diskStorage, final BSFileCustomizer customizer) { - this.rootDiskBlock = rootDiskBlock; - this.customizer = customizer; - this.rootBlockOffset = rootDiskBlock.getBlockOffset(); - this.diskStorage = diskStorage; + this.rootDiskBlock = rootDiskBlock; + this.customizer = customizer; + this.rootBlockOffset = rootDiskBlock.getBlockOffset(); + this.diskStorage = diskStorage; - final long lastBlockNumber = rootDiskBlock.getLastBlockPointer(); - if (lastBlockNumber == rootBlockOffset || lastBlockNumber == 0) { - buffer = rootDiskBlock; - } else { - buffer = new BSFileDiskBlock(diskStorage.getDiskBlock(lastBlockNumber, BLOCK_SIZE)); - } - offsetInBuffer = determineWriteOffsetInExistingBuffer(buffer); - customizer.init(buffer); - LOGGER.trace("create bsFile={} lastBlockNumber={}", rootBlockOffset, lastBlockNumber); - } + final long lastBlockNumber = rootDiskBlock.getLastBlockPointer(); + if (lastBlockNumber == rootBlockOffset || lastBlockNumber == 0) { + buffer = rootDiskBlock; + } else { + buffer = new BSFileDiskBlock(diskStorage.getDiskBlock(lastBlockNumber, BLOCK_SIZE)); + } + offsetInBuffer = determineWriteOffsetInExistingBuffer(buffer); + customizer.init(buffer); + LOGGER.trace("create bsFile={} lastBlockNumber={}", rootBlockOffset, lastBlockNumber); + } - private int determineWriteOffsetInExistingBuffer(final BSFileDiskBlock buffer) { + private int determineWriteOffsetInExistingBuffer(final BSFileDiskBlock buffer) { - final byte[] buf = buffer.getBuffer(); + final byte[] buf = buffer.getBuffer(); - int result = 0; - while (result < buf.length && buf[result] != 0) { - result++; - } + int result = 0; + while (result < buf.length && buf[result] != 0) { + result++; + } - return result; - } + return result; + } - public static BSFile existingFile(final long blockNumber, final DiskStorage diskStorage, - final BSFileCustomizer customizer) { - return new BSFile(blockNumber, diskStorage, customizer); - } + public 
static BSFile existingFile(final long blockNumber, final DiskStorage diskStorage, + final BSFileCustomizer customizer) { + return new BSFile(blockNumber, diskStorage, customizer); + } - public static BSFile newFile(final DiskStorage diskStorage, final BSFileCustomizer customizer) { - final long rootBlockOffset = diskStorage.allocateBlock(BLOCK_SIZE); - LOGGER.trace("create new bsFile={}", rootBlockOffset); - return new BSFile(rootBlockOffset, diskStorage, customizer); - } + public static BSFile newFile(final DiskStorage diskStorage, final BSFileCustomizer customizer) { + final long rootBlockOffset = diskStorage.allocateBlock(BLOCK_SIZE); + LOGGER.trace("create new bsFile={}", rootBlockOffset); + return new BSFile(rootBlockOffset, diskStorage, customizer); + } - public void append(final long value1, final long value2) { - final long val1 = customizer.preProcessWriteValue1(value1); - final long val2 = customizer.preProcessWriteValue2(value2); + public void append(final long value1, final long value2) { + final long val1 = customizer.preProcessWriteValue1(value1); + final long val2 = customizer.preProcessWriteValue2(value2); - final int bytesWritten = VariableByteEncoder.encodeInto(val1, val2, buffer.getBuffer(), offsetInBuffer); + final int bytesWritten = VariableByteEncoder.encodeInto(val1, val2, buffer.getBuffer(), offsetInBuffer); - if (bytesWritten == 0) { - flushFullBufferAndCreateNew(); - customizer.newBlock(); + if (bytesWritten == 0) { + flushFullBufferAndCreateNew(); + customizer.newBlock(); - append(value1, value2); - } - offsetInBuffer += bytesWritten; - dirty = true; - } + append(value1, value2); + } + offsetInBuffer += bytesWritten; + dirty = true; + } - public void append(final long value) { - int bytesWritten = VariableByteEncoder.encodeInto(value, buffer.getBuffer(), offsetInBuffer); + public void append(final long value) { + int bytesWritten = VariableByteEncoder.encodeInto(value, buffer.getBuffer(), offsetInBuffer); - if (bytesWritten == 0) { - 
flushFullBufferAndCreateNew(); - bytesWritten = VariableByteEncoder.encodeInto(value, buffer.getBuffer(), offsetInBuffer); - assert bytesWritten > 0 : "after a flush the buffer is emtpy, so it should be possible to write a few bytes"; - } - offsetInBuffer += bytesWritten; - dirty = true; - } + if (bytesWritten == 0) { + flushFullBufferAndCreateNew(); + bytesWritten = VariableByteEncoder.encodeInto(value, buffer.getBuffer(), offsetInBuffer); + assert bytesWritten > 0 : "after a flush the buffer is emtpy, so it should be possible to write a few bytes"; + } + offsetInBuffer += bytesWritten; + dirty = true; + } - private void flushFullBufferAndCreateNew() { + private void flushFullBufferAndCreateNew() { - final long newBlockOffset = diskStorage.allocateBlock(BLOCK_SIZE); + final long newBlockOffset = diskStorage.allocateBlock(BLOCK_SIZE); - if (buffer == rootDiskBlock) { - // root block and current block are the same, so we need - // to update only one - buffer.setLastBlockOffset(newBlockOffset); - buffer.setNextBlockOffset(newBlockOffset); - buffer.writeAsync(); - } else { - rootDiskBlock.writeLastBlockOffset(newBlockOffset); + if (buffer == rootDiskBlock) { + // root block and current block are the same, so we need + // to update only one + buffer.setLastBlockOffset(newBlockOffset); + buffer.setNextBlockOffset(newBlockOffset); + buffer.writeAsync(); + } else { + rootDiskBlock.writeLastBlockOffset(newBlockOffset); - buffer.setNextBlockOffset(newBlockOffset); - buffer.writeAsync(); - } + buffer.setNextBlockOffset(newBlockOffset); + buffer.writeAsync(); + } - // set the new buffer - buffer = new BSFileDiskBlock(diskStorage.getDiskBlock(newBlockOffset, BLOCK_SIZE)); - offsetInBuffer = 0; - dirty = false; - LOGGER.trace("flushFullBufferAndCreateNew bsFile={} newBlock={}", rootBlockOffset, newBlockOffset); - } + // set the new buffer + buffer = new BSFileDiskBlock(diskStorage.getDiskBlock(newBlockOffset, BLOCK_SIZE)); + offsetInBuffer = 0; + dirty = false; + 
LOGGER.trace("flushFullBufferAndCreateNew bsFile={} newBlock={}", rootBlockOffset, newBlockOffset); + } - public void flush() { + public void flush() { - LOGGER.trace("flush bsFile={} dirty={} file={}", rootBlockOffset, dirty, diskStorage.getRelativeDatabaseFileForLogging()); - if (dirty) { - buffer.writeAsync(); - } - } + LOGGER.trace("flush bsFile={} dirty={} file={}", rootBlockOffset, dirty, + diskStorage.getRelativeDatabaseFileForLogging()); + if (dirty) { + buffer.writeAsync(); + } + } - public Optional getLastValue() { + public Optional getLastValue() { - final byte[] buf = buffer.getBuffer(); - final LongList bufferedLongs = VariableByteEncoder.decode(buf); + final byte[] buf = buffer.getBuffer(); + final LongList bufferedLongs = VariableByteEncoder.decode(buf); - final Optional result; - if (bufferedLongs.isEmpty()) { - result = Optional.empty(); - } else { - final long lastValue = bufferedLongs.get(bufferedLongs.size() - 1); - result = Optional.of(lastValue); - } - return result; - } + final Optional result; + if (bufferedLongs.isEmpty()) { + result = Optional.empty(); + } else { + final long lastValue = bufferedLongs.get(bufferedLongs.size() - 1); + result = Optional.of(lastValue); + } + return result; + } - public Stream streamOfLongLists() { - final Iterator iterator = new LongListIterator(rootBlockOffset, diskStorage); - final Stream stream = StreamSupport - .stream(Spliterators.spliteratorUnknownSize(iterator, Spliterator.ORDERED), false); + public Stream streamOfLongLists() { + final Iterator iterator = new LongListIterator(rootBlockOffset, diskStorage); + final Stream stream = StreamSupport + .stream(Spliterators.spliteratorUnknownSize(iterator, Spliterator.ORDERED), false); - final Optional> mapper = customizer.getStreamMapper(); - if (mapper.isPresent()) { - return stream.map(mapper.get()); - } - return stream; - } + final Optional> mapper = customizer.getStreamMapper(); + if (mapper.isPresent()) { + return stream.map(mapper.get()); + } + return 
stream; + } - private static class LongListIterator implements Iterator { + private static class LongListIterator implements Iterator { - private LongList next = null; - private long nextBlockOffset; + private LongList next = null; + private long nextBlockOffset; - private final DiskStorage diskStorage; + private final DiskStorage diskStorage; - public LongListIterator(final long nextBlockNumber, final DiskStorage diskStorage) { - this.nextBlockOffset = nextBlockNumber; - this.diskStorage = diskStorage; - } + public LongListIterator(final long nextBlockNumber, final DiskStorage diskStorage) { + this.nextBlockOffset = nextBlockNumber; + this.diskStorage = diskStorage; + } - @Override - public boolean hasNext() { - return nextBlockOffset != BSFileDiskBlock.NO_NEXT_POINTER; - } + @Override + public boolean hasNext() { + return nextBlockOffset != BSFileDiskBlock.NO_NEXT_POINTER; + } - @Override - public LongList next() { - if (nextBlockOffset == BSFileDiskBlock.NO_NEXT_POINTER) { - throw new NoSuchElementException(); - } + @Override + public LongList next() { + if (nextBlockOffset == BSFileDiskBlock.NO_NEXT_POINTER) { + throw new NoSuchElementException(); + } - final BSFileDiskBlock diskBlock = getDiskBlock(nextBlockOffset); - nextBlockOffset = diskBlock.getNextBlockNumber(); + final BSFileDiskBlock diskBlock = getDiskBlock(nextBlockOffset); + nextBlockOffset = diskBlock.getNextBlockNumber(); - final byte[] buf = diskBlock.getBuffer(); - next = VariableByteEncoder.decode(buf); - return next; - } + final byte[] buf = diskBlock.getBuffer(); + next = VariableByteEncoder.decode(buf); + return next; + } - private BSFileDiskBlock getDiskBlock(final long blockOffset) { - final DiskBlock diskBlock = diskStorage.getDiskBlock(blockOffset, BLOCK_SIZE); - return new BSFileDiskBlock(diskBlock); - } - } + private BSFileDiskBlock getDiskBlock(final long blockOffset) { + final DiskBlock diskBlock = diskStorage.getDiskBlock(blockOffset, BLOCK_SIZE); + return new 
BSFileDiskBlock(diskBlock); + } + } - public LongList asLongList() { + public LongList asLongList() { - final LongList result = new LongList(); - streamOfLongLists().forEachOrdered(result::addAll); - return result; - } + final LongList result = new LongList(); + streamOfLongLists().forEachOrdered(result::addAll); + return result; + } - public long getRootBlockOffset() { + public long getRootBlockOffset() { - return rootBlockOffset; - } + return rootBlockOffset; + } - @Override - public void close() { - flush(); - } + @Override + public void close() { + flush(); + } } diff --git a/block-storage/src/main/java/org/lucares/pdb/blockstorage/BSFileCustomizer.java b/block-storage/src/main/java/org/lucares/pdb/blockstorage/BSFileCustomizer.java index 328a8b5..1e26cce 100644 --- a/block-storage/src/main/java/org/lucares/pdb/blockstorage/BSFileCustomizer.java +++ b/block-storage/src/main/java/org/lucares/pdb/blockstorage/BSFileCustomizer.java @@ -6,13 +6,13 @@ import java.util.function.Function; import org.lucares.collections.LongList; public interface BSFileCustomizer { - void init(BSFileDiskBlock lastDiskBlockOfStream); + void init(BSFileDiskBlock lastDiskBlockOfStream); - Optional> getStreamMapper(); + Optional> getStreamMapper(); - void newBlock(); + void newBlock(); - long preProcessWriteValue1(long value); + long preProcessWriteValue1(long value); - long preProcessWriteValue2(long value); + long preProcessWriteValue2(long value); } diff --git a/block-storage/src/main/java/org/lucares/pdb/blockstorage/BSFileDiskBlock.java b/block-storage/src/main/java/org/lucares/pdb/blockstorage/BSFileDiskBlock.java index f55f118..fad2087 100644 --- a/block-storage/src/main/java/org/lucares/pdb/blockstorage/BSFileDiskBlock.java +++ b/block-storage/src/main/java/org/lucares/pdb/blockstorage/BSFileDiskBlock.java @@ -8,90 +8,90 @@ import org.lucares.utils.byteencoder.VariableByteEncoder; class BSFileDiskBlock { - protected static final int NEXT_POINTER_OFFSET = 0; - public static final 
long NO_NEXT_POINTER = 0; - private static final int LAST_BLOCK_POINTER_POSITION = 8; - public static final long NO_LAST_BLOCK = 0; - private static final int INT_SEQUENCE_OFFSET = 8 // next block pointer - + 8; // last block pointer; + protected static final int NEXT_POINTER_OFFSET = 0; + public static final long NO_NEXT_POINTER = 0; + private static final int LAST_BLOCK_POINTER_POSITION = 8; + public static final long NO_LAST_BLOCK = 0; + private static final int INT_SEQUENCE_OFFSET = 8 // next block pointer + + 8; // last block pointer; - private final DiskBlock diskBlock; - private long nextBlockOffset = 0; - private long lastBlockOffset = 0; + private final DiskBlock diskBlock; + private long nextBlockOffset = 0; + private long lastBlockOffset = 0; - private byte[] buffer = null; + private byte[] buffer = null; - public BSFileDiskBlock(final DiskBlock diskBlock) { - this.diskBlock = diskBlock; - } + public BSFileDiskBlock(final DiskBlock diskBlock) { + this.diskBlock = diskBlock; + } - public byte[] getBuffer() { + public byte[] getBuffer() { - if (buffer == null) { - final ByteBuffer byteBuffer = diskBlock.getByteBuffer(); - this.buffer = new byte[byteBuffer.capacity() - INT_SEQUENCE_OFFSET]; - byteBuffer.position(INT_SEQUENCE_OFFSET); - byteBuffer.get(buffer); - } + if (buffer == null) { + final ByteBuffer byteBuffer = diskBlock.getByteBuffer(); + this.buffer = new byte[byteBuffer.capacity() - INT_SEQUENCE_OFFSET]; + byteBuffer.position(INT_SEQUENCE_OFFSET); + byteBuffer.get(buffer); + } - return buffer; - } + return buffer; + } - public long getBlockOffset() { - return diskBlock.getBlockOffset(); - } + public long getBlockOffset() { + return diskBlock.getBlockOffset(); + } - public void setNextBlockOffset(final long nextBlockOffset) { - this.nextBlockOffset = nextBlockOffset; - } + public void setNextBlockOffset(final long nextBlockOffset) { + this.nextBlockOffset = nextBlockOffset; + } - public long getLastBlockPointer() { + public long 
getLastBlockPointer() { - if (lastBlockOffset <= 0) { - lastBlockOffset = diskBlock.getByteBuffer().getLong(LAST_BLOCK_POINTER_POSITION); - } + if (lastBlockOffset <= 0) { + lastBlockOffset = diskBlock.getByteBuffer().getLong(LAST_BLOCK_POINTER_POSITION); + } - return lastBlockOffset; - } + return lastBlockOffset; + } - public long getNextBlockNumber() { - if (nextBlockOffset <= 0) { - nextBlockOffset = diskBlock.getByteBuffer().getLong(NEXT_POINTER_OFFSET); - } - return nextBlockOffset; - } + public long getNextBlockNumber() { + if (nextBlockOffset <= 0) { + nextBlockOffset = diskBlock.getByteBuffer().getLong(NEXT_POINTER_OFFSET); + } + return nextBlockOffset; + } - public void setLastBlockOffset(final long lastBlockOffset) { - this.lastBlockOffset = lastBlockOffset; - } + public void setLastBlockOffset(final long lastBlockOffset) { + this.lastBlockOffset = lastBlockOffset; + } - public void writeLastBlockOffset(final long lastBlockOffset) { - this.lastBlockOffset = lastBlockOffset; - diskBlock.getByteBuffer().putLong(LAST_BLOCK_POINTER_POSITION, lastBlockOffset); - } + public void writeLastBlockOffset(final long lastBlockOffset) { + this.lastBlockOffset = lastBlockOffset; + diskBlock.getByteBuffer().putLong(LAST_BLOCK_POINTER_POSITION, lastBlockOffset); + } - private void writeBufferToByteBuffer() { - diskBlock.getByteBuffer().position(INT_SEQUENCE_OFFSET); - diskBlock.getByteBuffer().put(buffer); - } + private void writeBufferToByteBuffer() { + diskBlock.getByteBuffer().position(INT_SEQUENCE_OFFSET); + diskBlock.getByteBuffer().put(buffer); + } - private void writeBlockHeader() { - diskBlock.getByteBuffer().putLong(NEXT_POINTER_OFFSET, nextBlockOffset); - diskBlock.getByteBuffer().putLong(LAST_BLOCK_POINTER_POSITION, lastBlockOffset); - } + private void writeBlockHeader() { + diskBlock.getByteBuffer().putLong(NEXT_POINTER_OFFSET, nextBlockOffset); + diskBlock.getByteBuffer().putLong(LAST_BLOCK_POINTER_POSITION, lastBlockOffset); + } - public void writeAsync() { 
- writeBlockHeader(); - writeBufferToByteBuffer(); - } + public void writeAsync() { + writeBlockHeader(); + writeBufferToByteBuffer(); + } - public void force() { - diskBlock.force(); - } + public void force() { + diskBlock.force(); + } - @Override - public String toString() { - final LongList bufferDecoded = VariableByteEncoder.decode(buffer); - return "BSFileDiskBlock[bufferDecoded=" + bufferDecoded + "]"; - } + @Override + public String toString() { + final LongList bufferDecoded = VariableByteEncoder.decode(buffer); + return "BSFileDiskBlock[bufferDecoded=" + bufferDecoded + "]"; + } } diff --git a/block-storage/src/main/java/org/lucares/pdb/blockstorage/LongStreamFile.java b/block-storage/src/main/java/org/lucares/pdb/blockstorage/LongStreamFile.java index f1ce7c7..44cd86b 100644 --- a/block-storage/src/main/java/org/lucares/pdb/blockstorage/LongStreamFile.java +++ b/block-storage/src/main/java/org/lucares/pdb/blockstorage/LongStreamFile.java @@ -8,41 +8,41 @@ import org.lucares.pdb.diskstorage.DiskStorage; public class LongStreamFile implements AutoCloseable { - private final BSFile bsFile; + private final BSFile bsFile; - LongStreamFile(final BSFile bsFile) { - this.bsFile = bsFile; - } + LongStreamFile(final BSFile bsFile) { + this.bsFile = bsFile; + } - public static LongStreamFile existingFile(final long blockNumber, final DiskStorage diskStorage) - throws IOException { - final BSFile bsFile = BSFile.existingFile(blockNumber, diskStorage, NullCustomizer.INSTANCE); - return new LongStreamFile(bsFile); - } + public static LongStreamFile existingFile(final long blockNumber, final DiskStorage diskStorage) + throws IOException { + final BSFile bsFile = BSFile.existingFile(blockNumber, diskStorage, NullCustomizer.INSTANCE); + return new LongStreamFile(bsFile); + } - public static LongStreamFile newFile(final DiskStorage diskStorage) throws IOException { - final BSFile bsFile = BSFile.newFile(diskStorage, NullCustomizer.INSTANCE); - return new 
LongStreamFile(bsFile); - } + public static LongStreamFile newFile(final DiskStorage diskStorage) throws IOException { + final BSFile bsFile = BSFile.newFile(diskStorage, NullCustomizer.INSTANCE); + return new LongStreamFile(bsFile); + } - public void append(final long value) throws IOException { + public void append(final long value) throws IOException { - bsFile.append(value); - } + bsFile.append(value); + } - public Stream streamOfLongLists() { - return bsFile.streamOfLongLists(); - } + public Stream streamOfLongLists() { + return bsFile.streamOfLongLists(); + } - public LongList asLongList() { + public LongList asLongList() { - final LongList result = new LongList(); - streamOfLongLists().forEachOrdered(result::addAll); - return result; - } + final LongList result = new LongList(); + streamOfLongLists().forEachOrdered(result::addAll); + return result; + } - @Override - public void close() { - bsFile.close(); - } + @Override + public void close() { + bsFile.close(); + } } diff --git a/block-storage/src/main/java/org/lucares/pdb/blockstorage/NullCustomizer.java b/block-storage/src/main/java/org/lucares/pdb/blockstorage/NullCustomizer.java index 8b1be1b..fe18d5a 100644 --- a/block-storage/src/main/java/org/lucares/pdb/blockstorage/NullCustomizer.java +++ b/block-storage/src/main/java/org/lucares/pdb/blockstorage/NullCustomizer.java @@ -7,31 +7,31 @@ import org.lucares.collections.LongList; public class NullCustomizer implements BSFileCustomizer { - public static final NullCustomizer INSTANCE = new NullCustomizer(); + public static final NullCustomizer INSTANCE = new NullCustomizer(); - @Override - public void init(final BSFileDiskBlock lastDiskBlockOfStream) { - // nothing to do - this is a NullObject - } + @Override + public void init(final BSFileDiskBlock lastDiskBlockOfStream) { + // nothing to do - this is a NullObject + } - @Override - public Optional> getStreamMapper() { - // no mapper to return - this is a NullObject - return Optional.empty(); - } + 
@Override + public Optional> getStreamMapper() { + // no mapper to return - this is a NullObject + return Optional.empty(); + } - @Override - public void newBlock() { - // nothing to do - this is a NullObject - } + @Override + public void newBlock() { + // nothing to do - this is a NullObject + } - @Override - public long preProcessWriteValue1(final long value) { - return value; - } + @Override + public long preProcessWriteValue1(final long value) { + return value; + } - @Override - public long preProcessWriteValue2(final long value) { - return value; - } + @Override + public long preProcessWriteValue2(final long value) { + return value; + } } diff --git a/block-storage/src/main/java/org/lucares/pdb/blockstorage/TimeSeriesCustomizer.java b/block-storage/src/main/java/org/lucares/pdb/blockstorage/TimeSeriesCustomizer.java index 6e4b3eb..83532e8 100644 --- a/block-storage/src/main/java/org/lucares/pdb/blockstorage/TimeSeriesCustomizer.java +++ b/block-storage/src/main/java/org/lucares/pdb/blockstorage/TimeSeriesCustomizer.java @@ -8,71 +8,71 @@ import org.lucares.utils.byteencoder.VariableByteEncoder; public class TimeSeriesCustomizer implements BSFileCustomizer { - private static class TimeStampDeltaDecoder implements Function { + private static class TimeStampDeltaDecoder implements Function { - /** - * Computes the inverse of the delta encoding in {@link BSFile#appendTimeValue} - */ - @Override - public LongList apply(final LongList t) { - long lastTimeValue = 0; - for (int i = 0; i < t.size(); i += 2) { - lastTimeValue += t.get(i); - t.set(i, lastTimeValue); - } + /** + * Computes the inverse of the delta encoding in {@link BSFile#appendTimeValue} + */ + @Override + public LongList apply(final LongList t) { + long lastTimeValue = 0; + for (int i = 0; i < t.size(); i += 2) { + lastTimeValue += t.get(i); + t.set(i, lastTimeValue); + } - return t; - } - } + return t; + } + } - private static final TimeStampDeltaDecoder TIME_DELTA_DECODER = new 
TimeStampDeltaDecoder(); + private static final TimeStampDeltaDecoder TIME_DELTA_DECODER = new TimeStampDeltaDecoder(); - private long lastEpochMilli; + private long lastEpochMilli; - @Override - public void init(final BSFileDiskBlock lastDiskBlockOfStream) { - lastEpochMilli = determineLastEpochMilli(lastDiskBlockOfStream); - } + @Override + public void init(final BSFileDiskBlock lastDiskBlockOfStream) { + lastEpochMilli = determineLastEpochMilli(lastDiskBlockOfStream); + } - private long determineLastEpochMilli(final BSFileDiskBlock diskBlock) { + private long determineLastEpochMilli(final BSFileDiskBlock diskBlock) { - // get the time/value delta encoded longs - final byte[] buf = diskBlock.getBuffer(); - LongList longList = VariableByteEncoder.decode(buf); - final long result; - if (longList.size() < 2) { - // only new files have empty disk blocks - // and empty disk blocks have time offset 0 - result = 0; - } else { - // decode the deltas to get the correct timestamps - longList = TIME_DELTA_DECODER.apply(longList); + // get the time/value delta encoded longs + final byte[] buf = diskBlock.getBuffer(); + LongList longList = VariableByteEncoder.decode(buf); + final long result; + if (longList.size() < 2) { + // only new files have empty disk blocks + // and empty disk blocks have time offset 0 + result = 0; + } else { + // decode the deltas to get the correct timestamps + longList = TIME_DELTA_DECODER.apply(longList); - // return the last timestamp - result = longList.get(longList.size() - 2); - } - return result; - } + // return the last timestamp + result = longList.get(longList.size() - 2); + } + return result; + } - @Override - public Optional> getStreamMapper() { - return Optional.of(TIME_DELTA_DECODER); - } + @Override + public Optional> getStreamMapper() { + return Optional.of(TIME_DELTA_DECODER); + } - @Override - public void newBlock() { - lastEpochMilli = 0; - } + @Override + public void newBlock() { + lastEpochMilli = 0; + } - @Override - public long 
preProcessWriteValue1(final long epochMilli) { - final long epochMilliDelta = epochMilli - lastEpochMilli; - lastEpochMilli = epochMilli; - return epochMilliDelta; - } + @Override + public long preProcessWriteValue1(final long epochMilli) { + final long epochMilliDelta = epochMilli - lastEpochMilli; + lastEpochMilli = epochMilli; + return epochMilliDelta; + } - @Override - public long preProcessWriteValue2(final long value) { - return value; - } + @Override + public long preProcessWriteValue2(final long value) { + return value; + } } diff --git a/block-storage/src/main/java/org/lucares/pdb/blockstorage/TimeSeriesFile.java b/block-storage/src/main/java/org/lucares/pdb/blockstorage/TimeSeriesFile.java index b24b682..cfec9da 100644 --- a/block-storage/src/main/java/org/lucares/pdb/blockstorage/TimeSeriesFile.java +++ b/block-storage/src/main/java/org/lucares/pdb/blockstorage/TimeSeriesFile.java @@ -8,52 +8,52 @@ import org.lucares.pdb.diskstorage.DiskStorage; public class TimeSeriesFile implements AutoCloseable { - private final BSFile bsFile; + private final BSFile bsFile; - private TimeSeriesFile(final BSFile bsFile) { - this.bsFile = bsFile; - } + private TimeSeriesFile(final BSFile bsFile) { + this.bsFile = bsFile; + } - public static TimeSeriesFile existingFile(final long blockNumber, final DiskStorage diskStorage) { - final BSFile bsFile = BSFile.existingFile(blockNumber, diskStorage, new TimeSeriesCustomizer()); - return new TimeSeriesFile(bsFile); - } + public static TimeSeriesFile existingFile(final long blockNumber, final DiskStorage diskStorage) { + final BSFile bsFile = BSFile.existingFile(blockNumber, diskStorage, new TimeSeriesCustomizer()); + return new TimeSeriesFile(bsFile); + } - public static TimeSeriesFile newFile(final DiskStorage diskStorage) { - final BSFile bsFile = BSFile.newFile(diskStorage, new TimeSeriesCustomizer()); - return new TimeSeriesFile(bsFile); - } + public static TimeSeriesFile newFile(final DiskStorage diskStorage) { + final 
BSFile bsFile = BSFile.newFile(diskStorage, new TimeSeriesCustomizer()); + return new TimeSeriesFile(bsFile); + } - public void appendTimeValue(final long epochMilli, final long value) { + public void appendTimeValue(final long epochMilli, final long value) { - bsFile.append(epochMilli, value); - } + bsFile.append(epochMilli, value); + } - public Stream streamOfLongLists() { - return bsFile.streamOfLongLists(); - } + public Stream streamOfLongLists() { + return bsFile.streamOfLongLists(); + } - public LongList asTimeValueLongList() { + public LongList asTimeValueLongList() { - final LongList result = new LongList(); - streamOfLongLists().forEachOrdered(result::addAll); - return result; - } + final LongList result = new LongList(); + streamOfLongLists().forEachOrdered(result::addAll); + return result; + } - @Override - public void close() { - bsFile.close(); - } + @Override + public void close() { + bsFile.close(); + } - public long getRootBlockOffset() { - return bsFile.getRootBlockOffset(); - } + public long getRootBlockOffset() { + return bsFile.getRootBlockOffset(); + } - public Optional getLastValue() { - return bsFile.getLastValue(); - } + public Optional getLastValue() { + return bsFile.getLastValue(); + } - public void flush() { - bsFile.flush(); - } + public void flush() { + bsFile.flush(); + } } diff --git a/block-storage/src/main/java/org/lucares/pdb/diskstorage/DiskBlock.java b/block-storage/src/main/java/org/lucares/pdb/diskstorage/DiskBlock.java index e80a22b..c977368 100644 --- a/block-storage/src/main/java/org/lucares/pdb/diskstorage/DiskBlock.java +++ b/block-storage/src/main/java/org/lucares/pdb/diskstorage/DiskBlock.java @@ -5,52 +5,52 @@ import java.nio.MappedByteBuffer; public class DiskBlock { - private byte[] buffer = null; - private final long blockOffset; + private byte[] buffer = null; + private final long blockOffset; - private final ByteBuffer byteBuffer; + private final ByteBuffer byteBuffer; - public DiskBlock(final long blockOffset, 
final ByteBuffer byteBuffer) { - this.blockOffset = blockOffset; - this.byteBuffer = byteBuffer; - } + public DiskBlock(final long blockOffset, final ByteBuffer byteBuffer) { + this.blockOffset = blockOffset; + this.byteBuffer = byteBuffer; + } - public byte[] getBuffer() { + public byte[] getBuffer() { - if (buffer == null) { - this.buffer = new byte[byteBuffer.capacity()]; - byteBuffer.get(buffer); - } + if (buffer == null) { + this.buffer = new byte[byteBuffer.capacity()]; + byteBuffer.get(buffer); + } - return buffer; - } + return buffer; + } - public ByteBuffer getByteBuffer() { - return byteBuffer; - } + public ByteBuffer getByteBuffer() { + return byteBuffer; + } - public long getBlockOffset() { - return blockOffset; - } + public long getBlockOffset() { + return blockOffset; + } - private void writeBufferToByteBuffer() { - byteBuffer.position(0); - byteBuffer.put(buffer); - } + private void writeBufferToByteBuffer() { + byteBuffer.position(0); + byteBuffer.put(buffer); + } - public void writeAsync() { - writeBufferToByteBuffer(); - } + public void writeAsync() { + writeBufferToByteBuffer(); + } - public void force() { - // some tests use HeapByteBuffer and don't support force - if (byteBuffer instanceof MappedByteBuffer) { - ((MappedByteBuffer) byteBuffer).force(); - } - } + public void force() { + // some tests use HeapByteBuffer and don't support force + if (byteBuffer instanceof MappedByteBuffer) { + ((MappedByteBuffer) byteBuffer).force(); + } + } - @Override - public String toString() { - return "DiskBlock[" + blockOffset + "]"; - } + @Override + public String toString() { + return "DiskBlock[" + blockOffset + "]"; + } } diff --git a/block-storage/src/main/java/org/lucares/pdb/diskstorage/DiskStorage.java b/block-storage/src/main/java/org/lucares/pdb/diskstorage/DiskStorage.java index 29d4eb7..2b6fee9 100644 --- a/block-storage/src/main/java/org/lucares/pdb/diskstorage/DiskStorage.java +++ 
b/block-storage/src/main/java/org/lucares/pdb/diskstorage/DiskStorage.java @@ -14,273 +14,273 @@ import org.slf4j.LoggerFactory; public class DiskStorage implements AutoCloseable { - private static final Logger LOGGER = LoggerFactory.getLogger(DiskStorage.class); + private static final Logger LOGGER = LoggerFactory.getLogger(DiskStorage.class); - private static final long FREE_LIST_ROOT_OFFSET = 0; - private static final long NO_POINTER = 0; - private static final int FREE_LIST_NEXT_POINTER = 0; - private static final int FREE_LIST_PREV_POINTER = 8; - private static final int FREE_LIST_SIZE = 16; - private static final int FREE_LIST_NODE_SIZE = 32; + private static final long FREE_LIST_ROOT_OFFSET = 0; + private static final long NO_POINTER = 0; + private static final int FREE_LIST_NEXT_POINTER = 0; + private static final int FREE_LIST_PREV_POINTER = 8; + private static final int FREE_LIST_SIZE = 16; + private static final int FREE_LIST_NODE_SIZE = 32; - private final FileChannel fileChannel; + private final FileChannel fileChannel; - private Path relativeDatabaseFileForLogging; + private Path relativeDatabaseFileForLogging; - - public DiskStorage(final Path databaseFile, Path storageBasePath) { - this.relativeDatabaseFileForLogging = storageBasePath != null ? storageBasePath.relativize(databaseFile): databaseFile; - try { - Files.createDirectories(databaseFile.getParent()); + public DiskStorage(final Path databaseFile, Path storageBasePath) { + this.relativeDatabaseFileForLogging = storageBasePath != null ? 
storageBasePath.relativize(databaseFile) + : databaseFile; + try { + Files.createDirectories(databaseFile.getParent()); - fileChannel = FileChannel.open(databaseFile, StandardOpenOption.READ, StandardOpenOption.WRITE, - StandardOpenOption.CREATE); + fileChannel = FileChannel.open(databaseFile, StandardOpenOption.READ, StandardOpenOption.WRITE, + StandardOpenOption.CREATE); - initIfNew(); - } catch (final IOException e) { - throw new DiskStorageException(e); - } - } + initIfNew(); + } catch (final IOException e) { + throw new DiskStorageException(e); + } + } - private void initIfNew() throws IOException { - if (fileChannel.size() == 0) { - // file is new -> add root of the free list - writeFreeListRootNodePosition(NO_POINTER); - } - } + private void initIfNew() throws IOException { + if (fileChannel.size() == 0) { + // file is new -> add root of the free list + writeFreeListRootNodePosition(NO_POINTER); + } + } - public DiskBlock getDiskBlock(final long blockOffset, final int blockSize) { - try { - LOGGER.trace("read block={} file={}", blockOffset, relativeDatabaseFileForLogging); + public DiskBlock getDiskBlock(final long blockOffset, final int blockSize) { + try { + LOGGER.trace("read block={} file={}", blockOffset, relativeDatabaseFileForLogging); - final var byteBuffer = fileChannel.map(MapMode.READ_WRITE, blockOffset, blockSize); + final var byteBuffer = fileChannel.map(MapMode.READ_WRITE, blockOffset, blockSize); - return new DiskBlock(blockOffset, byteBuffer); - } catch (final IOException e) { - throw new DiskStorageException(e); - } - } - - public Path getRelativeDatabaseFileForLogging() { - return relativeDatabaseFileForLogging; - } + return new DiskBlock(blockOffset, byteBuffer); + } catch (final IOException e) { + throw new DiskStorageException(e); + } + } - @Override - public void close() { - try { - fileChannel.force(true); - fileChannel.close(); - } catch (final IOException e) { - throw new DiskStorageException(e); - } - } + public Path 
getRelativeDatabaseFileForLogging() { + return relativeDatabaseFileForLogging; + } - public synchronized long allocateBlock(final int blockSize) { - if (blockSize < FREE_LIST_NODE_SIZE) { - throw new IllegalArgumentException("The minimal allocation size is 32 byte."); - } + @Override + public void close() { + try { + fileChannel.force(true); + fileChannel.close(); + } catch (final IOException e) { + throw new DiskStorageException(e); + } + } - try { - final var optionalFreeBlock = findFreeBlockWithSize(blockSize); - if (optionalFreeBlock.isPresent()) { - final FreeListNode freeBlock = optionalFreeBlock.get(); - removeBlockFromFreeList(freeBlock); - clearBlock(freeBlock); - return freeBlock.getOffset(); - } else { - return allocateNewBlock(blockSize); - } - } catch (final IOException e) { - throw new DiskStorageException(e); - } - } + public synchronized long allocateBlock(final int blockSize) { + if (blockSize < FREE_LIST_NODE_SIZE) { + throw new IllegalArgumentException("The minimal allocation size is 32 byte."); + } - private long allocateNewBlock(final int blockSize) throws IOException { - final var buffer = new byte[blockSize]; - final var src = ByteBuffer.wrap(buffer); + try { + final var optionalFreeBlock = findFreeBlockWithSize(blockSize); + if (optionalFreeBlock.isPresent()) { + final FreeListNode freeBlock = optionalFreeBlock.get(); + removeBlockFromFreeList(freeBlock); + clearBlock(freeBlock); + return freeBlock.getOffset(); + } else { + return allocateNewBlock(blockSize); + } + } catch (final IOException e) { + throw new DiskStorageException(e); + } + } - // block numbers start with 1, so that the uninitialized value - // (0) means 'no block'. That way we do not have to write - // data to a newly created block, which reduces IO. 
- final var blockOffset = fileChannel.size(); - fileChannel.write(src, fileChannel.size()); - return blockOffset; - } + private long allocateNewBlock(final int blockSize) throws IOException { + final var buffer = new byte[blockSize]; + final var src = ByteBuffer.wrap(buffer); - public synchronized void free(final long blockOffset, final int blockSize) throws IOException { + // block numbers start with 1, so that the uninitialized value + // (0) means 'no block'. That way we do not have to write + // data to a newly created block, which reduces IO. + final var blockOffset = fileChannel.size(); + fileChannel.write(src, fileChannel.size()); + return blockOffset; + } - final var neighboringFreeListNode = getNeighboringFreeListNode(blockOffset); + public synchronized void free(final long blockOffset, final int blockSize) throws IOException { - if (neighboringFreeListNode.isPresent()) { - // insert new free node into the free list - final var prev = neighboringFreeListNode.get(); + final var neighboringFreeListNode = getNeighboringFreeListNode(blockOffset); - insertFreeListNode(prev, blockOffset, blockSize); + if (neighboringFreeListNode.isPresent()) { + // insert new free node into the free list + final var prev = neighboringFreeListNode.get(); - } else { - // add new free list node as the first node in the list - insertFreeListNodeAsNewRoot(blockOffset, blockSize); - } - } + insertFreeListNode(prev, blockOffset, blockSize); - private void insertFreeListNodeAsNewRoot(final long blockOffset, final int blockSize) throws IOException { - final var freeListRootNodePosition = readFreeListRootNodePosition(); + } else { + // add new free list node as the first node in the list + insertFreeListNodeAsNewRoot(blockOffset, blockSize); + } + } - if (freeListRootNodePosition > 0) { - // there are free list nodes, but they are after the new node + private void insertFreeListNodeAsNewRoot(final long blockOffset, final int blockSize) throws IOException { + final var 
freeListRootNodePosition = readFreeListRootNodePosition(); - final var next = readFreeListNode(freeListRootNodePosition); - final var newNode = new FreeListNode(blockOffset, blockSize); + if (freeListRootNodePosition > 0) { + // there are free list nodes, but they are after the new node - FreeListNode.link(newNode, next); + final var next = readFreeListNode(freeListRootNodePosition); + final var newNode = new FreeListNode(blockOffset, blockSize); - writeFreeListNode(newNode, next); - writeFreeListRootNodePosition(blockOffset); + FreeListNode.link(newNode, next); - } else { - // this is the first free list node - final var newNode = new FreeListNode(blockOffset, blockSize); - writeFreeListNode(newNode); - writeFreeListRootNodePosition(blockOffset); - } - } + writeFreeListNode(newNode, next); + writeFreeListRootNodePosition(blockOffset); - private void insertFreeListNode(final FreeListNode prev, final long blockOffset, final int blockSize) - throws IOException { + } else { + // this is the first free list node + final var newNode = new FreeListNode(blockOffset, blockSize); + writeFreeListNode(newNode); + writeFreeListRootNodePosition(blockOffset); + } + } - final var newNode = new FreeListNode(blockOffset, blockSize); - final var next = prev.hasNext() ? readFreeListNode(prev.getNext()) : null; + private void insertFreeListNode(final FreeListNode prev, final long blockOffset, final int blockSize) + throws IOException { - FreeListNode.link(prev, newNode, next); + final var newNode = new FreeListNode(blockOffset, blockSize); + final var next = prev.hasNext() ? 
readFreeListNode(prev.getNext()) : null; - writeFreeListNode(prev, newNode, next); - } + FreeListNode.link(prev, newNode, next); - /** - * - * @param blockOffset the offset of the block that is about to be free'd - * @return the free list node before the block - * @throws IOException - */ - private Optional getNeighboringFreeListNode(final long blockOffset) throws IOException { - FreeListNode result = null; - final long freeListRootNodePosition = readFreeListRootNodePosition(); - if (freeListRootNodePosition < blockOffset) { + writeFreeListNode(prev, newNode, next); + } - long nextFreeListNodeOffset = freeListRootNodePosition; - while (nextFreeListNodeOffset > 0) { - final var freeListNode = readFreeListNode(nextFreeListNodeOffset); + /** + * + * @param blockOffset the offset of the block that is about to be free'd + * @return the free list node before the block + * @throws IOException + */ + private Optional getNeighboringFreeListNode(final long blockOffset) throws IOException { + FreeListNode result = null; + final long freeListRootNodePosition = readFreeListRootNodePosition(); + if (freeListRootNodePosition < blockOffset) { - if (freeListNode.getOffset() > blockOffset) { - break; - } - nextFreeListNodeOffset = freeListNode.getNext(); - result = freeListNode; - } - } - return Optional.ofNullable(result); - } + long nextFreeListNodeOffset = freeListRootNodePosition; + while (nextFreeListNodeOffset > 0) { + final var freeListNode = readFreeListNode(nextFreeListNodeOffset); - private Optional findFreeBlockWithSize(final long blockSize) throws IOException { - FreeListNode result = null; - final long freeListRootNodePosition = readFreeListRootNodePosition(); + if (freeListNode.getOffset() > blockOffset) { + break; + } + nextFreeListNodeOffset = freeListNode.getNext(); + result = freeListNode; + } + } + return Optional.ofNullable(result); + } - long nextFreeListNodeOffset = freeListRootNodePosition; - while (nextFreeListNodeOffset > 0) { - final var freeListNode = 
readFreeListNode(nextFreeListNodeOffset); + private Optional findFreeBlockWithSize(final long blockSize) throws IOException { + FreeListNode result = null; + final long freeListRootNodePosition = readFreeListRootNodePosition(); - if (freeListNode.getSize() == blockSize) { - result = freeListNode; - break; - } - nextFreeListNodeOffset = freeListNode.getNext(); - } + long nextFreeListNodeOffset = freeListRootNodePosition; + while (nextFreeListNodeOffset > 0) { + final var freeListNode = readFreeListNode(nextFreeListNodeOffset); - return Optional.ofNullable(result); - } + if (freeListNode.getSize() == blockSize) { + result = freeListNode; + break; + } + nextFreeListNodeOffset = freeListNode.getNext(); + } - private void clearBlock(final FreeListNode freeBlock) throws IOException { - final var src = ByteBuffer.allocate(freeBlock.getSize()); - fileChannel.write(src, freeBlock.getOffset()); - } + return Optional.ofNullable(result); + } - private void removeBlockFromFreeList(final FreeListNode freeBlock) throws IOException { + private void clearBlock(final FreeListNode freeBlock) throws IOException { + final var src = ByteBuffer.allocate(freeBlock.getSize()); + fileChannel.write(src, freeBlock.getOffset()); + } - if (freeBlock.getPrev() == 0) { - writeFreeListRootNodePosition(freeBlock.getNext()); - } + private void removeBlockFromFreeList(final FreeListNode freeBlock) throws IOException { - if (freeBlock.getNext() > 0) { - final FreeListNode next = readFreeListNode(freeBlock.getNext()); - next.setPrev(freeBlock.getPrev()); - writeFreeListNode(next); - } + if (freeBlock.getPrev() == 0) { + writeFreeListRootNodePosition(freeBlock.getNext()); + } - if (freeBlock.getPrev() > 0) { - final FreeListNode prev = readFreeListNode(freeBlock.getPrev()); - prev.setNext(freeBlock.getNext()); - writeFreeListNode(prev); - } - } + if (freeBlock.getNext() > 0) { + final FreeListNode next = readFreeListNode(freeBlock.getNext()); + next.setPrev(freeBlock.getPrev()); + 
writeFreeListNode(next); + } - private FreeListNode readFreeListNode(final long freeListNodePosition) throws IOException { - final var freeListNode = ByteBuffer.allocate(FREE_LIST_NODE_SIZE); - fileChannel.read(freeListNode, freeListNodePosition); - final long offset = freeListNodePosition; - final long next = freeListNode.getLong(FREE_LIST_NEXT_POINTER); - final long prev = freeListNode.getLong(FREE_LIST_PREV_POINTER); - final int size = freeListNode.getInt(FREE_LIST_SIZE); - return new FreeListNode(offset, next, prev, size); - } + if (freeBlock.getPrev() > 0) { + final FreeListNode prev = readFreeListNode(freeBlock.getPrev()); + prev.setNext(freeBlock.getNext()); + writeFreeListNode(prev); + } + } - private void writeFreeListNode(final FreeListNode... nodes) throws IOException { + private FreeListNode readFreeListNode(final long freeListNodePosition) throws IOException { + final var freeListNode = ByteBuffer.allocate(FREE_LIST_NODE_SIZE); + fileChannel.read(freeListNode, freeListNodePosition); + final long offset = freeListNodePosition; + final long next = freeListNode.getLong(FREE_LIST_NEXT_POINTER); + final long prev = freeListNode.getLong(FREE_LIST_PREV_POINTER); + final int size = freeListNode.getInt(FREE_LIST_SIZE); + return new FreeListNode(offset, next, prev, size); + } - for (final FreeListNode node : nodes) { - if (node != null) { - final var src = ByteBuffer.allocate(FREE_LIST_NODE_SIZE); - src.putLong(FREE_LIST_NEXT_POINTER, node.getNext()); - src.putLong(FREE_LIST_PREV_POINTER, node.getPrev()); - src.putInt(FREE_LIST_SIZE, node.getSize()); - fileChannel.write(src, node.getOffset()); - } - } - } + private void writeFreeListNode(final FreeListNode... 
nodes) throws IOException { - private long readFreeListRootNodePosition() throws IOException { - final var freeListFirstBlock = ByteBuffer.allocate(8); - fileChannel.read(freeListFirstBlock, FREE_LIST_ROOT_OFFSET); - return freeListFirstBlock.getLong(0); - } + for (final FreeListNode node : nodes) { + if (node != null) { + final var src = ByteBuffer.allocate(FREE_LIST_NODE_SIZE); + src.putLong(FREE_LIST_NEXT_POINTER, node.getNext()); + src.putLong(FREE_LIST_PREV_POINTER, node.getPrev()); + src.putInt(FREE_LIST_SIZE, node.getSize()); + fileChannel.write(src, node.getOffset()); + } + } + } - private void writeFreeListRootNodePosition(final long freeListRootNodePosition) throws IOException { - final var freeListFirstBlock = ByteBuffer.allocate(8); - freeListFirstBlock.putLong(0, freeListRootNodePosition); - fileChannel.write(freeListFirstBlock, FREE_LIST_ROOT_OFFSET); - } + private long readFreeListRootNodePosition() throws IOException { + final var freeListFirstBlock = ByteBuffer.allocate(8); + fileChannel.read(freeListFirstBlock, FREE_LIST_ROOT_OFFSET); + return freeListFirstBlock.getLong(0); + } - public synchronized void ensureAlignmentForNewBlocks(final int alignment) { - try { - final long size = fileChannel.size(); - final int alignmentMismatch = Math.floorMod(size, alignment); - if (alignmentMismatch != 0) { - // The next allocated block would not be aligned. Therefore we allocate a - // throw-away block. 
- allocateNewBlock(alignment - alignmentMismatch); - } - } catch (final IOException e) { - throw new DiskStorageException(e); - } - } + private void writeFreeListRootNodePosition(final long freeListRootNodePosition) throws IOException { + final var freeListFirstBlock = ByteBuffer.allocate(8); + freeListFirstBlock.putLong(0, freeListRootNodePosition); + fileChannel.write(freeListFirstBlock, FREE_LIST_ROOT_OFFSET); + } - public long size() { - try { - return fileChannel.size(); - } catch (final IOException e) { - throw new DiskStorageException(e); - } - } + public synchronized void ensureAlignmentForNewBlocks(final int alignment) { + try { + final long size = fileChannel.size(); + final int alignmentMismatch = Math.floorMod(size, alignment); + if (alignmentMismatch != 0) { + // The next allocated block would not be aligned. Therefore we allocate a + // throw-away block. + allocateNewBlock(alignment - alignmentMismatch); + } + } catch (final IOException e) { + throw new DiskStorageException(e); + } + } - public int minAllocationSize() { - return FREE_LIST_NODE_SIZE; - } + public long size() { + try { + return fileChannel.size(); + } catch (final IOException e) { + throw new DiskStorageException(e); + } + } + + public int minAllocationSize() { + return FREE_LIST_NODE_SIZE; + } } diff --git a/block-storage/src/main/java/org/lucares/pdb/diskstorage/DiskStorageException.java b/block-storage/src/main/java/org/lucares/pdb/diskstorage/DiskStorageException.java index 5b2763d..fdcfc34 100644 --- a/block-storage/src/main/java/org/lucares/pdb/diskstorage/DiskStorageException.java +++ b/block-storage/src/main/java/org/lucares/pdb/diskstorage/DiskStorageException.java @@ -2,18 +2,18 @@ package org.lucares.pdb.diskstorage; public class DiskStorageException extends RuntimeException { - private static final long serialVersionUID = 1683775743640383633L; + private static final long serialVersionUID = 1683775743640383633L; - public DiskStorageException(final String message, final 
Throwable cause) { - super(message, cause); - } + public DiskStorageException(final String message, final Throwable cause) { + super(message, cause); + } - public DiskStorageException(final String message) { - super(message); - } + public DiskStorageException(final String message) { + super(message); + } - public DiskStorageException(final Throwable cause) { - super(cause); - } + public DiskStorageException(final Throwable cause) { + super(cause); + } } diff --git a/block-storage/src/main/java/org/lucares/pdb/diskstorage/FreeListNode.java b/block-storage/src/main/java/org/lucares/pdb/diskstorage/FreeListNode.java index 42e3958..5f233cb 100644 --- a/block-storage/src/main/java/org/lucares/pdb/diskstorage/FreeListNode.java +++ b/block-storage/src/main/java/org/lucares/pdb/diskstorage/FreeListNode.java @@ -1,82 +1,82 @@ package org.lucares.pdb.diskstorage; public class FreeListNode { - private final long offset; - private long next; - private long prev; - private int size; + private final long offset; + private long next; + private long prev; + private int size; - public FreeListNode(final long offset, final int size) { - this.offset = offset; - this.size = size; - } + public FreeListNode(final long offset, final int size) { + this.offset = offset; + this.size = size; + } - public FreeListNode(final long offset, final long next, final long prev, final int size) { - this.offset = offset; - this.next = next; - this.prev = prev; - this.size = size; - } + public FreeListNode(final long offset, final long next, final long prev, final int size) { + this.offset = offset; + this.next = next; + this.prev = prev; + this.size = size; + } - public long getOffset() { - return offset; - } + public long getOffset() { + return offset; + } - public long getNext() { - return next; - } + public long getNext() { + return next; + } - public void setNext(final long next) { - this.next = next; - } + public void setNext(final long next) { + this.next = next; + } - public void setNext(final 
FreeListNode next) { - this.next = next != null ? next.getOffset() : 0; - } + public void setNext(final FreeListNode next) { + this.next = next != null ? next.getOffset() : 0; + } - public long getPrev() { - return prev; - } + public long getPrev() { + return prev; + } - public void setPrev(final long prev) { - this.prev = prev; - } + public void setPrev(final long prev) { + this.prev = prev; + } - public void setPrev(final FreeListNode prev) { - this.prev = prev != null ? prev.getOffset() : 0; - } + public void setPrev(final FreeListNode prev) { + this.prev = prev != null ? prev.getOffset() : 0; + } - public int getSize() { - return size; - } + public int getSize() { + return size; + } - public void setSize(final int size) { - this.size = size; - } + public void setSize(final int size) { + this.size = size; + } - @Override - public String toString() { - return "FreeListNode [offset=" + offset + ", next=" + next + ", prev=" + prev + ", size=" + size + "]"; - } + @Override + public String toString() { + return "FreeListNode [offset=" + offset + ", next=" + next + ", prev=" + prev + ", size=" + size + "]"; + } - public boolean hasNext() { - return next != 0; - } + public boolean hasNext() { + return next != 0; + } - public static void link(final FreeListNode prev, final FreeListNode next) { - prev.setNext(next); - next.setPrev(prev); - } + public static void link(final FreeListNode prev, final FreeListNode next) { + prev.setNext(next); + next.setPrev(prev); + } - public static void link(final FreeListNode prev, final FreeListNode middle, final FreeListNode next) { - if (prev != null) { - prev.setNext(middle); - } - middle.setPrev(prev); - middle.setNext(next); - if (next != null) { - next.setPrev(prev); - } - } + public static void link(final FreeListNode prev, final FreeListNode middle, final FreeListNode next) { + if (prev != null) { + prev.setNext(middle); + } + middle.setPrev(prev); + middle.setNext(next); + if (next != null) { + next.setPrev(prev); + } + } } 
diff --git a/block-storage/src/main/java/org/lucares/pdb/map/ByteArrayKey.java b/block-storage/src/main/java/org/lucares/pdb/map/ByteArrayKey.java index 63b3e3b..6cb1c6c 100644 --- a/block-storage/src/main/java/org/lucares/pdb/map/ByteArrayKey.java +++ b/block-storage/src/main/java/org/lucares/pdb/map/ByteArrayKey.java @@ -3,77 +3,77 @@ package org.lucares.pdb.map; import java.util.Arrays; public final class ByteArrayKey implements Comparable { - private final byte[] bytes; + private final byte[] bytes; - public ByteArrayKey(final byte[] bytes) { - this.bytes = bytes; - } + public ByteArrayKey(final byte[] bytes) { + this.bytes = bytes; + } - @Override - public int compareTo(final ByteArrayKey o) { - return compare(bytes, o.bytes); - } + @Override + public int compareTo(final ByteArrayKey o) { + return compare(bytes, o.bytes); + } - public static int compare(final byte[] key, final byte[] otherKey) { - return Arrays.compare(key, otherKey); - } + public static int compare(final byte[] key, final byte[] otherKey) { + return Arrays.compare(key, otherKey); + } - public static boolean isPrefix(final byte[] key, final byte[] keyPrefix) { + public static boolean isPrefix(final byte[] key, final byte[] keyPrefix) { - return compareKeyPrefix(key, keyPrefix) == 0; - } + return compareKeyPrefix(key, keyPrefix) == 0; + } - /** - * Same as {@link #compare(byte[])}, but return 0 if prefix is a prefix of the - * key. {@link #compare(byte[])} return values >0 in that case, because key - * is longer than the prefix. - * - * @param prefix the prefix - * @return 0 if {@code prefix} is a prefix of the key otherwise the value is - * defined by {@link #compare(byte[])} - */ - public static int compareKeyPrefix(final byte[] key, final byte[] prefix) { - int i = 0; - while (i < key.length && i < prefix.length) { - if (key[i] != prefix[i]) { - return key[i] - prefix[i]; - } - i++; - } + /** + * Same as {@link #compare(byte[])}, but return 0 if prefix is a prefix of the + * key. 
{@link #compare(byte[])} return values >0 in that case, because key + * is longer than the prefix. + * + * @param prefix the prefix + * @return 0 if {@code prefix} is a prefix of the key otherwise the value is + * defined by {@link #compare(byte[])} + */ + public static int compareKeyPrefix(final byte[] key, final byte[] prefix) { + int i = 0; + while (i < key.length && i < prefix.length) { + if (key[i] != prefix[i]) { + return key[i] - prefix[i]; + } + i++; + } - return key.length > prefix.length ? 0 : key.length - prefix.length; + return key.length > prefix.length ? 0 : key.length - prefix.length; - } + } - public static boolean equal(final byte[] key, final byte[] otherKey) { - return compare(key, otherKey) == 0; - } - - @Override - public String toString() { - return Arrays.toString(bytes); - } + public static boolean equal(final byte[] key, final byte[] otherKey) { + return compare(key, otherKey) == 0; + } - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + Arrays.hashCode(bytes); - return result; - } + @Override + public String toString() { + return Arrays.toString(bytes); + } - @Override - public boolean equals(final Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - final ByteArrayKey other = (ByteArrayKey) obj; - if (!Arrays.equals(bytes, other.bytes)) - return false; - return true; - } + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + Arrays.hashCode(bytes); + return result; + } + + @Override + public boolean equals(final Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + final ByteArrayKey other = (ByteArrayKey) obj; + if (!Arrays.equals(bytes, other.bytes)) + return false; + return true; + } } diff --git a/block-storage/src/main/java/org/lucares/pdb/map/Empty.java 
b/block-storage/src/main/java/org/lucares/pdb/map/Empty.java index 7627887..3b7719d 100644 --- a/block-storage/src/main/java/org/lucares/pdb/map/Empty.java +++ b/block-storage/src/main/java/org/lucares/pdb/map/Empty.java @@ -14,13 +14,13 @@ import org.lucares.pdb.map.PersistentMap.EncoderDecoder; * {@link Empty} solves this by providing a single unmodifiable value. */ public final class Empty { - public static final Empty INSTANCE = new Empty(); + public static final Empty INSTANCE = new Empty(); - private Empty() { - } + private Empty() { + } - @Override - public String toString() { - return ""; - } + @Override + public String toString() { + return ""; + } } diff --git a/block-storage/src/main/java/org/lucares/pdb/map/NodeEntry.java b/block-storage/src/main/java/org/lucares/pdb/map/NodeEntry.java index c32df7a..3adab2c 100644 --- a/block-storage/src/main/java/org/lucares/pdb/map/NodeEntry.java +++ b/block-storage/src/main/java/org/lucares/pdb/map/NodeEntry.java @@ -9,158 +9,158 @@ import java.util.function.Predicate; import org.lucares.utils.byteencoder.VariableByteEncoder; class NodeEntry { - enum ValueType { - VALUE_INLINE((byte) 1), NODE_POINTER((byte) 2); + enum ValueType { + VALUE_INLINE((byte) 1), NODE_POINTER((byte) 2); - private final byte b; + private final byte b; - ValueType(final byte b) { - this.b = b; - } + ValueType(final byte b) { + this.b = b; + } - static ValueType fromByte(final byte b) { - for (final ValueType type : values()) { - if (type.b == b) { - return type; - } - } - throw new IllegalStateException("Cannot map byte " + b + " to a value type."); - } + static ValueType fromByte(final byte b) { + for (final ValueType type : values()) { + if (type.b == b) { + return type; + } + } + throw new IllegalStateException("Cannot map byte " + b + " to a value type."); + } - public byte asByte() { - return b; - } - } + public byte asByte() { + return b; + } + } - static final class KeyMatches implements Predicate { + static final class KeyMatches 
implements Predicate { - private final byte[] key; + private final byte[] key; - public KeyMatches(final byte[] key) { - this.key = key; - } + public KeyMatches(final byte[] key) { + this.key = key; + } - @Override - public boolean test(final NodeEntry t) { - return Arrays.equals(key, t.getKey()); - } - } + @Override + public boolean test(final NodeEntry t) { + return Arrays.equals(key, t.getKey()); + } + } - private final ValueType type; - private final byte[] key; - private final byte[] value; + private final ValueType type; + private final byte[] key; + private final byte[] value; - public NodeEntry(final ValueType type, final byte[] key, final byte[] value) { - this.type = type; - this.key = key; - this.value = value; - } + public NodeEntry(final ValueType type, final byte[] key, final byte[] value) { + this.type = type; + this.key = key; + this.value = value; + } - public ValueType getType() { - return type; - } + public ValueType getType() { + return type; + } - public byte[] getKey() { - return key; - } + public byte[] getKey() { + return key; + } - public byte[] getValue() { - return value; - } + public byte[] getValue() { + return value; + } - public int size() { - return 1 + key.length + value.length; - } + public int size() { + return 1 + key.length + value.length; + } - @Override - public String toString() { - final String valueAsString = isInnerNode() ? String.valueOf(VariableByteEncoder.decodeFirstValue(value)) - : new String(value, StandardCharsets.UTF_8); + @Override + public String toString() { + final String valueAsString = isInnerNode() ? 
String.valueOf(VariableByteEncoder.decodeFirstValue(value)) + : new String(value, StandardCharsets.UTF_8); - return "NodeEntry [type=" + type + ", key=" + new String(key, StandardCharsets.UTF_8) + ", value=" - + valueAsString + "]"; - } + return "NodeEntry [type=" + type + ", key=" + new String(key, StandardCharsets.UTF_8) + ", value=" + + valueAsString + "]"; + } - public String toString(final Function keyDecoder, final Function valueDecoder) { - final String valueAsString = isInnerNode() ? String.valueOf(VariableByteEncoder.decodeFirstValue(value)) - : String.valueOf(valueDecoder.apply(value)); + public String toString(final Function keyDecoder, final Function valueDecoder) { + final String valueAsString = isInnerNode() ? String.valueOf(VariableByteEncoder.decodeFirstValue(value)) + : String.valueOf(valueDecoder.apply(value)); - final String keyAsString; - if (Arrays.equals(key, PersistentMap.MAX_KEY)) { - keyAsString = "<<>>"; - } else { - keyAsString = String.valueOf(keyDecoder.apply(key)); - } + final String keyAsString; + if (Arrays.equals(key, PersistentMap.MAX_KEY)) { + keyAsString = "<<>>"; + } else { + keyAsString = String.valueOf(keyDecoder.apply(key)); + } - return "NodeEntry [type=" + type + ", key=" + keyAsString + ", value=" + valueAsString + "]"; - } + return "NodeEntry [type=" + type + ", key=" + keyAsString + ", value=" + valueAsString + "]"; + } - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + Arrays.hashCode(key); - result = prime * result + ((type == null) ? 0 : type.hashCode()); - result = prime * result + Arrays.hashCode(value); - return result; - } + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + Arrays.hashCode(key); + result = prime * result + ((type == null) ? 
0 : type.hashCode()); + result = prime * result + Arrays.hashCode(value); + return result; + } - @Override - public boolean equals(final Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - final NodeEntry other = (NodeEntry) obj; - if (!Arrays.equals(key, other.key)) - return false; - if (type != other.type) - return false; - if (!Arrays.equals(value, other.value)) - return false; - return true; - } + @Override + public boolean equals(final Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + final NodeEntry other = (NodeEntry) obj; + if (!Arrays.equals(key, other.key)) + return false; + if (type != other.type) + return false; + if (!Arrays.equals(value, other.value)) + return false; + return true; + } - public static int neededBytes(final Collection entries) { - return entries.stream().mapToInt(NodeEntry::size).sum(); - } + public static int neededBytes(final Collection entries) { + return entries.stream().mapToInt(NodeEntry::size).sum(); + } - public int compare(final byte[] otherKey) { + public int compare(final byte[] otherKey) { - return ByteArrayKey.compare(key, otherKey); - } + return ByteArrayKey.compare(key, otherKey); + } - public boolean isPrefix(final byte[] keyPrefix) { + public boolean isPrefix(final byte[] keyPrefix) { - return ByteArrayKey.compareKeyPrefix(key, keyPrefix) == 0; - } + return ByteArrayKey.compareKeyPrefix(key, keyPrefix) == 0; + } - /** - * Same as {@link #compare(byte[])}, but return 0 if prefix is a prefix of the - * key. {@link #compare(byte[])} return values >0 in that case, because key - * is longer than the prefix. 
- * - * @param prefix the prefix - * @return 0 if {@code prefix} is a prefix of the key otherwise the value is - * defined by {@link #compare(byte[])} - */ - public int compareKeyPrefix(final byte[] prefix) { + /** + * Same as {@link #compare(byte[])}, but return 0 if prefix is a prefix of the + * key. {@link #compare(byte[])} return values >0 in that case, because key + * is longer than the prefix. + * + * @param prefix the prefix + * @return 0 if {@code prefix} is a prefix of the key otherwise the value is + * defined by {@link #compare(byte[])} + */ + public int compareKeyPrefix(final byte[] prefix) { - return ByteArrayKey.compareKeyPrefix(key, prefix); - } + return ByteArrayKey.compareKeyPrefix(key, prefix); + } - public boolean equal(final byte[] otherKey) { - return compare(otherKey) == 0; - } + public boolean equal(final byte[] otherKey) { + return compare(otherKey) == 0; + } - public boolean isDataNode() { - return type == ValueType.VALUE_INLINE; - } + public boolean isDataNode() { + return type == ValueType.VALUE_INLINE; + } - public boolean isInnerNode() { - return type == ValueType.NODE_POINTER; - } + public boolean isInnerNode() { + return type == ValueType.NODE_POINTER; + } } diff --git a/block-storage/src/main/java/org/lucares/pdb/map/PersistentMap.java b/block-storage/src/main/java/org/lucares/pdb/map/PersistentMap.java index b567964..ae8269d 100644 --- a/block-storage/src/main/java/org/lucares/pdb/map/PersistentMap.java +++ b/block-storage/src/main/java/org/lucares/pdb/map/PersistentMap.java @@ -23,470 +23,470 @@ import org.slf4j.LoggerFactory; public class PersistentMap implements AutoCloseable { - private static final Logger LOGGER = LoggerFactory.getLogger(PersistentMap.class); + private static final Logger LOGGER = LoggerFactory.getLogger(PersistentMap.class); - // the maximum key - static final byte[] MAX_KEY; - static { - MAX_KEY = new byte[20]; - Arrays.fill(MAX_KEY, Byte.MAX_VALUE); - } + // the maximum key + static final byte[] MAX_KEY; + 
static { + MAX_KEY = new byte[20]; + Arrays.fill(MAX_KEY, Byte.MAX_VALUE); + } - interface VisitorCallback { - void visit(PersistentMapDiskNode node, PersistentMapDiskNode parentNode, NodeEntry nodeEntry, int depth); - } + interface VisitorCallback { + void visit(PersistentMapDiskNode node, PersistentMapDiskNode parentNode, NodeEntry nodeEntry, int depth); + } - public interface EncoderDecoder { - public byte[] encode(O object); - - public O decode(byte[] bytes); + public interface EncoderDecoder { + public byte[] encode(O object); + + public O decode(byte[] bytes); - public default Function asDecoder() { - return bytes -> this.decode(bytes); - } + public default Function asDecoder() { + return bytes -> this.decode(bytes); + } - public default Function asEncoder() { - return plain -> this.encode(plain); - } + public default Function asEncoder() { + return plain -> this.encode(plain); + } - public byte[] getEmptyValue(); - } + public byte[] getEmptyValue(); + } - private static final class StringCoder implements EncoderDecoder { + private static final class StringCoder implements EncoderDecoder { - @Override - public byte[] encode(final String object) { - return object.getBytes(StandardCharsets.UTF_8); - } + @Override + public byte[] encode(final String object) { + return object.getBytes(StandardCharsets.UTF_8); + } - @Override - public String decode(final byte[] bytes) { - return bytes == null ? null : new String(bytes, StandardCharsets.UTF_8); - } + @Override + public String decode(final byte[] bytes) { + return bytes == null ? 
null : new String(bytes, StandardCharsets.UTF_8); + } - @Override - public byte[] getEmptyValue() { - return new byte[] { 0 }; - } - } + @Override + public byte[] getEmptyValue() { + return new byte[] { 0 }; + } + } - private static final class LongCoder implements EncoderDecoder { + private static final class LongCoder implements EncoderDecoder { - @Override - public byte[] encode(final Long object) { - return VariableByteEncoder.encode(object); - } + @Override + public byte[] encode(final Long object) { + return VariableByteEncoder.encode(object); + } - @Override - public Long decode(final byte[] bytes) { - return bytes == null ? null : VariableByteEncoder.decodeFirstValue(bytes); - } + @Override + public Long decode(final byte[] bytes) { + return bytes == null ? null : VariableByteEncoder.decodeFirstValue(bytes); + } - @Override - public byte[] getEmptyValue() { - return new byte[] { 0 }; - } - } + @Override + public byte[] getEmptyValue() { + return new byte[] { 0 }; + } + } - private static final class UUIDCoder implements EncoderDecoder { + private static final class UUIDCoder implements EncoderDecoder { - @Override - public byte[] encode(final UUID uuid) { - final long mostSignificantBits = uuid.getMostSignificantBits(); - final long leastSignificantBits = uuid.getLeastSignificantBits(); - return VariableByteEncoder.encode(mostSignificantBits, leastSignificantBits); - } + @Override + public byte[] encode(final UUID uuid) { + final long mostSignificantBits = uuid.getMostSignificantBits(); + final long leastSignificantBits = uuid.getLeastSignificantBits(); + return VariableByteEncoder.encode(mostSignificantBits, leastSignificantBits); + } - @Override - public UUID decode(final byte[] bytes) { + @Override + public UUID decode(final byte[] bytes) { - final LongList longs = VariableByteEncoder.decode(bytes); - final long mostSignificantBits = longs.get(0); - final long leastSignificantBits = longs.get(1); + final LongList longs = 
VariableByteEncoder.decode(bytes); + final long mostSignificantBits = longs.get(0); + final long leastSignificantBits = longs.get(1); - return new UUID(mostSignificantBits, leastSignificantBits); - } + return new UUID(mostSignificantBits, leastSignificantBits); + } - @Override - public byte[] getEmptyValue() { - return new byte[] { 0 }; - } - } + @Override + public byte[] getEmptyValue() { + return new byte[] { 0 }; + } + } - private static final class EmptyCoder implements EncoderDecoder { - - private static final byte[] EMPTY_BYTE_ARRAY = new byte[0]; + private static final class EmptyCoder implements EncoderDecoder { + + private static final byte[] EMPTY_BYTE_ARRAY = new byte[0]; - @Override - public byte[] encode(final Empty __) { - return EMPTY_BYTE_ARRAY; - } + @Override + public byte[] encode(final Empty __) { + return EMPTY_BYTE_ARRAY; + } - @Override - public Empty decode(final byte[] bytes) { - Preconditions.checkTrue(bytes.length == 0, ""); - - return Empty.INSTANCE; - } - - @Override - public byte[] getEmptyValue() { - return new byte[] {}; - } - } - - public static final EncoderDecoder LONG_CODER = new LongCoder(); - public static final EncoderDecoder UUID_ENCODER = new UUIDCoder(); - public static final EncoderDecoder STRING_CODER = new StringCoder(); - public static final EncoderDecoder EMPTY_ENCODER = new EmptyCoder(); - - static final int BLOCK_SIZE = 4096; - static final long NODE_OFFSET_TO_ROOT_NODE = 8; - - private final DiskStorage diskStore; - - private int maxEntriesInNode = Integer.MAX_VALUE; - - private final EncoderDecoder keyEncoder; - - private final EncoderDecoder valueEncoder; - - private final LRUCache nodeCache = new LRUCache<>(10_000); - - private final LRUCache valueCache = new LRUCache<>(1_000); - - public PersistentMap(final Path path, final Path storageBasePath, final EncoderDecoder keyEncoder, - final EncoderDecoder valueEncoder) { - this.diskStore = new DiskStorage(path, storageBasePath); - this.keyEncoder = keyEncoder; - 
this.valueEncoder = valueEncoder; - initIfNew(); - } - - @Override - public void close() { - diskStore.close(); - } - - public void setMaxEntriesInNode(final int maxEntriesInNode) { - this.maxEntriesInNode = maxEntriesInNode; - } - - private void initIfNew() { - if (diskStore.size() < BLOCK_SIZE) { - final long nodeOffsetToRootNode = diskStore.allocateBlock(diskStore.minAllocationSize()); - Preconditions.checkEqual(nodeOffsetToRootNode, NODE_OFFSET_TO_ROOT_NODE, - "The offset of the pointer to the root node must be at a well known location. " - + "Otherwise we would not be able to find it in an already existing file."); - - // 2. make sure new blocks are aligned to the block size (for faster disk IO) - diskStore.ensureAlignmentForNewBlocks(BLOCK_SIZE); - - // 3. initialize an empty root node - final long blockOffset = diskStore.allocateBlock(BLOCK_SIZE); - final var rootNode = PersistentMapDiskNode.emptyRootNode(blockOffset); - writeNode(rootNode); - - // 4. update pointer to root node - writeNodeOffsetOfRootNode(blockOffset); - - // 5. insert a dummy entry with a 'maximum' key - putValue(MAX_KEY, valueEncoder.getEmptyValue()); - } - } - - public synchronized void putAllValues(final Map map) { - for (final Entry e : map.entrySet()) { - putValue(e.getKey(), e.getValue()); - } - } - - public synchronized V putValue(final K key, final V value) { - - final V cachedValue = valueCache.get(key); - if (cachedValue != null && cachedValue == value) { - return value; - } - - final byte[] encodedKey = keyEncoder.encode(key); - final byte[] encodedValue = valueEncoder.encode(value); - final byte[] encodedOldValue = putValue(encodedKey, encodedValue); - final V oldValue = encodedOldValue == null ? 
null : valueEncoder.decode(encodedOldValue); - valueCache.put(key, value); - return oldValue; - } - - public synchronized V getValue(final K key) { - - final V cachedValue = valueCache.get(key); - if (cachedValue != null) { - return cachedValue; - } - - final byte[] encodedKey = keyEncoder.encode(key); - final byte[] foundValue = getValue(encodedKey); - final V result = foundValue == null ? null : valueEncoder.decode(foundValue); - valueCache.put(key, result); - return result; - } - - private byte[] putValue(final byte[] key, final byte[] value) { - final long rootNodeOffset = readNodeOffsetOfRootNode(); - final Stack parents = new Stack<>(); - return insert(parents, rootNodeOffset, key, value); - } - - private byte[] getValue(final byte[] key) { - final long rootNodeOffset = readNodeOffsetOfRootNode(); - final NodeEntry entry = findNodeEntry(rootNodeOffset, key); - - return entry == null ? null : entry.getValue(); - } - - private byte[] insert(final Stack parents, final long nodeOffest, final byte[] key, - final byte[] value) { - final PersistentMapDiskNode node = getNode(nodeOffest); - - final NodeEntry entry = node.getNodeEntryTo(key); - if (entry == null || entry.isDataNode()) { - - final byte[] oldValue; - if (entry == null) { - oldValue = null; - } else { - // found a NodeEntry that is either equal to key, or it is at the insertion - // point - final boolean entryIsForKey = entry.equal(key); - - oldValue = entryIsForKey ? entry.getValue() : null; - - // Early exit, if the oldValue equals the new value. - // We do not have to replace the value, because it would not change anything - // (just cause unnecessary write operations). But we return the oldValue so that - // the caller thinks we replaced the value. 
- if (Objects.equals(oldValue, value)) { - return oldValue; - } - - if (entryIsForKey) { - node.removeKey(key); - } - } - - if (node.canAdd(key, value, maxEntriesInNode)) { - // insert in existing node - node.addKeyValue(key, value); - writeNode(node); - return oldValue; - } else { - // add new node - // 1. split current node into A and B - splitNode(parents, node); - - // 2. insert the value - // start from the root, because we might have added a new root node - return putValue(key, value); - } - } else { - final long childNodeOffset = toNodeOffset(entry); - parents.add(node); - return insert(parents, childNodeOffset, key, value); - } - } - - private PersistentMapDiskNode splitNode(final Stack parents, - final PersistentMapDiskNode node) { - - // System.out.println("\n\npre split node: " + node + "\n"); - - final long newBlockOffset = diskStore.allocateBlock(BLOCK_SIZE); - - final PersistentMapDiskNode newNode = node.split(newBlockOffset); - - final PersistentMapDiskNode parent = parents.isEmpty() ? 
null : parents.pop(); - - if (parent != null) { - final byte[] newNodeKey = newNode.getTopNodeEntry().getKey(); - if (parent.canAdd(newNodeKey, newBlockOffset, maxEntriesInNode)) { - parent.addKeyNodePointer(newNodeKey, newBlockOffset); - writeNode(parent); - writeNode(newNode); - writeNode(node); - return parent; - } else { - final PersistentMapDiskNode grandParentNode = splitNode(parents, parent); - - final NodeEntry pointerToParentAfterSplit = grandParentNode.getNodeEntryTo(newNodeKey); - - Preconditions.checkEqual(pointerToParentAfterSplit.isInnerNode(), true, "{0} is pointer to inner node", - pointerToParentAfterSplit); - final long parentNodeOffset = toNodeOffset(pointerToParentAfterSplit); // the parent we have to add the - // newNode to - final PersistentMapDiskNode parentNode = getNode(parentNodeOffset); - parentNode.addKeyNodePointer(newNodeKey, newBlockOffset); - writeNode(parentNode); - writeNode(newNode); - writeNode(node); - return parentNode; - } - - } else { - // has no parent -> create a new parent (the new parent will also be the new - // root) - final long newRootNodeOffset = diskStore.allocateBlock(BLOCK_SIZE); - final PersistentMapDiskNode rootNode = PersistentMapDiskNode.emptyRootNode(newRootNodeOffset); - final byte[] newNodeKey = newNode.getTopNodeEntry().getKey(); - rootNode.addKeyNodePointer(newNodeKey, newBlockOffset); - - final byte[] oldNodeKey = node.getTopNodeEntry().getKey(); - rootNode.addKeyNodePointer(oldNodeKey, node.getNodeOffset()); - writeNode(rootNode); - - writeNode(newNode); - writeNode(node); - - writeNodeOffsetOfRootNode(newRootNodeOffset); - return rootNode; - } - } - - private NodeEntry findNodeEntry(final long nodeOffest, final byte[] key) { - final PersistentMapDiskNode node = getNode(nodeOffest); - - final var entry = node.getNodeEntryTo(key); - if (entry == null) { - return null; - } else if (entry.isDataNode()) { - if (entry.equal(key)) { - return entry; - } else { - return null; - } - } else { - final long 
childNodeOffset = toNodeOffset(entry); - return findNodeEntry(childNodeOffset, key); - } - } - - private long toNodeOffset(final NodeEntry entry) { - Preconditions.checkEqual(entry.isInnerNode(), true); - return VariableByteEncoder.decodeFirstValue(entry.getValue()); - } - - private PersistentMapDiskNode getNode(final long nodeOffset) { - - PersistentMapDiskNode node = nodeCache.get(nodeOffset); - if (node == null) { - - final DiskBlock diskBlock = diskStore.getDiskBlock(nodeOffset, BLOCK_SIZE); - - node = PersistentMapDiskNode.parse(nodeOffset, diskBlock); - nodeCache.put(nodeOffset, node); - } - - return node; - } - - private void writeNode(final PersistentMapDiskNode node) { - if (LOGGER.isTraceEnabled()) { - LOGGER.trace("writing node {}", node.toString(keyEncoder.asDecoder(), valueEncoder.asDecoder())); - } - final long nodeOffest = node.getNodeOffset(); - // final DiskBlock diskBlock = diskStore.getDiskBlock(nodeOffest, BLOCK_SIZE); - DiskBlock diskBlock = node.getDiskBlock(); - if (diskBlock == null) { - diskBlock = diskStore.getDiskBlock(nodeOffest, BLOCK_SIZE); - } - final byte[] buffer = diskBlock.getBuffer(); - final byte[] newBuffer = node.serialize(); - System.arraycopy(newBuffer, 0, buffer, 0, buffer.length); - diskBlock.writeAsync(); - // diskBlock.force(); // makes writing nodes slower by factor 800 (sic!) 
- } - - public synchronized void print() { - - visitNodeEntriesPreOrder((node, parentNode, nodeEntry, depth) -> { - - final PrintStream writer = System.out; - - final String children = "#" + node.getEntries().size(); - - writer.println(" ".repeat(depth) + "@" + node.getNodeOffset() + " " + children + " " + nodeEntry - .toString(b -> String.valueOf(keyEncoder.decode(b)), b -> String.valueOf(valueEncoder.decode(b)))); - }); - } - - public synchronized void visitNodeEntriesPreOrder(final VisitorCallback visitor) { - final long rootNodeOffset = readNodeOffsetOfRootNode(); - visitNodeEntriesPreOrderRecursively(rootNodeOffset, null, visitor, 0); - } - - private void visitNodeEntriesPreOrderRecursively(final long nodeOffset, final PersistentMapDiskNode parentNode, - final VisitorCallback visitor, final int depth) { - final PersistentMapDiskNode node = getNode(nodeOffset); - - for (final NodeEntry child : node.getEntries()) { - - visitor.visit(node, parentNode, child, depth); - if (child.isInnerNode()) { - final long childNodeOffset = VariableByteEncoder.decodeFirstValue(child.getValue()); - visitNodeEntriesPreOrderRecursively(childNodeOffset, node, visitor, depth + 1); - } - } - } - - enum VisitByPrefixMode { - FIND, ITERATE - } - - public synchronized void visitValues(final K keyPrefix, final Visitor visitor) { - final byte[] encodedKeyPrefix = keyEncoder.encode(keyPrefix); - - final long rootNodeOffset = readNodeOffsetOfRootNode(); - iterateNodeEntryByPrefix(rootNodeOffset, encodedKeyPrefix, visitor); - } - - private void iterateNodeEntryByPrefix(final long nodeOffest, final byte[] keyPrefix, final Visitor visitor) { - final PersistentMapDiskNode node = getNode(nodeOffest); - - // list of children that might contain a key with the keyPrefix - final List nodesForPrefix = node.getNodesByPrefix(keyPrefix); - - for (final NodeEntry entry : nodesForPrefix) { - - if (entry.isDataNode()) { - final int prefixCompareResult = entry.compareKeyPrefix(keyPrefix); - if 
(prefixCompareResult == 0) { - - if (Arrays.equals(entry.getKey(), MAX_KEY)) { - continue; - } - final K key = keyEncoder.decode(entry.getKey()); - final V value = valueEncoder.decode(entry.getValue()); - visitor.visit(key, value); - - // System.out.println("--> " + key + "=" + value); - } else if (prefixCompareResult > 0) { - break; - } - } else { - final long childNodeOffset = toNodeOffset(entry); - iterateNodeEntryByPrefix(childNodeOffset, keyPrefix, visitor); - } - } - } - - private long readNodeOffsetOfRootNode() { - final DiskBlock diskBlock = diskStore.getDiskBlock(NODE_OFFSET_TO_ROOT_NODE, diskStore.minAllocationSize()); - - return diskBlock.getByteBuffer().getLong(0); - } - - private void writeNodeOffsetOfRootNode(final long newNodeOffsetToRootNode) { - final DiskBlock diskBlock = diskStore.getDiskBlock(NODE_OFFSET_TO_ROOT_NODE, diskStore.minAllocationSize()); - diskBlock.getByteBuffer().putLong(0, newNodeOffsetToRootNode); - diskBlock.force(); - } + @Override + public Empty decode(final byte[] bytes) { + Preconditions.checkTrue(bytes.length == 0, ""); + + return Empty.INSTANCE; + } + + @Override + public byte[] getEmptyValue() { + return new byte[] {}; + } + } + + public static final EncoderDecoder LONG_CODER = new LongCoder(); + public static final EncoderDecoder UUID_ENCODER = new UUIDCoder(); + public static final EncoderDecoder STRING_CODER = new StringCoder(); + public static final EncoderDecoder EMPTY_ENCODER = new EmptyCoder(); + + static final int BLOCK_SIZE = 4096; + static final long NODE_OFFSET_TO_ROOT_NODE = 8; + + private final DiskStorage diskStore; + + private int maxEntriesInNode = Integer.MAX_VALUE; + + private final EncoderDecoder keyEncoder; + + private final EncoderDecoder valueEncoder; + + private final LRUCache nodeCache = new LRUCache<>(10_000); + + private final LRUCache valueCache = new LRUCache<>(1_000); + + public PersistentMap(final Path path, final Path storageBasePath, final EncoderDecoder keyEncoder, + final EncoderDecoder 
valueEncoder) { + this.diskStore = new DiskStorage(path, storageBasePath); + this.keyEncoder = keyEncoder; + this.valueEncoder = valueEncoder; + initIfNew(); + } + + @Override + public void close() { + diskStore.close(); + } + + public void setMaxEntriesInNode(final int maxEntriesInNode) { + this.maxEntriesInNode = maxEntriesInNode; + } + + private void initIfNew() { + if (diskStore.size() < BLOCK_SIZE) { + final long nodeOffsetToRootNode = diskStore.allocateBlock(diskStore.minAllocationSize()); + Preconditions.checkEqual(nodeOffsetToRootNode, NODE_OFFSET_TO_ROOT_NODE, + "The offset of the pointer to the root node must be at a well known location. " + + "Otherwise we would not be able to find it in an already existing file."); + + // 2. make sure new blocks are aligned to the block size (for faster disk IO) + diskStore.ensureAlignmentForNewBlocks(BLOCK_SIZE); + + // 3. initialize an empty root node + final long blockOffset = diskStore.allocateBlock(BLOCK_SIZE); + final var rootNode = PersistentMapDiskNode.emptyRootNode(blockOffset); + writeNode(rootNode); + + // 4. update pointer to root node + writeNodeOffsetOfRootNode(blockOffset); + + // 5. insert a dummy entry with a 'maximum' key + putValue(MAX_KEY, valueEncoder.getEmptyValue()); + } + } + + public synchronized void putAllValues(final Map map) { + for (final Entry e : map.entrySet()) { + putValue(e.getKey(), e.getValue()); + } + } + + public synchronized V putValue(final K key, final V value) { + + final V cachedValue = valueCache.get(key); + if (cachedValue != null && cachedValue == value) { + return value; + } + + final byte[] encodedKey = keyEncoder.encode(key); + final byte[] encodedValue = valueEncoder.encode(value); + final byte[] encodedOldValue = putValue(encodedKey, encodedValue); + final V oldValue = encodedOldValue == null ? 
null : valueEncoder.decode(encodedOldValue); + valueCache.put(key, value); + return oldValue; + } + + public synchronized V getValue(final K key) { + + final V cachedValue = valueCache.get(key); + if (cachedValue != null) { + return cachedValue; + } + + final byte[] encodedKey = keyEncoder.encode(key); + final byte[] foundValue = getValue(encodedKey); + final V result = foundValue == null ? null : valueEncoder.decode(foundValue); + valueCache.put(key, result); + return result; + } + + private byte[] putValue(final byte[] key, final byte[] value) { + final long rootNodeOffset = readNodeOffsetOfRootNode(); + final Stack parents = new Stack<>(); + return insert(parents, rootNodeOffset, key, value); + } + + private byte[] getValue(final byte[] key) { + final long rootNodeOffset = readNodeOffsetOfRootNode(); + final NodeEntry entry = findNodeEntry(rootNodeOffset, key); + + return entry == null ? null : entry.getValue(); + } + + private byte[] insert(final Stack parents, final long nodeOffest, final byte[] key, + final byte[] value) { + final PersistentMapDiskNode node = getNode(nodeOffest); + + final NodeEntry entry = node.getNodeEntryTo(key); + if (entry == null || entry.isDataNode()) { + + final byte[] oldValue; + if (entry == null) { + oldValue = null; + } else { + // found a NodeEntry that is either equal to key, or it is at the insertion + // point + final boolean entryIsForKey = entry.equal(key); + + oldValue = entryIsForKey ? entry.getValue() : null; + + // Early exit, if the oldValue equals the new value. + // We do not have to replace the value, because it would not change anything + // (just cause unnecessary write operations). But we return the oldValue so that + // the caller thinks we replaced the value. 
+ if (Objects.equals(oldValue, value)) { + return oldValue; + } + + if (entryIsForKey) { + node.removeKey(key); + } + } + + if (node.canAdd(key, value, maxEntriesInNode)) { + // insert in existing node + node.addKeyValue(key, value); + writeNode(node); + return oldValue; + } else { + // add new node + // 1. split current node into A and B + splitNode(parents, node); + + // 2. insert the value + // start from the root, because we might have added a new root node + return putValue(key, value); + } + } else { + final long childNodeOffset = toNodeOffset(entry); + parents.add(node); + return insert(parents, childNodeOffset, key, value); + } + } + + private PersistentMapDiskNode splitNode(final Stack parents, + final PersistentMapDiskNode node) { + + // System.out.println("\n\npre split node: " + node + "\n"); + + final long newBlockOffset = diskStore.allocateBlock(BLOCK_SIZE); + + final PersistentMapDiskNode newNode = node.split(newBlockOffset); + + final PersistentMapDiskNode parent = parents.isEmpty() ? 
null : parents.pop(); + + if (parent != null) { + final byte[] newNodeKey = newNode.getTopNodeEntry().getKey(); + if (parent.canAdd(newNodeKey, newBlockOffset, maxEntriesInNode)) { + parent.addKeyNodePointer(newNodeKey, newBlockOffset); + writeNode(parent); + writeNode(newNode); + writeNode(node); + return parent; + } else { + final PersistentMapDiskNode grandParentNode = splitNode(parents, parent); + + final NodeEntry pointerToParentAfterSplit = grandParentNode.getNodeEntryTo(newNodeKey); + + Preconditions.checkEqual(pointerToParentAfterSplit.isInnerNode(), true, "{0} is pointer to inner node", + pointerToParentAfterSplit); + final long parentNodeOffset = toNodeOffset(pointerToParentAfterSplit); // the parent we have to add the + // newNode to + final PersistentMapDiskNode parentNode = getNode(parentNodeOffset); + parentNode.addKeyNodePointer(newNodeKey, newBlockOffset); + writeNode(parentNode); + writeNode(newNode); + writeNode(node); + return parentNode; + } + + } else { + // has no parent -> create a new parent (the new parent will also be the new + // root) + final long newRootNodeOffset = diskStore.allocateBlock(BLOCK_SIZE); + final PersistentMapDiskNode rootNode = PersistentMapDiskNode.emptyRootNode(newRootNodeOffset); + final byte[] newNodeKey = newNode.getTopNodeEntry().getKey(); + rootNode.addKeyNodePointer(newNodeKey, newBlockOffset); + + final byte[] oldNodeKey = node.getTopNodeEntry().getKey(); + rootNode.addKeyNodePointer(oldNodeKey, node.getNodeOffset()); + writeNode(rootNode); + + writeNode(newNode); + writeNode(node); + + writeNodeOffsetOfRootNode(newRootNodeOffset); + return rootNode; + } + } + + private NodeEntry findNodeEntry(final long nodeOffest, final byte[] key) { + final PersistentMapDiskNode node = getNode(nodeOffest); + + final var entry = node.getNodeEntryTo(key); + if (entry == null) { + return null; + } else if (entry.isDataNode()) { + if (entry.equal(key)) { + return entry; + } else { + return null; + } + } else { + final long 
childNodeOffset = toNodeOffset(entry); + return findNodeEntry(childNodeOffset, key); + } + } + + private long toNodeOffset(final NodeEntry entry) { + Preconditions.checkEqual(entry.isInnerNode(), true); + return VariableByteEncoder.decodeFirstValue(entry.getValue()); + } + + private PersistentMapDiskNode getNode(final long nodeOffset) { + + PersistentMapDiskNode node = nodeCache.get(nodeOffset); + if (node == null) { + + final DiskBlock diskBlock = diskStore.getDiskBlock(nodeOffset, BLOCK_SIZE); + + node = PersistentMapDiskNode.parse(nodeOffset, diskBlock); + nodeCache.put(nodeOffset, node); + } + + return node; + } + + private void writeNode(final PersistentMapDiskNode node) { + if (LOGGER.isTraceEnabled()) { + LOGGER.trace("writing node {}", node.toString(keyEncoder.asDecoder(), valueEncoder.asDecoder())); + } + final long nodeOffest = node.getNodeOffset(); + // final DiskBlock diskBlock = diskStore.getDiskBlock(nodeOffest, BLOCK_SIZE); + DiskBlock diskBlock = node.getDiskBlock(); + if (diskBlock == null) { + diskBlock = diskStore.getDiskBlock(nodeOffest, BLOCK_SIZE); + } + final byte[] buffer = diskBlock.getBuffer(); + final byte[] newBuffer = node.serialize(); + System.arraycopy(newBuffer, 0, buffer, 0, buffer.length); + diskBlock.writeAsync(); + // diskBlock.force(); // makes writing nodes slower by factor 800 (sic!) 
+ } + + public synchronized void print() { + + visitNodeEntriesPreOrder((node, parentNode, nodeEntry, depth) -> { + + final PrintStream writer = System.out; + + final String children = "#" + node.getEntries().size(); + + writer.println(" ".repeat(depth) + "@" + node.getNodeOffset() + " " + children + " " + nodeEntry + .toString(b -> String.valueOf(keyEncoder.decode(b)), b -> String.valueOf(valueEncoder.decode(b)))); + }); + } + + public synchronized void visitNodeEntriesPreOrder(final VisitorCallback visitor) { + final long rootNodeOffset = readNodeOffsetOfRootNode(); + visitNodeEntriesPreOrderRecursively(rootNodeOffset, null, visitor, 0); + } + + private void visitNodeEntriesPreOrderRecursively(final long nodeOffset, final PersistentMapDiskNode parentNode, + final VisitorCallback visitor, final int depth) { + final PersistentMapDiskNode node = getNode(nodeOffset); + + for (final NodeEntry child : node.getEntries()) { + + visitor.visit(node, parentNode, child, depth); + if (child.isInnerNode()) { + final long childNodeOffset = VariableByteEncoder.decodeFirstValue(child.getValue()); + visitNodeEntriesPreOrderRecursively(childNodeOffset, node, visitor, depth + 1); + } + } + } + + enum VisitByPrefixMode { + FIND, ITERATE + } + + public synchronized void visitValues(final K keyPrefix, final Visitor visitor) { + final byte[] encodedKeyPrefix = keyEncoder.encode(keyPrefix); + + final long rootNodeOffset = readNodeOffsetOfRootNode(); + iterateNodeEntryByPrefix(rootNodeOffset, encodedKeyPrefix, visitor); + } + + private void iterateNodeEntryByPrefix(final long nodeOffest, final byte[] keyPrefix, final Visitor visitor) { + final PersistentMapDiskNode node = getNode(nodeOffest); + + // list of children that might contain a key with the keyPrefix + final List nodesForPrefix = node.getNodesByPrefix(keyPrefix); + + for (final NodeEntry entry : nodesForPrefix) { + + if (entry.isDataNode()) { + final int prefixCompareResult = entry.compareKeyPrefix(keyPrefix); + if 
(prefixCompareResult == 0) { + + if (Arrays.equals(entry.getKey(), MAX_KEY)) { + continue; + } + final K key = keyEncoder.decode(entry.getKey()); + final V value = valueEncoder.decode(entry.getValue()); + visitor.visit(key, value); + + // System.out.println("--> " + key + "=" + value); + } else if (prefixCompareResult > 0) { + break; + } + } else { + final long childNodeOffset = toNodeOffset(entry); + iterateNodeEntryByPrefix(childNodeOffset, keyPrefix, visitor); + } + } + } + + private long readNodeOffsetOfRootNode() { + final DiskBlock diskBlock = diskStore.getDiskBlock(NODE_OFFSET_TO_ROOT_NODE, diskStore.minAllocationSize()); + + return diskBlock.getByteBuffer().getLong(0); + } + + private void writeNodeOffsetOfRootNode(final long newNodeOffsetToRootNode) { + final DiskBlock diskBlock = diskStore.getDiskBlock(NODE_OFFSET_TO_ROOT_NODE, diskStore.minAllocationSize()); + diskBlock.getByteBuffer().putLong(0, newNodeOffsetToRootNode); + diskBlock.force(); + } } diff --git a/block-storage/src/main/java/org/lucares/pdb/map/PersistentMapDiskNode.java b/block-storage/src/main/java/org/lucares/pdb/map/PersistentMapDiskNode.java index 2bb8e96..7693b55 100644 --- a/block-storage/src/main/java/org/lucares/pdb/map/PersistentMapDiskNode.java +++ b/block-storage/src/main/java/org/lucares/pdb/map/PersistentMapDiskNode.java @@ -42,256 +42,256 @@ import org.lucares.utils.byteencoder.VariableByteEncoder; */ public class PersistentMapDiskNode { - private final TreeMap entries; - private final long nodeOffset; - private final DiskBlock diskBlock; + private final TreeMap entries; + private final long nodeOffset; + private final DiskBlock diskBlock; - public PersistentMapDiskNode(final long nodeOffset, final Collection entries, - final DiskBlock diskBlock) { - this.nodeOffset = nodeOffset; - this.diskBlock = diskBlock; - this.entries = toMap(entries); - } + public PersistentMapDiskNode(final long nodeOffset, final Collection entries, + final DiskBlock diskBlock) { + this.nodeOffset = 
nodeOffset; + this.diskBlock = diskBlock; + this.entries = toMap(entries); + } - private static TreeMap toMap(final Collection nodeEntries) { - final TreeMap result = new TreeMap<>(); + private static TreeMap toMap(final Collection nodeEntries) { + final TreeMap result = new TreeMap<>(); - for (final NodeEntry nodeEntry : nodeEntries) { - result.put(new ByteArrayKey(nodeEntry.getKey()), nodeEntry); - } + for (final NodeEntry nodeEntry : nodeEntries) { + result.put(new ByteArrayKey(nodeEntry.getKey()), nodeEntry); + } - return result; - } + return result; + } - public static PersistentMapDiskNode emptyRootNode(final long nodeOffset) { - return new PersistentMapDiskNode(nodeOffset, Collections.emptyList(), null); - } + public static PersistentMapDiskNode emptyRootNode(final long nodeOffset) { + return new PersistentMapDiskNode(nodeOffset, Collections.emptyList(), null); + } - public static PersistentMapDiskNode parse(final long nodeOffset, final DiskBlock diskBlock) { - final byte[] data = diskBlock.getBuffer(); - if (data.length != PersistentMap.BLOCK_SIZE) { - throw new IllegalStateException( - "block size must be " + PersistentMap.BLOCK_SIZE + " but was " + data.length); - } - final LongList longs = VariableByteEncoder.decode(data); + public static PersistentMapDiskNode parse(final long nodeOffset, final DiskBlock diskBlock) { + final byte[] data = diskBlock.getBuffer(); + if (data.length != PersistentMap.BLOCK_SIZE) { + throw new IllegalStateException( + "block size must be " + PersistentMap.BLOCK_SIZE + " but was " + data.length); + } + final LongList longs = VariableByteEncoder.decode(data); - final List entries = deserialize(longs, data); - return new PersistentMapDiskNode(nodeOffset, entries, diskBlock); - } + final List entries = deserialize(longs, data); + return new PersistentMapDiskNode(nodeOffset, entries, diskBlock); + } - public static List deserialize(final LongList keyLengths, final byte[] buffer) { - final List entries = new ArrayList<>(); + public 
static List deserialize(final LongList keyLengths, final byte[] buffer) { + final List entries = new ArrayList<>(); - if (keyLengths.isEmpty() || keyLengths.get(0) == 0) { - // node is empty -> should only happen for the root node - } else { - final int numEntries = (int) keyLengths.get(0); + if (keyLengths.isEmpty() || keyLengths.get(0) == 0) { + // node is empty -> should only happen for the root node + } else { + final int numEntries = (int) keyLengths.get(0); - int offset = PersistentMap.BLOCK_SIZE; - for (int i = 0; i < numEntries; i++) { - final int keyLength = (int) keyLengths.get(i * 2 + 1); - final int valueLength = (int) keyLengths.get(i * 2 + 2); + int offset = PersistentMap.BLOCK_SIZE; + for (int i = 0; i < numEntries; i++) { + final int keyLength = (int) keyLengths.get(i * 2 + 1); + final int valueLength = (int) keyLengths.get(i * 2 + 2); - final int valueOffset = offset - valueLength; - final int keyOffset = valueOffset - keyLength; - final int typeOffset = keyOffset - 1; + final int valueOffset = offset - valueLength; + final int keyOffset = valueOffset - keyLength; + final int typeOffset = keyOffset - 1; - final byte typeByte = buffer[typeOffset]; - final byte[] key = Arrays.copyOfRange(buffer, keyOffset, keyOffset + keyLength); - final byte[] value = Arrays.copyOfRange(buffer, valueOffset, valueOffset + valueLength); + final byte typeByte = buffer[typeOffset]; + final byte[] key = Arrays.copyOfRange(buffer, keyOffset, keyOffset + keyLength); + final byte[] value = Arrays.copyOfRange(buffer, valueOffset, valueOffset + valueLength); - final NodeEntry entry = new NodeEntry(ValueType.fromByte(typeByte), key, value); + final NodeEntry entry = new NodeEntry(ValueType.fromByte(typeByte), key, value); - entries.add(entry); + entries.add(entry); - offset = typeOffset; - } - } - return entries; - } + offset = typeOffset; + } + } + return entries; + } - public byte[] serialize() { + public byte[] serialize() { - return serialize(entries); - } + return 
serialize(entries); + } - public DiskBlock getDiskBlock() { - return diskBlock; - } + public DiskBlock getDiskBlock() { + return diskBlock; + } - public long getNodeOffset() { - return nodeOffset; - } + public long getNodeOffset() { + return nodeOffset; + } - public NodeEntry getNodeEntryTo(final byte[] key) { + public NodeEntry getNodeEntryTo(final byte[] key) { - final Entry ceilingEntry = entries.ceilingEntry(new ByteArrayKey(key)); - return ceilingEntry != null ? ceilingEntry.getValue() : null; - } + final Entry ceilingEntry = entries.ceilingEntry(new ByteArrayKey(key)); + return ceilingEntry != null ? ceilingEntry.getValue() : null; + } - public List getNodesByPrefix(final byte[] keyPrefix) { - final List result = new ArrayList<>(); + public List getNodesByPrefix(final byte[] keyPrefix) { + final List result = new ArrayList<>(); - for (final NodeEntry nodeEntry : entries.values()) { - final int prefixCompareResult = nodeEntry.compareKeyPrefix(keyPrefix); - if (prefixCompareResult == 0) { - // add all entries where keyPrefix is a prefix of the key - result.add(nodeEntry); - } else if (prefixCompareResult > 0) { - // Only add the first entry where the keyPrefix is smaller (as defined by - // compareKeyPrefix) than the key. - // These are entries that might contain key with the keyPrefix. But only the - // first of those can really have such keys. - result.add(nodeEntry); - break; - } - } + for (final NodeEntry nodeEntry : entries.values()) { + final int prefixCompareResult = nodeEntry.compareKeyPrefix(keyPrefix); + if (prefixCompareResult == 0) { + // add all entries where keyPrefix is a prefix of the key + result.add(nodeEntry); + } else if (prefixCompareResult > 0) { + // Only add the first entry where the keyPrefix is smaller (as defined by + // compareKeyPrefix) than the key. + // These are entries that might contain key with the keyPrefix. But only the + // first of those can really have such keys. 
+ result.add(nodeEntry); + break; + } + } - return result; - } + return result; + } - public void addKeyValue(final byte[] key, final byte[] value) { - addNode(ValueType.VALUE_INLINE, key, value); - } + public void addKeyValue(final byte[] key, final byte[] value) { + addNode(ValueType.VALUE_INLINE, key, value); + } - public void addKeyNodePointer(final byte[] key, final long nodePointer) { - final byte[] value = VariableByteEncoder.encode(nodePointer); - addNode(ValueType.NODE_POINTER, key, value); - } + public void addKeyNodePointer(final byte[] key, final long nodePointer) { + final byte[] value = VariableByteEncoder.encode(nodePointer); + addNode(ValueType.NODE_POINTER, key, value); + } - public void addNode(final ValueType valueType, final byte[] key, final byte[] value) { + public void addNode(final ValueType valueType, final byte[] key, final byte[] value) { - final NodeEntry entry = new NodeEntry(valueType, key, value); - entries.put(new ByteArrayKey(key), entry); - } + final NodeEntry entry = new NodeEntry(valueType, key, value); + entries.put(new ByteArrayKey(key), entry); + } - public boolean canAdd(final byte[] key, final long nodeOffset, final int maxEntriesInNode) { - return canAdd(key, VariableByteEncoder.encode(nodeOffset), maxEntriesInNode); - } + public boolean canAdd(final byte[] key, final long nodeOffset, final int maxEntriesInNode) { + return canAdd(key, VariableByteEncoder.encode(nodeOffset), maxEntriesInNode); + } - public boolean canAdd(final byte[] key, final byte[] value, final int maxEntriesInNode) { + public boolean canAdd(final byte[] key, final byte[] value, final int maxEntriesInNode) { - if (entries.size() > maxEntriesInNode) { - return false; - } else { - final NodeEntry entry = new NodeEntry(ValueType.VALUE_INLINE, key, value); - final List tmp = new ArrayList<>(entries.size() + 1); - tmp.addAll(entries.values()); - tmp.add(entry); + if (entries.size() > maxEntriesInNode) { + return false; + } else { + final NodeEntry entry = new 
NodeEntry(ValueType.VALUE_INLINE, key, value); + final List tmp = new ArrayList<>(entries.size() + 1); + tmp.addAll(entries.values()); + tmp.add(entry); - // the +1 is for the null-byte terminator of the prefix - return neededBytesTotal(tmp) + 1 <= PersistentMap.BLOCK_SIZE; - } - } + // the +1 is for the null-byte terminator of the prefix + return neededBytesTotal(tmp) + 1 <= PersistentMap.BLOCK_SIZE; + } + } - public void removeKey(final byte[] key) { - entries.remove(new ByteArrayKey(key)); - } + public void removeKey(final byte[] key) { + entries.remove(new ByteArrayKey(key)); + } - public List getEntries() { - return new ArrayList<>(entries.values()); - } + public List getEntries() { + return new ArrayList<>(entries.values()); + } - public void clear() { - entries.clear(); - } + public void clear() { + entries.clear(); + } - @Override - public String toString() { - return "@" + nodeOffset + ": " - + String.join("\n", entries.values().stream().map(NodeEntry::toString).collect(Collectors.toList())); - } - - public String toString(Function keyDecoder, Function valueDecoder) { - StringBuilder result = new StringBuilder(); - result.append("@"); - result.append(nodeOffset); - result.append(": "); - for (NodeEntry e : entries.values()) { - String s = e.toString(keyDecoder, valueDecoder); - result.append("\n"); - result.append(s); - } - - return result.toString(); - } + @Override + public String toString() { + return "@" + nodeOffset + ": " + + String.join("\n", entries.values().stream().map(NodeEntry::toString).collect(Collectors.toList())); + } - public NodeEntry getTopNodeEntry() { - return entries.lastEntry().getValue(); - } + public String toString(Function keyDecoder, Function valueDecoder) { + StringBuilder result = new StringBuilder(); + result.append("@"); + result.append(nodeOffset); + result.append(": "); + for (NodeEntry e : entries.values()) { + String s = e.toString(keyDecoder, valueDecoder); + result.append("\n"); + result.append(s); + } - public 
PersistentMapDiskNode split(final long newBlockOffset) { + return result.toString(); + } - final List entriesAsCollection = new ArrayList<>(entries.values()); + public NodeEntry getTopNodeEntry() { + return entries.lastEntry().getValue(); + } - final var leftEntries = new ArrayList<>(entriesAsCollection.subList(0, entriesAsCollection.size() / 2)); - final var rightEntries = new ArrayList<>( - entriesAsCollection.subList(entriesAsCollection.size() / 2, entriesAsCollection.size())); + public PersistentMapDiskNode split(final long newBlockOffset) { - entries.clear(); - entries.putAll(toMap(rightEntries)); + final List entriesAsCollection = new ArrayList<>(entries.values()); - return new PersistentMapDiskNode(newBlockOffset, leftEntries, null); - } + final var leftEntries = new ArrayList<>(entriesAsCollection.subList(0, entriesAsCollection.size() / 2)); + final var rightEntries = new ArrayList<>( + entriesAsCollection.subList(entriesAsCollection.size() / 2, entriesAsCollection.size())); - public static int neededBytesTotal(final List entries) { - final byte[] buffer = new byte[PersistentMap.BLOCK_SIZE]; + entries.clear(); + entries.putAll(toMap(rightEntries)); - final int usedBytes = serializePrefix(entries, buffer); + return new PersistentMapDiskNode(newBlockOffset, leftEntries, null); + } - return usedBytes + NodeEntry.neededBytes(entries); - } + public static int neededBytesTotal(final List entries) { + final byte[] buffer = new byte[PersistentMap.BLOCK_SIZE]; - private static byte[] serialize(final Map entries) { - final byte[] buffer = new byte[PersistentMap.BLOCK_SIZE]; - final Collection entriesAsCollection = entries.values(); - final int usedBytes = serializePrefix(entriesAsCollection, buffer); + final int usedBytes = serializePrefix(entries, buffer); - // the +1 is for the null-byte terminator of the prefix - Preconditions.checkGreaterOrEqual(PersistentMap.BLOCK_SIZE, - usedBytes + 1 + NodeEntry.neededBytes(entriesAsCollection), - "The node is too big. 
It cannot be encoded into " + PersistentMap.BLOCK_SIZE + " bytes."); + return usedBytes + NodeEntry.neededBytes(entries); + } - serializeIntoFromTail(entriesAsCollection, buffer); - return buffer; - } + private static byte[] serialize(final Map entries) { + final byte[] buffer = new byte[PersistentMap.BLOCK_SIZE]; + final Collection entriesAsCollection = entries.values(); + final int usedBytes = serializePrefix(entriesAsCollection, buffer); - private static int serializePrefix(final Collection entries, final byte[] buffer) { - final LongList longs = serializeKeyLengths(entries); + // the +1 is for the null-byte terminator of the prefix + Preconditions.checkGreaterOrEqual(PersistentMap.BLOCK_SIZE, + usedBytes + 1 + NodeEntry.neededBytes(entriesAsCollection), + "The node is too big. It cannot be encoded into " + PersistentMap.BLOCK_SIZE + " bytes."); - final int usedBytes = VariableByteEncoder.encodeInto(longs, buffer, 0); - return usedBytes; - } + serializeIntoFromTail(entriesAsCollection, buffer); + return buffer; + } - private static LongList serializeKeyLengths(final Collection entries) { - final var keyLengths = new LongList(); - keyLengths.add(entries.size()); - for (final NodeEntry nodeEntry : entries) { - keyLengths.add(nodeEntry.getKey().length); - keyLengths.add(nodeEntry.getValue().length); - } + private static int serializePrefix(final Collection entries, final byte[] buffer) { + final LongList longs = serializeKeyLengths(entries); - return keyLengths; - } + final int usedBytes = VariableByteEncoder.encodeInto(longs, buffer, 0); + return usedBytes; + } - private static void serializeIntoFromTail(final Collection entries, final byte[] buffer) { + private static LongList serializeKeyLengths(final Collection entries) { + final var keyLengths = new LongList(); + keyLengths.add(entries.size()); + for (final NodeEntry nodeEntry : entries) { + keyLengths.add(nodeEntry.getKey().length); + keyLengths.add(nodeEntry.getValue().length); + } - int offset = 
buffer.length; + return keyLengths; + } - for (final var entry : entries) { - final byte[] valueBytes = entry.getValue(); - final byte[] keyBytes = entry.getKey(); + private static void serializeIntoFromTail(final Collection entries, final byte[] buffer) { - final int offsetValue = offset - valueBytes.length; - final int offsetKey = offsetValue - keyBytes.length; - final int offsetType = offsetKey - 1; + int offset = buffer.length; - System.arraycopy(valueBytes, 0, buffer, offsetValue, valueBytes.length); - System.arraycopy(keyBytes, 0, buffer, offsetKey, keyBytes.length); - buffer[offsetType] = entry.getType().asByte(); + for (final var entry : entries) { + final byte[] valueBytes = entry.getValue(); + final byte[] keyBytes = entry.getKey(); - offset = offsetType; - } - } + final int offsetValue = offset - valueBytes.length; + final int offsetKey = offsetValue - keyBytes.length; + final int offsetType = offsetKey - 1; + + System.arraycopy(valueBytes, 0, buffer, offsetValue, valueBytes.length); + System.arraycopy(keyBytes, 0, buffer, offsetKey, keyBytes.length); + buffer[offsetType] = entry.getType().asByte(); + + offset = offsetType; + } + } } diff --git a/block-storage/src/main/java/org/lucares/pdb/map/Visitor.java b/block-storage/src/main/java/org/lucares/pdb/map/Visitor.java index cd418c1..b659feb 100644 --- a/block-storage/src/main/java/org/lucares/pdb/map/Visitor.java +++ b/block-storage/src/main/java/org/lucares/pdb/map/Visitor.java @@ -1,5 +1,5 @@ package org.lucares.pdb.map; public interface Visitor { - void visit(K key, V value); + void visit(K key, V value); } \ No newline at end of file diff --git a/block-storage/src/test/java/org/lucares/pdb/blockstorage/BSFileTest.java b/block-storage/src/test/java/org/lucares/pdb/blockstorage/BSFileTest.java index c247084..83a678c 100644 --- a/block-storage/src/test/java/org/lucares/pdb/blockstorage/BSFileTest.java +++ b/block-storage/src/test/java/org/lucares/pdb/blockstorage/BSFileTest.java @@ -25,110 +25,110 @@ 
import org.testng.annotations.Test; @Test public class BSFileTest { - private Path dataDirectory; + private Path dataDirectory; - @BeforeMethod - public void beforeMethod() throws IOException { - dataDirectory = Files.createTempDirectory("pdb"); - } + @BeforeMethod + public void beforeMethod() throws IOException { + dataDirectory = Files.createTempDirectory("pdb"); + } - @AfterMethod - public void afterMethod() throws IOException { - FileUtils.delete(dataDirectory); - } + @AfterMethod + public void afterMethod() throws IOException { + FileUtils.delete(dataDirectory); + } - public void testBlockStorage() throws Exception { - final Path file = dataDirectory.resolve("data.int.db"); - final int numLongs = 1000; - long blockOffset = -1; + public void testBlockStorage() throws Exception { + final Path file = dataDirectory.resolve("data.int.db"); + final int numLongs = 1000; + long blockOffset = -1; - long start = System.nanoTime(); - - try (final DiskStorage ds = new DiskStorage(file, dataDirectory)) { + long start = System.nanoTime(); - try (final BSFile bsFile = BSFile.newFile(ds, NullCustomizer.INSTANCE)) { + try (final DiskStorage ds = new DiskStorage(file, dataDirectory)) { - blockOffset = bsFile.getRootBlockOffset(); + try (final BSFile bsFile = BSFile.newFile(ds, NullCustomizer.INSTANCE)) { - for (long i = 0; i < numLongs / 2; i++) { - bsFile.append(i); - } - } - try (final BSFile bsFile = BSFile.existingFile(blockOffset, ds, NullCustomizer.INSTANCE)) { + blockOffset = bsFile.getRootBlockOffset(); - for (long i = numLongs / 2; i < numLongs; i++) { - bsFile.append(i); - } - } - } - System.out.println("duration write: " + (System.nanoTime() - start) / 1_000_000.0 + "ms"); + for (long i = 0; i < numLongs / 2; i++) { + bsFile.append(i); + } + } + try (final BSFile bsFile = BSFile.existingFile(blockOffset, ds, NullCustomizer.INSTANCE)) { - start = System.nanoTime(); - try (final DiskStorage ds = new DiskStorage(file, dataDirectory)) { - final BSFile bsFile = 
BSFile.existingFile(blockOffset, ds, NullCustomizer.INSTANCE); - final LongList actualLongs = bsFile.asLongList(); - final LongList expectedLongs = LongList.rangeClosed(0, numLongs - 1); - Assert.assertEquals(actualLongs, expectedLongs); - } - System.out.println("duration read: " + (System.nanoTime() - start) / 1_000_000.0 + "ms"); - } + for (long i = numLongs / 2; i < numLongs; i++) { + bsFile.append(i); + } + } + } + System.out.println("duration write: " + (System.nanoTime() - start) / 1_000_000.0 + "ms"); - public void testBlockStorageMultithreading() throws Exception { - final ExecutorService pool = Executors.newCachedThreadPool(); + start = System.nanoTime(); + try (final DiskStorage ds = new DiskStorage(file, dataDirectory)) { + final BSFile bsFile = BSFile.existingFile(blockOffset, ds, NullCustomizer.INSTANCE); + final LongList actualLongs = bsFile.asLongList(); + final LongList expectedLongs = LongList.rangeClosed(0, numLongs - 1); + Assert.assertEquals(actualLongs, expectedLongs); + } + System.out.println("duration read: " + (System.nanoTime() - start) / 1_000_000.0 + "ms"); + } - final Path file = dataDirectory.resolve("data.int.db"); + public void testBlockStorageMultithreading() throws Exception { + final ExecutorService pool = Executors.newCachedThreadPool(); - final int threads = 50; - final int values = 10000; - final Map expected = new HashMap<>(); - final List> futures = new ArrayList<>(); - final long start = System.nanoTime(); - try (final DiskStorage ds = new DiskStorage(file, dataDirectory)) { + final Path file = dataDirectory.resolve("data.int.db"); - for (int i = 0; i < threads; i++) { - final Future future = pool.submit(() -> { - final ThreadLocalRandom random = ThreadLocalRandom.current(); - final LongList listOfValues = new LongList(); + final int threads = 50; + final int values = 10000; + final Map expected = new HashMap<>(); + final List> futures = new ArrayList<>(); + final long start = System.nanoTime(); + try (final DiskStorage ds = 
new DiskStorage(file, dataDirectory)) { - try (BSFile bsFile = BSFile.newFile(ds, NullCustomizer.INSTANCE)) { + for (int i = 0; i < threads; i++) { + final Future future = pool.submit(() -> { + final ThreadLocalRandom random = ThreadLocalRandom.current(); + final LongList listOfValues = new LongList(); - for (int j = 0; j < values; j++) { + try (BSFile bsFile = BSFile.newFile(ds, NullCustomizer.INSTANCE)) { - // will produce 1,2 and 3 byte sequences when encoded - final long value = random.nextLong(32768); - listOfValues.add(value); - bsFile.append(value); - } - expected.put(bsFile.getRootBlockOffset(), listOfValues); - } + for (int j = 0; j < values; j++) { - return null; - }); - futures.add(future); - } + // will produce 1,2 and 3 byte sequences when encoded + final long value = random.nextLong(32768); + listOfValues.add(value); + bsFile.append(value); + } + expected.put(bsFile.getRootBlockOffset(), listOfValues); + } - for (final Future future : futures) { - future.get(); - } + return null; + }); + futures.add(future); + } - pool.shutdown(); - pool.awaitTermination(5, TimeUnit.MINUTES); - } - System.out.println("duration write: " + (System.nanoTime() - start) / 1_000_000.0 + "ms"); + for (final Future future : futures) { + future.get(); + } - // verification - try (final DiskStorage ds = new DiskStorage(file, dataDirectory)) { - for (final Entry entry : expected.entrySet()) { - final long rootBlockNumber = entry.getKey(); - final LongList expectedValues = entry.getValue(); + pool.shutdown(); + pool.awaitTermination(5, TimeUnit.MINUTES); + } + System.out.println("duration write: " + (System.nanoTime() - start) / 1_000_000.0 + "ms"); - try (BSFile bsFile = BSFile.existingFile(rootBlockNumber, ds, NullCustomizer.INSTANCE)) { - final LongList actualLongs = bsFile.asLongList(); - final LongList expectedLongs = expectedValues; - Assert.assertEquals(actualLongs, expectedLongs, "for rootBlockNumber=" + rootBlockNumber); - } - } - } - } + // verification + try (final 
DiskStorage ds = new DiskStorage(file, dataDirectory)) { + for (final Entry entry : expected.entrySet()) { + final long rootBlockNumber = entry.getKey(); + final LongList expectedValues = entry.getValue(); + + try (BSFile bsFile = BSFile.existingFile(rootBlockNumber, ds, NullCustomizer.INSTANCE)) { + final LongList actualLongs = bsFile.asLongList(); + final LongList expectedLongs = expectedValues; + Assert.assertEquals(actualLongs, expectedLongs, "for rootBlockNumber=" + rootBlockNumber); + } + } + } + } } diff --git a/block-storage/src/test/java/org/lucares/pdb/blockstorage/TimeSeriesFileTest.java b/block-storage/src/test/java/org/lucares/pdb/blockstorage/TimeSeriesFileTest.java index 1ace4d0..5af1aa2 100644 --- a/block-storage/src/test/java/org/lucares/pdb/blockstorage/TimeSeriesFileTest.java +++ b/block-storage/src/test/java/org/lucares/pdb/blockstorage/TimeSeriesFileTest.java @@ -15,70 +15,70 @@ import org.testng.annotations.BeforeMethod; public class TimeSeriesFileTest { - private Path dataDirectory; + private Path dataDirectory; - @BeforeMethod - public void beforeMethod() throws IOException { - dataDirectory = Files.createTempDirectory("pdb"); - } + @BeforeMethod + public void beforeMethod() throws IOException { + dataDirectory = Files.createTempDirectory("pdb"); + } - @AfterMethod - public void afterMethod() throws IOException { - FileUtils.delete(dataDirectory); - } + @AfterMethod + public void afterMethod() throws IOException { + FileUtils.delete(dataDirectory); + } - public void testBlockStorageTimeValue() throws Exception { - final Path file = dataDirectory.resolve("data.int.db"); - final Random random = ThreadLocalRandom.current(); - final int numTimeValuePairs = 1000; - long blockNumber = -1; - final LongList expectedLongs = new LongList(); + public void testBlockStorageTimeValue() throws Exception { + final Path file = dataDirectory.resolve("data.int.db"); + final Random random = ThreadLocalRandom.current(); + final int numTimeValuePairs = 1000; + 
long blockNumber = -1; + final LongList expectedLongs = new LongList(); - long start = System.nanoTime(); - long lastEpochMilli = 0; - // - try (final DiskStorage ds = new DiskStorage(file, dataDirectory)) { + long start = System.nanoTime(); + long lastEpochMilli = 0; + // + try (final DiskStorage ds = new DiskStorage(file, dataDirectory)) { - try (final TimeSeriesFile bsFile = TimeSeriesFile.newFile(ds)) { + try (final TimeSeriesFile bsFile = TimeSeriesFile.newFile(ds)) { - blockNumber = bsFile.getRootBlockOffset(); + blockNumber = bsFile.getRootBlockOffset(); - for (long i = 0; i < numTimeValuePairs / 2; i++) { + for (long i = 0; i < numTimeValuePairs / 2; i++) { - final long epochMilli = lastEpochMilli + random.nextInt(1000); - final long value = random.nextInt(10000); + final long epochMilli = lastEpochMilli + random.nextInt(1000); + final long value = random.nextInt(10000); - lastEpochMilli = epochMilli; + lastEpochMilli = epochMilli; - bsFile.appendTimeValue(epochMilli, value); - expectedLongs.add(epochMilli); - expectedLongs.add(value); - } - } - try (final TimeSeriesFile bsFile = TimeSeriesFile.existingFile(blockNumber, ds)) { + bsFile.appendTimeValue(epochMilli, value); + expectedLongs.add(epochMilli); + expectedLongs.add(value); + } + } + try (final TimeSeriesFile bsFile = TimeSeriesFile.existingFile(blockNumber, ds)) { - for (long i = numTimeValuePairs / 2; i < numTimeValuePairs; i++) { - final long epochMilli = lastEpochMilli + random.nextInt(100); - final long value = random.nextInt(10000); + for (long i = numTimeValuePairs / 2; i < numTimeValuePairs; i++) { + final long epochMilli = lastEpochMilli + random.nextInt(100); + final long value = random.nextInt(10000); - lastEpochMilli = epochMilli; + lastEpochMilli = epochMilli; - bsFile.appendTimeValue(epochMilli, value); - expectedLongs.add(epochMilli); - expectedLongs.add(value); - } - } - } - System.out.println("duration write: " + (System.nanoTime() - start) / 1_000_000.0 + "ms"); + 
bsFile.appendTimeValue(epochMilli, value); + expectedLongs.add(epochMilli); + expectedLongs.add(value); + } + } + } + System.out.println("duration write: " + (System.nanoTime() - start) / 1_000_000.0 + "ms"); - start = System.nanoTime(); - try (final DiskStorage ds = new DiskStorage(file, dataDirectory)) { - final TimeSeriesFile bsFile = TimeSeriesFile.existingFile(blockNumber, ds); - final LongList actualLongs = bsFile.asTimeValueLongList(); + start = System.nanoTime(); + try (final DiskStorage ds = new DiskStorage(file, dataDirectory)) { + final TimeSeriesFile bsFile = TimeSeriesFile.existingFile(blockNumber, ds); + final LongList actualLongs = bsFile.asTimeValueLongList(); - Assert.assertEquals(actualLongs, expectedLongs); - } - System.out.println("duration read: " + (System.nanoTime() - start) / 1_000_000.0 + "ms"); - } + Assert.assertEquals(actualLongs, expectedLongs); + } + System.out.println("duration read: " + (System.nanoTime() - start) / 1_000_000.0 + "ms"); + } } diff --git a/block-storage/src/test/java/org/lucares/pdb/diskstorage/DiskStorageTest.java b/block-storage/src/test/java/org/lucares/pdb/diskstorage/DiskStorageTest.java index d6ec734..0ca00fe 100644 --- a/block-storage/src/test/java/org/lucares/pdb/diskstorage/DiskStorageTest.java +++ b/block-storage/src/test/java/org/lucares/pdb/diskstorage/DiskStorageTest.java @@ -18,289 +18,289 @@ import org.testng.annotations.Test; @Test public class DiskStorageTest { - private static final int BLOCK_SIZE = 512; + private static final int BLOCK_SIZE = 512; - private Path dataDirectory; + private Path dataDirectory; - @BeforeMethod - public void beforeMethod() throws IOException { - dataDirectory = Files.createTempDirectory("pdb"); - } + @BeforeMethod + public void beforeMethod() throws IOException { + dataDirectory = Files.createTempDirectory("pdb"); + } - @AfterMethod - public void afterMethod() throws IOException { - FileUtils.delete(dataDirectory); - } + @AfterMethod + public void afterMethod() throws 
IOException { + FileUtils.delete(dataDirectory); + } - /** - * File systems work with 4096 byte blocks, but we want to work with 512 bytes - * per block. Does flushing a 512 byte block flush the full 4096 byte block? - * - * @throws Exception - */ - @Test(enabled = false) - public void testFlushingASectorOrABlock() throws Exception { - final Path databaseFile = dataDirectory.resolve("db.ds"); - Files.deleteIfExists(databaseFile); + /** + * File systems work with 4096 byte blocks, but we want to work with 512 bytes + * per block. Does flushing a 512 byte block flush the full 4096 byte block? + * + * @throws Exception + */ + @Test(enabled = false) + public void testFlushingASectorOrABlock() throws Exception { + final Path databaseFile = dataDirectory.resolve("db.ds"); + Files.deleteIfExists(databaseFile); - try (DiskStorage ds = new DiskStorage(databaseFile, dataDirectory)) { - final int numBlocks = 10; + try (DiskStorage ds = new DiskStorage(databaseFile, dataDirectory)) { + final int numBlocks = 10; - allocateBlocks(ds, numBlocks, BLOCK_SIZE); - final List blocks = new ArrayList<>(); + allocateBlocks(ds, numBlocks, BLOCK_SIZE); + final List blocks = new ArrayList<>(); - // fill the first 16 512-byte blocks - // that is more than on 4096 byte block - for (int i = 0; i < numBlocks; i++) { - final DiskBlock diskBlock = ds.getDiskBlock(i, BLOCK_SIZE); - assertAllValuesAreEqual(diskBlock); - fill(diskBlock, (byte) i); - diskBlock.writeAsync(); - blocks.add(diskBlock); - } + // fill the first 16 512-byte blocks + // that is more than on 4096 byte block + for (int i = 0; i < numBlocks; i++) { + final DiskBlock diskBlock = ds.getDiskBlock(i, BLOCK_SIZE); + assertAllValuesAreEqual(diskBlock); + fill(diskBlock, (byte) i); + diskBlock.writeAsync(); + blocks.add(diskBlock); + } - // now force (aka flush) a block in the middle of the first 4096 byte block - blocks.get(3).writeAsync(); - blocks.get(3).force(); + // now force (aka flush) a block in the middle of the first 4096 
byte block + blocks.get(3).writeAsync(); + blocks.get(3).force(); - System.exit(0); + System.exit(0); - // read all blocks again an check what they contain + // read all blocks again an check what they contain - // 1. we do this with the existing file channel - // this one should see every change, because we wrote them to the file channel - for (int i = 0; i < numBlocks; i++) { - final DiskBlock diskBlock = ds.getDiskBlock(i, BLOCK_SIZE); - assertAllValuesAreEqual(diskBlock, (byte) i); - fill(diskBlock, (byte) i); - blocks.add(diskBlock); - } + // 1. we do this with the existing file channel + // this one should see every change, because we wrote them to the file channel + for (int i = 0; i < numBlocks; i++) { + final DiskBlock diskBlock = ds.getDiskBlock(i, BLOCK_SIZE); + assertAllValuesAreEqual(diskBlock, (byte) i); + fill(diskBlock, (byte) i); + blocks.add(diskBlock); + } - // 2. we read the file from another file channel - // this one might not see changes made to the first file channel - // - // But it does see the changes. Most likely, because both channels - // use the same buffers from the operating system. - try (DiskStorage ds2 = new DiskStorage(databaseFile, dataDirectory)) { - for (int i = 0; i < numBlocks; i++) { - final DiskBlock diskBlock = ds2.getDiskBlock(i, BLOCK_SIZE); - assertAllValuesAreEqual(diskBlock, (byte) i); - fill(diskBlock, (byte) i); - blocks.add(diskBlock); - } - } - } - } + // 2. we read the file from another file channel + // this one might not see changes made to the first file channel + // + // But it does see the changes. Most likely, because both channels + // use the same buffers from the operating system. 
+ try (DiskStorage ds2 = new DiskStorage(databaseFile, dataDirectory)) { + for (int i = 0; i < numBlocks; i++) { + final DiskBlock diskBlock = ds2.getDiskBlock(i, BLOCK_SIZE); + assertAllValuesAreEqual(diskBlock, (byte) i); + fill(diskBlock, (byte) i); + blocks.add(diskBlock); + } + } + } + } - @Test(enabled = true) - public void testDiskStorage() throws Exception { - final Path databaseFile = dataDirectory.resolve("db.ds"); + @Test(enabled = true) + public void testDiskStorage() throws Exception { + final Path databaseFile = dataDirectory.resolve("db.ds"); - final ExecutorService pool = Executors.newCachedThreadPool(); + final ExecutorService pool = Executors.newCachedThreadPool(); - try (DiskStorage ds = new DiskStorage(databaseFile, dataDirectory)) { - final int numBlocks = 10; + try (DiskStorage ds = new DiskStorage(databaseFile, dataDirectory)) { + final int numBlocks = 10; - final long[] blockOffsets = allocateBlocks(ds, numBlocks, BLOCK_SIZE); + final long[] blockOffsets = allocateBlocks(ds, numBlocks, BLOCK_SIZE); - for (final long blockOffset : blockOffsets) { + for (final long blockOffset : blockOffsets) { - final long block = blockOffset; - pool.submit(() -> { - final ThreadLocalRandom random = ThreadLocalRandom.current(); - try { - // now read/write random blocks - for (int j = 0; j < 10; j++) { - final DiskBlock diskBlock = ds.getDiskBlock(block, BLOCK_SIZE); + final long block = blockOffset; + pool.submit(() -> { + final ThreadLocalRandom random = ThreadLocalRandom.current(); + try { + // now read/write random blocks + for (int j = 0; j < 10; j++) { + final DiskBlock diskBlock = ds.getDiskBlock(block, BLOCK_SIZE); - assertAllValuesAreEqual(diskBlock); - fill(diskBlock, (byte) random.nextInt(127)); + assertAllValuesAreEqual(diskBlock); + fill(diskBlock, (byte) random.nextInt(127)); - if (random.nextBoolean()) { - diskBlock.writeAsync(); - } else { - diskBlock.writeAsync(); - diskBlock.force(); - } - } + if (random.nextBoolean()) { + 
diskBlock.writeAsync(); + } else { + diskBlock.writeAsync(); + diskBlock.force(); + } + } - } catch (final Exception e) { - e.printStackTrace(); - throw new RuntimeException(e); - } - }); - } + } catch (final Exception e) { + e.printStackTrace(); + throw new RuntimeException(e); + } + }); + } - pool.shutdown(); - pool.awaitTermination(1, TimeUnit.MINUTES); - } - } + pool.shutdown(); + pool.awaitTermination(1, TimeUnit.MINUTES); + } + } - @Test(enabled = true, expectedExceptions = IllegalArgumentException.class) - public void testAllocationSmallerThanMinimalBlockSize() throws Exception { - final Path databaseFile = dataDirectory.resolve("db.ds"); + @Test(enabled = true, expectedExceptions = IllegalArgumentException.class) + public void testAllocationSmallerThanMinimalBlockSize() throws Exception { + final Path databaseFile = dataDirectory.resolve("db.ds"); - try (DiskStorage ds = new DiskStorage(databaseFile, dataDirectory)) { + try (DiskStorage ds = new DiskStorage(databaseFile, dataDirectory)) { - final int blockSize = 31; // minimal block size is 32 - ds.allocateBlock(blockSize); - } - } + final int blockSize = 31; // minimal block size is 32 + ds.allocateBlock(blockSize); + } + } - @Test(enabled = true) - public void testAllocateAndFreeSingleBlockInFreeList() throws Exception { - final Path databaseFile = dataDirectory.resolve("db.ds"); + @Test(enabled = true) + public void testAllocateAndFreeSingleBlockInFreeList() throws Exception { + final Path databaseFile = dataDirectory.resolve("db.ds"); - try (DiskStorage ds = new DiskStorage(databaseFile, dataDirectory)) { + try (DiskStorage ds = new DiskStorage(databaseFile, dataDirectory)) { - final int blockSize = 32; - final long block_8_39 = ds.allocateBlock(blockSize); - final long block_40_71 = ds.allocateBlock(blockSize); - final long block_72_103 = ds.allocateBlock(blockSize); + final int blockSize = 32; + final long block_8_39 = ds.allocateBlock(blockSize); + final long block_40_71 = 
ds.allocateBlock(blockSize); + final long block_72_103 = ds.allocateBlock(blockSize); - Assert.assertEquals(block_8_39, 8); - Assert.assertEquals(block_40_71, 40); - Assert.assertEquals(block_72_103, 72); + Assert.assertEquals(block_8_39, 8); + Assert.assertEquals(block_40_71, 40); + Assert.assertEquals(block_72_103, 72); - ds.free(block_40_71, blockSize); + ds.free(block_40_71, blockSize); - // should reuse the block we just freed - final long actual_block_40_71 = ds.allocateBlock(blockSize); + // should reuse the block we just freed + final long actual_block_40_71 = ds.allocateBlock(blockSize); - Assert.assertEquals(actual_block_40_71, 40); - } - } + Assert.assertEquals(actual_block_40_71, 40); + } + } - @Test(enabled = true) - public void testAllocateAndFreeMultipleBlocksInFreeList() throws Exception { - final Path databaseFile = dataDirectory.resolve("db.ds"); + @Test(enabled = true) + public void testAllocateAndFreeMultipleBlocksInFreeList() throws Exception { + final Path databaseFile = dataDirectory.resolve("db.ds"); - try (DiskStorage ds = new DiskStorage(databaseFile, dataDirectory)) { + try (DiskStorage ds = new DiskStorage(databaseFile, dataDirectory)) { - final int blockSize = 32; - ds.allocateBlock(blockSize); - final long block_40_71 = ds.allocateBlock(blockSize); - final long block_72_103 = ds.allocateBlock(blockSize); - final long block_104_135 = ds.allocateBlock(blockSize); - ds.allocateBlock(blockSize); + final int blockSize = 32; + ds.allocateBlock(blockSize); + final long block_40_71 = ds.allocateBlock(blockSize); + final long block_72_103 = ds.allocateBlock(blockSize); + final long block_104_135 = ds.allocateBlock(blockSize); + ds.allocateBlock(blockSize); - ds.free(block_72_103, blockSize); - ds.free(block_104_135, blockSize); - ds.free(block_40_71, blockSize); // the block with the smaller index is freed last, this increases line - // coverage, because there is a branch for prepending the root node + ds.free(block_72_103, blockSize); + 
ds.free(block_104_135, blockSize); + ds.free(block_40_71, blockSize); // the block with the smaller index is freed last, this increases line + // coverage, because there is a branch for prepending the root node - // should reuse the first block we just freed - // this removes the root node of the free list - final long actual_block_40_71 = ds.allocateBlock(blockSize); - Assert.assertEquals(actual_block_40_71, 40); + // should reuse the first block we just freed + // this removes the root node of the free list + final long actual_block_40_71 = ds.allocateBlock(blockSize); + Assert.assertEquals(actual_block_40_71, 40); - // should reuse the second block we just freed - final long actual_block_72_103 = ds.allocateBlock(blockSize); - Assert.assertEquals(actual_block_72_103, 72); + // should reuse the second block we just freed + final long actual_block_72_103 = ds.allocateBlock(blockSize); + Assert.assertEquals(actual_block_72_103, 72); - // should reuse the third block we just freed - // this removes the last node of the free list - final long actual_block_104_135 = ds.allocateBlock(blockSize); - Assert.assertEquals(actual_block_104_135, 104); + // should reuse the third block we just freed + // this removes the last node of the free list + final long actual_block_104_135 = ds.allocateBlock(blockSize); + Assert.assertEquals(actual_block_104_135, 104); - final long block_168_199 = ds.allocateBlock(blockSize); - Assert.assertEquals(block_168_199, 168); - } - } + final long block_168_199 = ds.allocateBlock(blockSize); + Assert.assertEquals(block_168_199, 168); + } + } - @Test(enabled = true) - public void testAllocateAndFreeInsertFreeNodeInTheMiddleOfTheFreeList() throws Exception { - final Path databaseFile = dataDirectory.resolve("db.ds"); + @Test(enabled = true) + public void testAllocateAndFreeInsertFreeNodeInTheMiddleOfTheFreeList() throws Exception { + final Path databaseFile = dataDirectory.resolve("db.ds"); - try (DiskStorage ds = new DiskStorage(databaseFile, 
dataDirectory)) { + try (DiskStorage ds = new DiskStorage(databaseFile, dataDirectory)) { - final int blockSize = 32; - ds.allocateBlock(blockSize); - ds.allocateBlock(blockSize); - final long block_72_103 = ds.allocateBlock(blockSize); - final long block_104_135 = ds.allocateBlock(blockSize); - final long block_136_167 = ds.allocateBlock(blockSize); + final int blockSize = 32; + ds.allocateBlock(blockSize); + ds.allocateBlock(blockSize); + final long block_72_103 = ds.allocateBlock(blockSize); + final long block_104_135 = ds.allocateBlock(blockSize); + final long block_136_167 = ds.allocateBlock(blockSize); - // free the last block first, to increase code coverage - ds.free(block_136_167, blockSize); - ds.free(block_72_103, blockSize); - ds.free(block_104_135, blockSize); + // free the last block first, to increase code coverage + ds.free(block_136_167, blockSize); + ds.free(block_72_103, blockSize); + ds.free(block_104_135, blockSize); - // the first free block is re-used - final long actual_block_72_103 = ds.allocateBlock(blockSize); - Assert.assertEquals(actual_block_72_103, block_72_103); + // the first free block is re-used + final long actual_block_72_103 = ds.allocateBlock(blockSize); + Assert.assertEquals(actual_block_72_103, block_72_103); - final long actual_block_104_135 = ds.allocateBlock(blockSize); - Assert.assertEquals(actual_block_104_135, block_104_135); + final long actual_block_104_135 = ds.allocateBlock(blockSize); + Assert.assertEquals(actual_block_104_135, block_104_135); - final long actual_block_136_167 = ds.allocateBlock(blockSize); - Assert.assertEquals(actual_block_136_167, block_136_167); - } - } + final long actual_block_136_167 = ds.allocateBlock(blockSize); + Assert.assertEquals(actual_block_136_167, block_136_167); + } + } - @Test(enabled = true) - public void testAllocateAndFreeMultipleBlocksWithDifferentSizes() throws Exception { - final Path databaseFile = dataDirectory.resolve("db.ds"); + @Test(enabled = true) + public void 
testAllocateAndFreeMultipleBlocksWithDifferentSizes() throws Exception { + final Path databaseFile = dataDirectory.resolve("db.ds"); - try (DiskStorage ds = new DiskStorage(databaseFile, dataDirectory)) { + try (DiskStorage ds = new DiskStorage(databaseFile, dataDirectory)) { - final int blockSizeSmall = 32; - final int blockSizeBig = 64; - ds.allocateBlock(blockSizeSmall); - ds.allocateBlock(blockSizeSmall); - final long big_block_72_103 = ds.allocateBlock(blockSizeBig); - final long small_block_136_167 = ds.allocateBlock(blockSizeSmall); - ds.allocateBlock(blockSizeSmall); + final int blockSizeSmall = 32; + final int blockSizeBig = 64; + ds.allocateBlock(blockSizeSmall); + ds.allocateBlock(blockSizeSmall); + final long big_block_72_103 = ds.allocateBlock(blockSizeBig); + final long small_block_136_167 = ds.allocateBlock(blockSizeSmall); + ds.allocateBlock(blockSizeSmall); - ds.free(big_block_72_103, blockSizeBig); - ds.free(small_block_136_167, blockSizeSmall); + ds.free(big_block_72_103, blockSizeBig); + ds.free(small_block_136_167, blockSizeSmall); - final long actual_small_block_136_167 = ds.allocateBlock(blockSizeSmall); - Assert.assertEquals(actual_small_block_136_167, small_block_136_167); - } - } + final long actual_small_block_136_167 = ds.allocateBlock(blockSizeSmall); + Assert.assertEquals(actual_small_block_136_167, small_block_136_167); + } + } - private void assertAllValuesAreEqual(final DiskBlock diskBlock, final byte expectedVal) { - final byte[] buffer = diskBlock.getBuffer(); - for (int i = 0; i < buffer.length; i++) { - if (expectedVal != buffer[i]) { - System.err.println( - "block " + diskBlock.getBlockOffset() + " " + buffer[i] + " != " + expectedVal + " at " + i); - break; - } - } - } + private void assertAllValuesAreEqual(final DiskBlock diskBlock, final byte expectedVal) { + final byte[] buffer = diskBlock.getBuffer(); + for (int i = 0; i < buffer.length; i++) { + if (expectedVal != buffer[i]) { + System.err.println( + "block " + 
diskBlock.getBlockOffset() + " " + buffer[i] + " != " + expectedVal + " at " + i); + break; + } + } + } - private void assertAllValuesAreEqual(final DiskBlock diskBlock) { + private void assertAllValuesAreEqual(final DiskBlock diskBlock) { - final byte[] buffer = diskBlock.getBuffer(); - final byte expected = buffer[0]; - for (int i = 0; i < buffer.length; i++) { - if (expected != buffer[i]) { - System.err.println( - "block " + diskBlock.getBlockOffset() + " " + buffer[i] + " != " + expected + " at " + i); - break; - } - } + final byte[] buffer = diskBlock.getBuffer(); + final byte expected = buffer[0]; + for (int i = 0; i < buffer.length; i++) { + if (expected != buffer[i]) { + System.err.println( + "block " + diskBlock.getBlockOffset() + " " + buffer[i] + " != " + expected + " at " + i); + break; + } + } - } + } - private void fill(final DiskBlock diskBlock, final byte val) { - final byte[] buffer = diskBlock.getBuffer(); + private void fill(final DiskBlock diskBlock, final byte val) { + final byte[] buffer = diskBlock.getBuffer(); - for (int i = 0; i < buffer.length; i++) { - buffer[i] = val; - } - } + for (int i = 0; i < buffer.length; i++) { + buffer[i] = val; + } + } - private long[] allocateBlocks(final DiskStorage ds, final int numNewBlocks, final int blockSize) - throws IOException { + private long[] allocateBlocks(final DiskStorage ds, final int numNewBlocks, final int blockSize) + throws IOException { - final long[] result = new long[numNewBlocks]; - for (int i = 0; i < numNewBlocks; i++) { - final long blockOffset = ds.allocateBlock(blockSize); - result[i] = blockOffset; - } - return result; - } + final long[] result = new long[numNewBlocks]; + for (int i = 0; i < numNewBlocks; i++) { + final long blockOffset = ds.allocateBlock(blockSize); + result[i] = blockOffset; + } + return result; + } } diff --git a/block-storage/src/test/java/org/lucares/pdb/map/CsvTestDataCreator.java b/block-storage/src/test/java/org/lucares/pdb/map/CsvTestDataCreator.java 
index 50397a9..c78f27c 100644 --- a/block-storage/src/test/java/org/lucares/pdb/map/CsvTestDataCreator.java +++ b/block-storage/src/test/java/org/lucares/pdb/map/CsvTestDataCreator.java @@ -15,79 +15,79 @@ import java.util.concurrent.ThreadLocalRandom; public class CsvTestDataCreator { - private static final List PODS = Arrays.asList("vapbrewe01", "vapfinra01", "vapondem01", "vapondem02", - "vapondem03", "vapondem04", "vapnyse01", "vapnorto01", "vapfackb01", "vaprjrey01", "vadtrans01", - "vadaxcel09", "vadaxcel66"); - private static final List HOSTS = new ArrayList<>(); - private static final List CLASSES = Arrays.asList("AuditLog", "Brava", "Collection", "Folder", "Field", - "Tagging", "Arrangment", "Review", "Production", "ProductionExport", "View", "Jobs", "Navigation", - "RecentNavigation", "Entity", "Search", "Tasks", "PcWorkflow", "Batch", "Matter"); - private static final List ENDPOINTS = Arrays.asList("create", "remove", "update", "delete", "createBulk", - "removeBulk", "deleteBulk", "list", "index", "listing", "all"); - private static final List METHODS = new ArrayList<>(); - private static final List PROJECTS = new ArrayList<>(); - private static final List SOURCE = Arrays.asList("web", "service", "metrics"); - private static final List BUILDS = new ArrayList<>(); + private static final List PODS = Arrays.asList("vapbrewe01", "vapfinra01", "vapondem01", "vapondem02", + "vapondem03", "vapondem04", "vapnyse01", "vapnorto01", "vapfackb01", "vaprjrey01", "vadtrans01", + "vadaxcel09", "vadaxcel66"); + private static final List HOSTS = new ArrayList<>(); + private static final List CLASSES = Arrays.asList("AuditLog", "Brava", "Collection", "Folder", "Field", + "Tagging", "Arrangment", "Review", "Production", "ProductionExport", "View", "Jobs", "Navigation", + "RecentNavigation", "Entity", "Search", "Tasks", "PcWorkflow", "Batch", "Matter"); + private static final List ENDPOINTS = Arrays.asList("create", "remove", "update", "delete", "createBulk", + 
"removeBulk", "deleteBulk", "list", "index", "listing", "all"); + private static final List METHODS = new ArrayList<>(); + private static final List PROJECTS = new ArrayList<>(); + private static final List SOURCE = Arrays.asList("web", "service", "metrics"); + private static final List BUILDS = new ArrayList<>(); - static { - for (int i = 0; i < 500; i++) { - BUILDS.add("AXC_5.15_" + i); - } + static { + for (int i = 0; i < 500; i++) { + BUILDS.add("AXC_5.15_" + i); + } - for (int i = 0; i < 500; i++) { - HOSTS.add(UUID.randomUUID().toString().substring(1, 16)); - PROJECTS.add(UUID.randomUUID().toString().substring(1, 16) + "_Review"); - } + for (int i = 0; i < 500; i++) { + HOSTS.add(UUID.randomUUID().toString().substring(1, 16)); + PROJECTS.add(UUID.randomUUID().toString().substring(1, 16) + "_Review"); + } - for (final String clazz : CLASSES) { - for (final String endpoint : ENDPOINTS) { - METHODS.add(clazz + "Service." + endpoint); - METHODS.add(clazz + "Controller." + endpoint); - } - } - } + for (final String clazz : CLASSES) { + for (final String endpoint : ENDPOINTS) { + METHODS.add(clazz + "Service." + endpoint); + METHODS.add(clazz + "Controller." 
+ endpoint); + } + } + } - public static void main(final String[] args) throws IOException { - final Path testdataFile = Files.createTempFile("testData", ".csv"); + public static void main(final String[] args) throws IOException { + final Path testdataFile = Files.createTempFile("testData", ".csv"); - final ThreadLocalRandom r = ThreadLocalRandom.current(); - int lines = 0; + final ThreadLocalRandom r = ThreadLocalRandom.current(); + int lines = 0; - try (FileWriter writer = new FileWriter(testdataFile.toFile())) { - writer.append("@timestamp,duration,pod,host,method,project,source,build\n"); + try (FileWriter writer = new FileWriter(testdataFile.toFile())) { + writer.append("@timestamp,duration,pod,host,method,project,source,build\n"); - for (lines = 0; lines < 1_000_000; lines++) { - final String timestamp = Instant.ofEpochMilli(r.nextLong(1234567890L, 12345678901L)) - .atOffset(ZoneOffset.UTC).format(DateTimeFormatter.ISO_OFFSET_DATE_TIME); - final String duration = String.valueOf(r.nextInt(10000)); - final String pod = PODS.get(r.nextInt(PODS.size())); - final String host = HOSTS.get(r.nextInt(HOSTS.size())); - final String method = METHODS.get(r.nextInt(METHODS.size())); - final String project = PROJECTS.get(r.nextInt(PROJECTS.size())); - final String source = SOURCE.get(r.nextInt(SOURCE.size())); - final String build = BUILDS.get(r.nextInt(BUILDS.size())); + for (lines = 0; lines < 1_000_000; lines++) { + final String timestamp = Instant.ofEpochMilli(r.nextLong(1234567890L, 12345678901L)) + .atOffset(ZoneOffset.UTC).format(DateTimeFormatter.ISO_OFFSET_DATE_TIME); + final String duration = String.valueOf(r.nextInt(10000)); + final String pod = PODS.get(r.nextInt(PODS.size())); + final String host = HOSTS.get(r.nextInt(HOSTS.size())); + final String method = METHODS.get(r.nextInt(METHODS.size())); + final String project = PROJECTS.get(r.nextInt(PROJECTS.size())); + final String source = SOURCE.get(r.nextInt(SOURCE.size())); + final String build = 
BUILDS.get(r.nextInt(BUILDS.size())); - writer.append(timestamp); - writer.append(","); - writer.append(duration); - writer.append(","); - writer.append(pod); - writer.append(","); - writer.append(host); - writer.append(","); - writer.append(method); - writer.append(","); - writer.append(project); - writer.append(","); - writer.append(source); - writer.append(","); - writer.append(build); - writer.append("\n"); + writer.append(timestamp); + writer.append(","); + writer.append(duration); + writer.append(","); + writer.append(pod); + writer.append(","); + writer.append(host); + writer.append(","); + writer.append(method); + writer.append(","); + writer.append(project); + writer.append(","); + writer.append(source); + writer.append(","); + writer.append(build); + writer.append("\n"); - if (lines % 1000 == 0) { - System.out.println("lines: " + lines); - } - } - } - } + if (lines % 1000 == 0) { + System.out.println("lines: " + lines); + } + } + } + } } diff --git a/block-storage/src/test/java/org/lucares/pdb/map/NodeEntryTest.java b/block-storage/src/test/java/org/lucares/pdb/map/NodeEntryTest.java index cf26d35..d0d39a8 100644 --- a/block-storage/src/test/java/org/lucares/pdb/map/NodeEntryTest.java +++ b/block-storage/src/test/java/org/lucares/pdb/map/NodeEntryTest.java @@ -11,27 +11,27 @@ import org.testng.annotations.Test; @Test public class NodeEntryTest { - @DataProvider - public Object[][] providerPrefixCompare() { - final List result = new ArrayList<>(); + @DataProvider + public Object[][] providerPrefixCompare() { + final List result = new ArrayList<>(); - result.add(new Object[] { "ab", "abc", -1 }); - result.add(new Object[] { "abb", "abc", -1 }); - result.add(new Object[] { "abc", "abc", 0 }); - result.add(new Object[] { "abcd", "abc", 0 }); - result.add(new Object[] { "abd", "abc", 1 }); - result.add(new Object[] { "abz", "abc", 23 }); + result.add(new Object[] { "ab", "abc", -1 }); + result.add(new Object[] { "abb", "abc", -1 }); + result.add(new Object[] { 
"abc", "abc", 0 }); + result.add(new Object[] { "abcd", "abc", 0 }); + result.add(new Object[] { "abd", "abc", 1 }); + result.add(new Object[] { "abz", "abc", 23 }); - return result.toArray(Object[][]::new); - } + return result.toArray(Object[][]::new); + } - @Test(dataProvider = "providerPrefixCompare") - public void testPrefixCompare(final String key, final String prefix, final int expected) { + @Test(dataProvider = "providerPrefixCompare") + public void testPrefixCompare(final String key, final String prefix, final int expected) { - final NodeEntry nodeEntry = new NodeEntry(ValueType.NODE_POINTER, key.getBytes(StandardCharsets.UTF_8), - new byte[0]); + final NodeEntry nodeEntry = new NodeEntry(ValueType.NODE_POINTER, key.getBytes(StandardCharsets.UTF_8), + new byte[0]); - final int actual = nodeEntry.compareKeyPrefix(prefix.getBytes(StandardCharsets.UTF_8)); - Assert.assertEquals(actual, expected, key + " ? " + prefix); - } + final int actual = nodeEntry.compareKeyPrefix(prefix.getBytes(StandardCharsets.UTF_8)); + Assert.assertEquals(actual, expected, key + " ? 
" + prefix); + } } diff --git a/block-storage/src/test/java/org/lucares/pdb/map/PersistentMapDiskNodeTest.java b/block-storage/src/test/java/org/lucares/pdb/map/PersistentMapDiskNodeTest.java index b4b5e35..461018f 100644 --- a/block-storage/src/test/java/org/lucares/pdb/map/PersistentMapDiskNodeTest.java +++ b/block-storage/src/test/java/org/lucares/pdb/map/PersistentMapDiskNodeTest.java @@ -14,29 +14,29 @@ import org.testng.annotations.Test; @Test public class PersistentMapDiskNodeTest { - public void serializeDeserialize() throws Exception { + public void serializeDeserialize() throws Exception { - final List entries = new ArrayList<>(); - entries.add(newNode(ValueType.NODE_POINTER, "key1", "value1")); - entries.add(newNode(ValueType.VALUE_INLINE, "key2_", "value2--")); - entries.add(newNode(ValueType.NODE_POINTER, "key3__", "value3---")); - entries.add(newNode(ValueType.VALUE_INLINE, "key4___", "value4----")); + final List entries = new ArrayList<>(); + entries.add(newNode(ValueType.NODE_POINTER, "key1", "value1")); + entries.add(newNode(ValueType.VALUE_INLINE, "key2_", "value2--")); + entries.add(newNode(ValueType.NODE_POINTER, "key3__", "value3---")); + entries.add(newNode(ValueType.VALUE_INLINE, "key4___", "value4----")); - final long nodeOffset = ThreadLocalRandom.current().nextInt(); - final PersistentMapDiskNode node = new PersistentMapDiskNode(nodeOffset, entries, null); + final long nodeOffset = ThreadLocalRandom.current().nextInt(); + final PersistentMapDiskNode node = new PersistentMapDiskNode(nodeOffset, entries, null); - final byte[] buffer = node.serialize(); + final byte[] buffer = node.serialize(); - final ByteBuffer byteBuffer = ByteBuffer.wrap(buffer); - final PersistentMapDiskNode actualNode = PersistentMapDiskNode.parse(nodeOffset, - new DiskBlock(nodeOffset, byteBuffer)); + final ByteBuffer byteBuffer = ByteBuffer.wrap(buffer); + final PersistentMapDiskNode actualNode = PersistentMapDiskNode.parse(nodeOffset, + new DiskBlock(nodeOffset, 
byteBuffer)); - Assert.assertEquals(actualNode.getEntries(), entries); - } + Assert.assertEquals(actualNode.getEntries(), entries); + } - private static NodeEntry newNode(final ValueType type, final String key, final String value) { - return new NodeEntry(ValueType.VALUE_INLINE, key.getBytes(StandardCharsets.UTF_8), - value.getBytes(StandardCharsets.UTF_8)); - } + private static NodeEntry newNode(final ValueType type, final String key, final String value) { + return new NodeEntry(ValueType.VALUE_INLINE, key.getBytes(StandardCharsets.UTF_8), + value.getBytes(StandardCharsets.UTF_8)); + } } diff --git a/block-storage/src/test/java/org/lucares/pdb/map/PersistentMapTest.java b/block-storage/src/test/java/org/lucares/pdb/map/PersistentMapTest.java index d7f11de..a06d84c 100644 --- a/block-storage/src/test/java/org/lucares/pdb/map/PersistentMapTest.java +++ b/block-storage/src/test/java/org/lucares/pdb/map/PersistentMapTest.java @@ -24,368 +24,369 @@ import org.testng.annotations.Test; @Test public class PersistentMapTest { - private Path dataDirectory; - - @BeforeMethod - public void beforeMethod() throws IOException { - dataDirectory = Files.createTempDirectory("pdb"); - } + private Path dataDirectory; + + @BeforeMethod + public void beforeMethod() throws IOException { + dataDirectory = Files.createTempDirectory("pdb"); + } - @AfterMethod - public void afterMethod() throws IOException { - FileUtils.delete(dataDirectory); - } - - public void testSingleValue() throws Exception { - final Path file = dataDirectory.resolve("map.db"); - final String value = "value1"; - final String key = "key1"; + @AfterMethod + public void afterMethod() throws IOException { + FileUtils.delete(dataDirectory); + } + + public void testSingleValue() throws Exception { + final Path file = dataDirectory.resolve("map.db"); + final String value = "value1"; + final String key = "key1"; - try (final PersistentMap map = new PersistentMap<>(file, dataDirectory, PersistentMap.STRING_CODER, - 
PersistentMap.STRING_CODER)) { + try (final PersistentMap map = new PersistentMap<>(file, dataDirectory, + PersistentMap.STRING_CODER, PersistentMap.STRING_CODER)) { - Assert.assertNull(map.getValue(key)); + Assert.assertNull(map.getValue(key)); - Assert.assertNull(map.putValue(key, value)); + Assert.assertNull(map.putValue(key, value)); - Assert.assertEquals(map.getValue(key), value); - } - try (final PersistentMap map = new PersistentMap<>(file, dataDirectory,PersistentMap.STRING_CODER, - PersistentMap.STRING_CODER)) { + Assert.assertEquals(map.getValue(key), value); + } + try (final PersistentMap map = new PersistentMap<>(file, dataDirectory, + PersistentMap.STRING_CODER, PersistentMap.STRING_CODER)) { - Assert.assertEquals(map.getValue(key), value); - } - } + Assert.assertEquals(map.getValue(key), value); + } + } - @Test(invocationCount = 1) - public void testManyValues() throws Exception { - final Path file = dataDirectory.resolve("map.db"); - final var insertedValues = new HashMap(); + @Test(invocationCount = 1) + public void testManyValues() throws Exception { + final Path file = dataDirectory.resolve("map.db"); + final var insertedValues = new HashMap(); - final Random rnd = new Random(1); - - try (final PersistentMap map = new PersistentMap<>(file,dataDirectory, PersistentMap.STRING_CODER, - PersistentMap.STRING_CODER)) { - map.setMaxEntriesInNode(2); - - for (int i = 0; i < 100; i++) { - // System.out.println("\n\ninserting: " + i); - final UUID nextUUID = new UUID(rnd.nextLong(), rnd.nextLong()); - final String key = nextUUID.toString() + "__" + i; - final String value = "long value to waste some bytes " + i + "__" - + UUID.randomUUID().toString().repeat(1); - Assert.assertNull(map.getValue(key)); - - Assert.assertNull(map.putValue(key, value)); - - insertedValues.put(key, value); - - // map.print(PersistentMap.STRING_DECODER, PersistentMap.STRING_DECODER); - - final boolean failEarly = false; - if (failEarly) { - for (final var entry : 
insertedValues.entrySet()) { - final String actualValue = map.getValue(entry.getKey()); - - if (!Objects.equals(actualValue, entry.getValue())) { - map.print(); - } - - Assert.assertEquals(actualValue, entry.getValue(), - "value for key " + entry.getKey() + " in the " + i + "th iteration"); - } - } - } - } - - try (final PersistentMap map = new PersistentMap<>(file,dataDirectory, PersistentMap.STRING_CODER, - PersistentMap.STRING_CODER)) { - // map.print(PersistentMap.STRING_DECODER, PersistentMap.STRING_DECODER); - final AtomicInteger maxDepth = new AtomicInteger(); - map.visitNodeEntriesPreOrder( - (node, parentNode, nodeEntry, depth) -> maxDepth.set(Math.max(depth, maxDepth.get()))); - - Assert.assertTrue(maxDepth.get() >= 4, - "The tree's depth. This test must have at least depth 4, " - + "so that we can be sure that splitting parent nodes works recursively, but was " - + maxDepth.get()); - - for (final var entry : insertedValues.entrySet()) { - final String actualValue = map.getValue(entry.getKey()); - Assert.assertEquals(actualValue, entry.getValue(), - "value for key " + entry.getKey() + " after all iterations"); - } - - } - } - - @Test(invocationCount = 1) - public void testManySmallValues() throws Exception { - final Path file = dataDirectory.resolve("map.db"); - final var insertedValues = new HashMap(); - - final SecureRandom rnd = new SecureRandom(); - rnd.setSeed(1); - - try (final PersistentMap map = new PersistentMap<>(file,dataDirectory, PersistentMap.LONG_CODER, - PersistentMap.LONG_CODER)) { - - for (int i = 0; i < 1000; i++) { - // System.out.println("\n\ninserting: " + i); - - final Long key = (long) (rnd.nextGaussian() * Integer.MAX_VALUE); - final Long value = (long) (rnd.nextGaussian() * Integer.MAX_VALUE); - Assert.assertNull(map.getValue(key)); - - Assert.assertNull(map.putValue(key, value)); - - insertedValues.put(key, value); - - // map.print(); - - final boolean failEarly = false; - if (failEarly) { - for (final var entry : 
insertedValues.entrySet()) { - final Long actualValue = map.getValue(entry.getKey()); - - if (!Objects.equals(actualValue, entry.getValue())) { - map.print(); - } - - Assert.assertEquals(actualValue, entry.getValue(), - "value for key " + entry.getKey() + " in the " + i + "th iteration"); - } - } - } - } - - try (final PersistentMap map = new PersistentMap<>(file,dataDirectory, PersistentMap.LONG_CODER, - PersistentMap.LONG_CODER)) { - // map.print(PersistentMap.LONG_DECODER, PersistentMap.LONG_DECODER); - final AtomicInteger counter = new AtomicInteger(); - map.visitNodeEntriesPreOrder( - (node, parentNode, nodeEntry, depth) -> counter.addAndGet(nodeEntry.isInnerNode() ? 1 : 0)); - - Assert.assertEquals(counter.get(), 4, - "number of nodes should be small. Any number larger than 4 indicates, " - + "that new inner nodes are created even though the existing inner " - + "nodes could hold the values"); - - for (final var entry : insertedValues.entrySet()) { - final Long actualValue = map.getValue(entry.getKey()); - Assert.assertEquals(actualValue, entry.getValue(), - "value for key " + entry.getKey() + " after all iterations"); - } - - } - } - @Test(invocationCount = 1) - public void testManyEmptyValues() throws Exception { - final Path file = dataDirectory.resolve("map.db"); - final var insertedValues = new HashMap(); - - final SecureRandom rnd = new SecureRandom(); - rnd.setSeed(1); - - try (final PersistentMap map = new PersistentMap<>(file,dataDirectory, PersistentMap.LONG_CODER, - PersistentMap.EMPTY_ENCODER)) { - - for (int i = 0; i < 1500; i++) { - // System.out.println("\n\ninserting: " + i); - - final Long key = (long) (rnd.nextGaussian() * Integer.MAX_VALUE); - final Empty value = Empty.INSTANCE; - Assert.assertNull(map.getValue(key)); - - Assert.assertNull(map.putValue(key, value)); - - insertedValues.put(key, value); - - // map.print(); - - final boolean failEarly = false; - if (failEarly) { - for (final var entry : insertedValues.entrySet()) { - final 
Empty actualValue = map.getValue(entry.getKey()); - - if (!Objects.equals(actualValue, entry.getValue())) { - map.print(); - } - - Assert.assertEquals(actualValue, entry.getValue(), - "value for key " + entry.getKey() + " in the " + i + "th iteration"); - } - } - } - } - - try (final PersistentMap map = new PersistentMap<>(file,dataDirectory, PersistentMap.LONG_CODER, - PersistentMap.EMPTY_ENCODER)) { - map.print(); - final AtomicInteger counter = new AtomicInteger(); - map.visitNodeEntriesPreOrder( - (node, parentNode, nodeEntry, depth) -> counter.addAndGet(nodeEntry.isInnerNode() ? 1 : 0)); - - Assert.assertEquals(counter.get(), 4, - "number of nodes should be small. Any number larger than 4 indicates, " - + "that new inner nodes are created even though the existing inner " - + "nodes could hold the values"); - - for (final var entry : insertedValues.entrySet()) { - final Empty actualValue = map.getValue(entry.getKey()); - Assert.assertEquals(actualValue, entry.getValue(), - "value for key " + entry.getKey() + " after all iterations"); - } - - } - } - - @Test(invocationCount = 1) - public void testEasyValues() throws Exception { - final Path file = dataDirectory.resolve("map.db"); - final var insertedValues = new HashMap(); - - final Queue numbers = new LinkedList<>(Arrays.asList(1, 15, 11, 4, 16, 3, 13)); - - try (final PersistentMap map = new PersistentMap<>(file,dataDirectory, PersistentMap.STRING_CODER, - PersistentMap.STRING_CODER)) { - - final int numbersSize = numbers.size(); - for (int i = 0; i < numbersSize; i++) { - - final Integer keyNumber = numbers.poll(); - // System.out.println("\n\ninserting: " + keyNumber); - - final String key = "" + keyNumber; - final String value = "value"; - Assert.assertNull(map.getValue(key)); - - Assert.assertNull(map.putValue(key, value)); - - insertedValues.put(key, value); - - // map.print(PersistentMap.STRING_DECODER, PersistentMap.STRING_DECODER); - - for (final var entry : insertedValues.entrySet()) { - final String 
actualValue = map.getValue(entry.getKey()); - - Assert.assertEquals(actualValue, entry.getValue(), - "value for key " + entry.getKey() + " in the " + i + "th iteration"); - } - } - } - - try (final PersistentMap map = new PersistentMap<>(file,dataDirectory, PersistentMap.STRING_CODER, - PersistentMap.STRING_CODER)) { - // map.print(PersistentMap.STRING_DECODER, PersistentMap.STRING_DECODER); - - final AtomicInteger counter = new AtomicInteger(); - map.visitNodeEntriesPreOrder( - (node, parentNode, nodeEntry, depth) -> counter.addAndGet(nodeEntry.isInnerNode() ? 1 : 0)); - - for (final var entry : insertedValues.entrySet()) { - final String actualValue = map.getValue(entry.getKey()); - Assert.assertEquals(actualValue, entry.getValue(), - "value for key " + entry.getKey() + " after all iterations"); - } - - } - } - - @Test - public void testFindAllByPrefix() throws Exception { - final Path file = dataDirectory.resolve("map.db"); - - final Map expectedBar = new HashMap<>(); - for (int i = 0; i < 100; i++) { - // the value is a little bit longer to make sure that the values don't fit into - // a single leaf node - expectedBar.put("bar:" + i, "bar:" + i + "__##################################"); - } - - final Map input = new HashMap<>(); - input.putAll(expectedBar); - for (int i = 0; i < 500; i++) { - input.put(UUID.randomUUID().toString(), UUID.randomUUID().toString()); - } - - try (final PersistentMap map = new PersistentMap<>(file,dataDirectory, PersistentMap.STRING_CODER, - PersistentMap.STRING_CODER)) { - - map.putAllValues(input); - } - - try (final PersistentMap map = new PersistentMap<>(file,dataDirectory, PersistentMap.STRING_CODER, - PersistentMap.STRING_CODER)) { - - { - final LinkedHashMap actualBar = new LinkedHashMap<>(); - final Visitor visitor = (key, value) -> actualBar.put(key, value); - map.visitValues("bar:", visitor); - - Assert.assertEquals(actualBar, expectedBar); - } - } - } - - @Test(invocationCount = 1) - public void testLotsOfValues() throws 
Exception { - final Path file = dataDirectory.resolve("map.db"); - final var insertedValues = new HashMap(); - - final SecureRandom rnd = new SecureRandom(); - rnd.setSeed(1); - - try (final PersistentMap map = new PersistentMap<>(file,dataDirectory, PersistentMap.LONG_CODER, - PersistentMap.LONG_CODER)) { - - for (int i = 0; i < 1_000; i++) { - - final Long key = (long) (rnd.nextGaussian() * Integer.MAX_VALUE); - final Long value = (long) (rnd.nextGaussian() * Integer.MAX_VALUE); - - if (insertedValues.containsKey(key)) { - continue; - } - - Assert.assertNull(map.putValue(key, value)); - - insertedValues.put(key, value); - - final boolean failEarly = false; - if (failEarly) { - for (final var entry : insertedValues.entrySet()) { - final Long actualValue = map.getValue(entry.getKey()); - - if (!Objects.equals(actualValue, entry.getValue())) { - map.print(); - } - - Assert.assertEquals(actualValue, entry.getValue(), - "value for key " + entry.getKey() + " in the " + i + "th iteration"); - } - } - } - } - - try (final PersistentMap map = new PersistentMap<>(file,dataDirectory, PersistentMap.LONG_CODER, - PersistentMap.LONG_CODER)) { - final AtomicInteger counter = new AtomicInteger(); - final AtomicInteger maxDepth = new AtomicInteger(); - map.visitNodeEntriesPreOrder((node, parentNode, nodeEntry, depth) -> { - counter.addAndGet(nodeEntry.isInnerNode() ? 
1 : 0); - maxDepth.set(Math.max(maxDepth.get(), depth)); - }); - - final long start = System.nanoTime(); - for (final var entry : insertedValues.entrySet()) { - final Long actualValue = map.getValue(entry.getKey()); - Assert.assertEquals(actualValue, entry.getValue(), - "value for key " + entry.getKey() + " after all iterations"); - } - System.out.println("nodes=" + counter.get() + ", depth=" + maxDepth.get() + ": " - + (System.nanoTime() - start) / 1_000_000.0 + "ms"); - } - } + final Random rnd = new Random(1); + + try (final PersistentMap map = new PersistentMap<>(file, dataDirectory, + PersistentMap.STRING_CODER, PersistentMap.STRING_CODER)) { + map.setMaxEntriesInNode(2); + + for (int i = 0; i < 100; i++) { + // System.out.println("\n\ninserting: " + i); + final UUID nextUUID = new UUID(rnd.nextLong(), rnd.nextLong()); + final String key = nextUUID.toString() + "__" + i; + final String value = "long value to waste some bytes " + i + "__" + + UUID.randomUUID().toString().repeat(1); + Assert.assertNull(map.getValue(key)); + + Assert.assertNull(map.putValue(key, value)); + + insertedValues.put(key, value); + + // map.print(PersistentMap.STRING_DECODER, PersistentMap.STRING_DECODER); + + final boolean failEarly = false; + if (failEarly) { + for (final var entry : insertedValues.entrySet()) { + final String actualValue = map.getValue(entry.getKey()); + + if (!Objects.equals(actualValue, entry.getValue())) { + map.print(); + } + + Assert.assertEquals(actualValue, entry.getValue(), + "value for key " + entry.getKey() + " in the " + i + "th iteration"); + } + } + } + } + + try (final PersistentMap map = new PersistentMap<>(file, dataDirectory, + PersistentMap.STRING_CODER, PersistentMap.STRING_CODER)) { + // map.print(PersistentMap.STRING_DECODER, PersistentMap.STRING_DECODER); + final AtomicInteger maxDepth = new AtomicInteger(); + map.visitNodeEntriesPreOrder( + (node, parentNode, nodeEntry, depth) -> maxDepth.set(Math.max(depth, maxDepth.get()))); + + 
Assert.assertTrue(maxDepth.get() >= 4, + "The tree's depth. This test must have at least depth 4, " + + "so that we can be sure that splitting parent nodes works recursively, but was " + + maxDepth.get()); + + for (final var entry : insertedValues.entrySet()) { + final String actualValue = map.getValue(entry.getKey()); + Assert.assertEquals(actualValue, entry.getValue(), + "value for key " + entry.getKey() + " after all iterations"); + } + + } + } + + @Test(invocationCount = 1) + public void testManySmallValues() throws Exception { + final Path file = dataDirectory.resolve("map.db"); + final var insertedValues = new HashMap(); + + final SecureRandom rnd = new SecureRandom(); + rnd.setSeed(1); + + try (final PersistentMap map = new PersistentMap<>(file, dataDirectory, PersistentMap.LONG_CODER, + PersistentMap.LONG_CODER)) { + + for (int i = 0; i < 1000; i++) { + // System.out.println("\n\ninserting: " + i); + + final Long key = (long) (rnd.nextGaussian() * Integer.MAX_VALUE); + final Long value = (long) (rnd.nextGaussian() * Integer.MAX_VALUE); + Assert.assertNull(map.getValue(key)); + + Assert.assertNull(map.putValue(key, value)); + + insertedValues.put(key, value); + + // map.print(); + + final boolean failEarly = false; + if (failEarly) { + for (final var entry : insertedValues.entrySet()) { + final Long actualValue = map.getValue(entry.getKey()); + + if (!Objects.equals(actualValue, entry.getValue())) { + map.print(); + } + + Assert.assertEquals(actualValue, entry.getValue(), + "value for key " + entry.getKey() + " in the " + i + "th iteration"); + } + } + } + } + + try (final PersistentMap map = new PersistentMap<>(file, dataDirectory, PersistentMap.LONG_CODER, + PersistentMap.LONG_CODER)) { + // map.print(PersistentMap.LONG_DECODER, PersistentMap.LONG_DECODER); + final AtomicInteger counter = new AtomicInteger(); + map.visitNodeEntriesPreOrder( + (node, parentNode, nodeEntry, depth) -> counter.addAndGet(nodeEntry.isInnerNode() ? 
1 : 0)); + + Assert.assertEquals(counter.get(), 4, + "number of nodes should be small. Any number larger than 4 indicates, " + + "that new inner nodes are created even though the existing inner " + + "nodes could hold the values"); + + for (final var entry : insertedValues.entrySet()) { + final Long actualValue = map.getValue(entry.getKey()); + Assert.assertEquals(actualValue, entry.getValue(), + "value for key " + entry.getKey() + " after all iterations"); + } + + } + } + + @Test(invocationCount = 1) + public void testManyEmptyValues() throws Exception { + final Path file = dataDirectory.resolve("map.db"); + final var insertedValues = new HashMap(); + + final SecureRandom rnd = new SecureRandom(); + rnd.setSeed(1); + + try (final PersistentMap map = new PersistentMap<>(file, dataDirectory, PersistentMap.LONG_CODER, + PersistentMap.EMPTY_ENCODER)) { + + for (int i = 0; i < 1500; i++) { + // System.out.println("\n\ninserting: " + i); + + final Long key = (long) (rnd.nextGaussian() * Integer.MAX_VALUE); + final Empty value = Empty.INSTANCE; + Assert.assertNull(map.getValue(key)); + + Assert.assertNull(map.putValue(key, value)); + + insertedValues.put(key, value); + + // map.print(); + + final boolean failEarly = false; + if (failEarly) { + for (final var entry : insertedValues.entrySet()) { + final Empty actualValue = map.getValue(entry.getKey()); + + if (!Objects.equals(actualValue, entry.getValue())) { + map.print(); + } + + Assert.assertEquals(actualValue, entry.getValue(), + "value for key " + entry.getKey() + " in the " + i + "th iteration"); + } + } + } + } + + try (final PersistentMap map = new PersistentMap<>(file, dataDirectory, PersistentMap.LONG_CODER, + PersistentMap.EMPTY_ENCODER)) { + map.print(); + final AtomicInteger counter = new AtomicInteger(); + map.visitNodeEntriesPreOrder( + (node, parentNode, nodeEntry, depth) -> counter.addAndGet(nodeEntry.isInnerNode() ? 1 : 0)); + + Assert.assertEquals(counter.get(), 4, + "number of nodes should be small. 
Any number larger than 4 indicates, " + + "that new inner nodes are created even though the existing inner " + + "nodes could hold the values"); + + for (final var entry : insertedValues.entrySet()) { + final Empty actualValue = map.getValue(entry.getKey()); + Assert.assertEquals(actualValue, entry.getValue(), + "value for key " + entry.getKey() + " after all iterations"); + } + + } + } + + @Test(invocationCount = 1) + public void testEasyValues() throws Exception { + final Path file = dataDirectory.resolve("map.db"); + final var insertedValues = new HashMap(); + + final Queue numbers = new LinkedList<>(Arrays.asList(1, 15, 11, 4, 16, 3, 13)); + + try (final PersistentMap map = new PersistentMap<>(file, dataDirectory, + PersistentMap.STRING_CODER, PersistentMap.STRING_CODER)) { + + final int numbersSize = numbers.size(); + for (int i = 0; i < numbersSize; i++) { + + final Integer keyNumber = numbers.poll(); + // System.out.println("\n\ninserting: " + keyNumber); + + final String key = "" + keyNumber; + final String value = "value"; + Assert.assertNull(map.getValue(key)); + + Assert.assertNull(map.putValue(key, value)); + + insertedValues.put(key, value); + + // map.print(PersistentMap.STRING_DECODER, PersistentMap.STRING_DECODER); + + for (final var entry : insertedValues.entrySet()) { + final String actualValue = map.getValue(entry.getKey()); + + Assert.assertEquals(actualValue, entry.getValue(), + "value for key " + entry.getKey() + " in the " + i + "th iteration"); + } + } + } + + try (final PersistentMap map = new PersistentMap<>(file, dataDirectory, + PersistentMap.STRING_CODER, PersistentMap.STRING_CODER)) { + // map.print(PersistentMap.STRING_DECODER, PersistentMap.STRING_DECODER); + + final AtomicInteger counter = new AtomicInteger(); + map.visitNodeEntriesPreOrder( + (node, parentNode, nodeEntry, depth) -> counter.addAndGet(nodeEntry.isInnerNode() ? 
1 : 0)); + + for (final var entry : insertedValues.entrySet()) { + final String actualValue = map.getValue(entry.getKey()); + Assert.assertEquals(actualValue, entry.getValue(), + "value for key " + entry.getKey() + " after all iterations"); + } + + } + } + + @Test + public void testFindAllByPrefix() throws Exception { + final Path file = dataDirectory.resolve("map.db"); + + final Map expectedBar = new HashMap<>(); + for (int i = 0; i < 100; i++) { + // the value is a little bit longer to make sure that the values don't fit into + // a single leaf node + expectedBar.put("bar:" + i, "bar:" + i + "__##################################"); + } + + final Map input = new HashMap<>(); + input.putAll(expectedBar); + for (int i = 0; i < 500; i++) { + input.put(UUID.randomUUID().toString(), UUID.randomUUID().toString()); + } + + try (final PersistentMap map = new PersistentMap<>(file, dataDirectory, + PersistentMap.STRING_CODER, PersistentMap.STRING_CODER)) { + + map.putAllValues(input); + } + + try (final PersistentMap map = new PersistentMap<>(file, dataDirectory, + PersistentMap.STRING_CODER, PersistentMap.STRING_CODER)) { + + { + final LinkedHashMap actualBar = new LinkedHashMap<>(); + final Visitor visitor = (key, value) -> actualBar.put(key, value); + map.visitValues("bar:", visitor); + + Assert.assertEquals(actualBar, expectedBar); + } + } + } + + @Test(invocationCount = 1) + public void testLotsOfValues() throws Exception { + final Path file = dataDirectory.resolve("map.db"); + final var insertedValues = new HashMap(); + + final SecureRandom rnd = new SecureRandom(); + rnd.setSeed(1); + + try (final PersistentMap map = new PersistentMap<>(file, dataDirectory, PersistentMap.LONG_CODER, + PersistentMap.LONG_CODER)) { + + for (int i = 0; i < 1_000; i++) { + + final Long key = (long) (rnd.nextGaussian() * Integer.MAX_VALUE); + final Long value = (long) (rnd.nextGaussian() * Integer.MAX_VALUE); + + if (insertedValues.containsKey(key)) { + continue; + } + + 
Assert.assertNull(map.putValue(key, value)); + + insertedValues.put(key, value); + + final boolean failEarly = false; + if (failEarly) { + for (final var entry : insertedValues.entrySet()) { + final Long actualValue = map.getValue(entry.getKey()); + + if (!Objects.equals(actualValue, entry.getValue())) { + map.print(); + } + + Assert.assertEquals(actualValue, entry.getValue(), + "value for key " + entry.getKey() + " in the " + i + "th iteration"); + } + } + } + } + + try (final PersistentMap map = new PersistentMap<>(file, dataDirectory, PersistentMap.LONG_CODER, + PersistentMap.LONG_CODER)) { + final AtomicInteger counter = new AtomicInteger(); + final AtomicInteger maxDepth = new AtomicInteger(); + map.visitNodeEntriesPreOrder((node, parentNode, nodeEntry, depth) -> { + counter.addAndGet(nodeEntry.isInnerNode() ? 1 : 0); + maxDepth.set(Math.max(maxDepth.get(), depth)); + }); + + final long start = System.nanoTime(); + for (final var entry : insertedValues.entrySet()) { + final Long actualValue = map.getValue(entry.getKey()); + Assert.assertEquals(actualValue, entry.getValue(), + "value for key " + entry.getKey() + " after all iterations"); + } + System.out.println("nodes=" + counter.get() + ", depth=" + maxDepth.get() + ": " + + (System.nanoTime() - start) / 1_000_000.0 + "ms"); + } + } } diff --git a/byte-utils/src/main/java/org/lucares/utils/byteencoder/VariableByteEncoder.java b/byte-utils/src/main/java/org/lucares/utils/byteencoder/VariableByteEncoder.java index bc35c30..2fb69da 100644 --- a/byte-utils/src/main/java/org/lucares/utils/byteencoder/VariableByteEncoder.java +++ b/byte-utils/src/main/java/org/lucares/utils/byteencoder/VariableByteEncoder.java @@ -25,218 +25,218 @@ import org.lucares.collections.LongList; */ public class VariableByteEncoder { - public static final long MIN_VALUE = Long.MIN_VALUE / 2 + 1; - public static final long MAX_VALUE = Long.MAX_VALUE / 2; + public static final long MIN_VALUE = Long.MIN_VALUE / 2 + 1; + public static final 
long MAX_VALUE = Long.MAX_VALUE / 2; - private static final int MAX_BYTES_PER_VALUE = 10; + private static final int MAX_BYTES_PER_VALUE = 10; - private static final int CONTINUATION_BYTE_FLAG = 1 << 7; // 10000000 + private static final int CONTINUATION_BYTE_FLAG = 1 << 7; // 10000000 - private static final long DATA_BITS = (1 << 7) - 1; // 01111111 + private static final long DATA_BITS = (1 << 7) - 1; // 01111111 - private static final ThreadLocal SINGLE_VALUE_BUFFER = ThreadLocal - .withInitial(() -> new byte[MAX_BYTES_PER_VALUE]); + private static final ThreadLocal SINGLE_VALUE_BUFFER = ThreadLocal + .withInitial(() -> new byte[MAX_BYTES_PER_VALUE]); - /** - * Encodes time and value into the given buffer. - *

- * If the encoded values do not fit into the buffer, then 0 is returned. The - * caller will have to provide a new buffer with more space. - * - * @param value1 first value, (between -(2^62)+1 and 2^62) - * @param value2 second value, (between -(2^62)+1 and 2^62) - * @param buffer - * @param offsetInBuffer - * @return number of bytes appended to the provided buffer - */ - public static int encodeInto(final long value1, final long value2, final byte[] buffer, final int offsetInBuffer) { + /** + * Encodes time and value into the given buffer. + *

+ * If the encoded values do not fit into the buffer, then 0 is returned. The + * caller will have to provide a new buffer with more space. + * + * @param value1 first value, (between -(2^62)+1 and 2^62) + * @param value2 second value, (between -(2^62)+1 and 2^62) + * @param buffer + * @param offsetInBuffer + * @return number of bytes appended to the provided buffer + */ + public static int encodeInto(final long value1, final long value2, final byte[] buffer, final int offsetInBuffer) { - int offset = offsetInBuffer; - final int bytesAdded1 = encodeInto(value1, buffer, offset); - if (bytesAdded1 > 0) { - offset += bytesAdded1; - final int bytesAdded2 = encodeInto(value2, buffer, offset); + int offset = offsetInBuffer; + final int bytesAdded1 = encodeInto(value1, buffer, offset); + if (bytesAdded1 > 0) { + offset += bytesAdded1; + final int bytesAdded2 = encodeInto(value2, buffer, offset); - if (bytesAdded2 > 0) { - // both value fit into the buffer - // return the number of added bytes - return bytesAdded1 + bytesAdded2; - } else { - // second value did not fit into the buffer, - // remove the first value - // and return 0 to indicate that the values did not fit - Arrays.fill(buffer, offsetInBuffer, buffer.length, (byte) 0); - return 0; - } - } + if (bytesAdded2 > 0) { + // both value fit into the buffer + // return the number of added bytes + return bytesAdded1 + bytesAdded2; + } else { + // second value did not fit into the buffer, + // remove the first value + // and return 0 to indicate that the values did not fit + Arrays.fill(buffer, offsetInBuffer, buffer.length, (byte) 0); + return 0; + } + } - // return 0 if the encoded bytes do not fit - // the caller will have to provide a new buffer - return 0; - } + // return 0 if the encoded bytes do not fit + // the caller will have to provide a new buffer + return 0; + } - public static LongList decode(final byte[] buffer) { + public static LongList decode(final byte[] buffer) { - final LongList result = new 
LongList(); - decodeInto(buffer, result); - return result; - } + final LongList result = new LongList(); + decodeInto(buffer, result); + return result; + } - public static int encodeInto(final long value, final byte[] buffer, final int offsetInBuffer) { + public static int encodeInto(final long value, final byte[] buffer, final int offsetInBuffer) { - int offset = offsetInBuffer; + int offset = offsetInBuffer; - assert value >= MIN_VALUE : "min encodable value is -2^62+1 = " + MIN_VALUE; - assert value <= MAX_VALUE : "max encodable value is 2^62 = " + MAX_VALUE; + assert value >= MIN_VALUE : "min encodable value is -2^62+1 = " + MIN_VALUE; + assert value <= MAX_VALUE : "max encodable value is 2^62 = " + MAX_VALUE; - long normVal = encodeIntoPositiveValue(value); + long normVal = encodeIntoPositiveValue(value); - try { - final long maxFirstByteValue = 127; + try { + final long maxFirstByteValue = 127; - while (normVal > maxFirstByteValue) { - buffer[offset] = (byte) ((normVal & DATA_BITS) | CONTINUATION_BYTE_FLAG); - offset++; - normVal = normVal >> 7; // shift by number of value bits - } - buffer[offset] = (byte) (normVal); - return offset - offsetInBuffer + 1; // return number of encoded bytes - } catch (final ArrayIndexOutOfBoundsException e) { - // We need more bytes to store the value than are available. - // Reset the bytes we just wrote. - Arrays.fill(buffer, offsetInBuffer, buffer.length, (byte) 0); - return 0; - } - } + while (normVal > maxFirstByteValue) { + buffer[offset] = (byte) ((normVal & DATA_BITS) | CONTINUATION_BYTE_FLAG); + offset++; + normVal = normVal >> 7; // shift by number of value bits + } + buffer[offset] = (byte) (normVal); + return offset - offsetInBuffer + 1; // return number of encoded bytes + } catch (final ArrayIndexOutOfBoundsException e) { + // We need more bytes to store the value than are available. + // Reset the bytes we just wrote. 
+ Arrays.fill(buffer, offsetInBuffer, buffer.length, (byte) 0); + return 0; + } + } - private static void decodeInto(final byte[] buffer, final LongList bufferedLongs) { - for (int i = 0; i < buffer.length; i++) { + private static void decodeInto(final byte[] buffer, final LongList bufferedLongs) { + for (int i = 0; i < buffer.length; i++) { - if (buffer[i] == 0) { - // no value is encoded to 0 => there are no further values - break; - } else { - long val = buffer[i] & DATA_BITS; - int shift = 7; - while (!isLastByte(buffer[i]) && i + 1 < buffer.length) { - val = val | ((buffer[i + 1] & DATA_BITS) << shift); - i++; - shift += 7; - } - bufferedLongs.add(decodeIntoSignedValue(val)); - } - } - } + if (buffer[i] == 0) { + // no value is encoded to 0 => there are no further values + break; + } else { + long val = buffer[i] & DATA_BITS; + int shift = 7; + while (!isLastByte(buffer[i]) && i + 1 < buffer.length) { + val = val | ((buffer[i + 1] & DATA_BITS) << shift); + i++; + shift += 7; + } + bufferedLongs.add(decodeIntoSignedValue(val)); + } + } + } - /** - * The input value (positive, negative or null) is encoded into a positive - * value. - * - *

-	 *
-	 * input:   0 1 -1 2 -2 3 -3
-	 * encoded: 1 2  3 4  5 6  7
-	 * 
- */ - private static long encodeIntoPositiveValue(final long value) { - return value > 0 ? value * 2 : (value * -2) + 1; - } + /** + * The input value (positive, negative or null) is encoded into a positive + * value. + * + *
+     *
+     * input:   0 1 -1 2 -2 3 -3
+     * encoded: 1 2  3 4  5 6  7
+     * 
+ */ + private static long encodeIntoPositiveValue(final long value) { + return value > 0 ? value * 2 : (value * -2) + 1; + } - /** - * inverse of {@link #encodeIntoPositiveValue(long)} - * - * @param value - * @return - */ - private static long decodeIntoSignedValue(final long value) { - return (value / 2) * (value % 2 == 0 ? 1 : -1); - } + /** + * inverse of {@link #encodeIntoPositiveValue(long)} + * + * @param value + * @return + */ + private static long decodeIntoSignedValue(final long value) { + return (value / 2) * (value % 2 == 0 ? 1 : -1); + } - private static boolean isLastByte(final byte b) { - return (b & CONTINUATION_BYTE_FLAG) == 0; - } + private static boolean isLastByte(final byte b) { + return (b & CONTINUATION_BYTE_FLAG) == 0; + } - public static byte[] encode(final long... longs) { + public static byte[] encode(final long... longs) { - int neededBytes = 0; - for (final long l : longs) { - neededBytes += VariableByteEncoder.neededBytes(l); - } + int neededBytes = 0; + for (final long l : longs) { + neededBytes += VariableByteEncoder.neededBytes(l); + } - final byte[] result = new byte[neededBytes]; + final byte[] result = new byte[neededBytes]; - final int bytesWritten = encodeInto(longs, result, 0); - if (bytesWritten <= 0) { - throw new IllegalStateException( - "Did not reserve enough space to store " + longs + ". We reserved only " + neededBytes + " bytes."); - } + final int bytesWritten = encodeInto(longs, result, 0); + if (bytesWritten <= 0) { + throw new IllegalStateException( + "Did not reserve enough space to store " + longs + ". 
We reserved only " + neededBytes + " bytes."); + } - return result; - } + return result; + } - public static long decodeFirstValue(final byte[] buffer) { + public static long decodeFirstValue(final byte[] buffer) { - int offset = 0; - long val = buffer[offset] & DATA_BITS; - int shift = 7; - while (!isLastByte(buffer[offset]) && offset + 1 < buffer.length) { - val = val | ((buffer[offset + 1] & DATA_BITS) << shift); - offset++; - shift += 7; - } - return decodeIntoSignedValue(val); - } + int offset = 0; + long val = buffer[offset] & DATA_BITS; + int shift = 7; + while (!isLastByte(buffer[offset]) && offset + 1 < buffer.length) { + val = val | ((buffer[offset + 1] & DATA_BITS) << shift); + offset++; + shift += 7; + } + return decodeIntoSignedValue(val); + } - public static int encodeInto(final LongList values, final byte[] buffer, final int offsetInBuffer) { + public static int encodeInto(final LongList values, final byte[] buffer, final int offsetInBuffer) { - int offset = offsetInBuffer; - for (int i = 0; i < values.size(); i++) { - final long value = values.get(i); + int offset = offsetInBuffer; + for (int i = 0; i < values.size(); i++) { + final long value = values.get(i); - final int bytesAdded = encodeInto(value, buffer, offset); - if (bytesAdded <= 0) { - Arrays.fill(buffer, offsetInBuffer, offset, (byte) 0); - return 0; - } - offset += bytesAdded; - } - return offset - offsetInBuffer; - } + final int bytesAdded = encodeInto(value, buffer, offset); + if (bytesAdded <= 0) { + Arrays.fill(buffer, offsetInBuffer, offset, (byte) 0); + return 0; + } + offset += bytesAdded; + } + return offset - offsetInBuffer; + } - public static int encodeInto(final long[] values, final byte[] buffer, final int offsetInBuffer) { + public static int encodeInto(final long[] values, final byte[] buffer, final int offsetInBuffer) { - int offset = offsetInBuffer; - for (int i = 0; i < values.length; i++) { - final long value = values[i]; + int offset = offsetInBuffer; + for (int i = 
0; i < values.length; i++) { + final long value = values[i]; - final int bytesAdded = encodeInto(value, buffer, offset); - if (bytesAdded <= 0) { - Arrays.fill(buffer, offsetInBuffer, offset, (byte) 0); - return 0; - } - offset += bytesAdded; - } - return offset - offsetInBuffer; - } + final int bytesAdded = encodeInto(value, buffer, offset); + if (bytesAdded <= 0) { + Arrays.fill(buffer, offsetInBuffer, offset, (byte) 0); + return 0; + } + offset += bytesAdded; + } + return offset - offsetInBuffer; + } - public static byte[] encode(final LongList longs) { + public static byte[] encode(final LongList longs) { - final int neededBytes = longs.stream().mapToInt(VariableByteEncoder::neededBytes).sum(); - final byte[] result = new byte[neededBytes]; + final int neededBytes = longs.stream().mapToInt(VariableByteEncoder::neededBytes).sum(); + final byte[] result = new byte[neededBytes]; - final int bytesWritten = encodeInto(longs, result, 0); - if (bytesWritten <= 0) { - throw new IllegalStateException( - "Did not reserve enough space to store " + longs + ". We reserved only " + neededBytes + " bytes."); - } + final int bytesWritten = encodeInto(longs, result, 0); + if (bytesWritten <= 0) { + throw new IllegalStateException( + "Did not reserve enough space to store " + longs + ". 
We reserved only " + neededBytes + " bytes."); + } - return result; - } + return result; + } - public static int neededBytes(final long value) { - final byte[] buffer = SINGLE_VALUE_BUFFER.get(); - final int usedBytes = encodeInto(value, buffer, 0); - return usedBytes; - } + public static int neededBytes(final long value) { + final byte[] buffer = SINGLE_VALUE_BUFFER.get(); + final int usedBytes = encodeInto(value, buffer, 0); + return usedBytes; + } } diff --git a/byte-utils/src/test/java/org/lucares/utils/byteencoder/VariableByteEncoderTest.java b/byte-utils/src/test/java/org/lucares/utils/byteencoder/VariableByteEncoderTest.java index b975cfc..034847e 100644 --- a/byte-utils/src/test/java/org/lucares/utils/byteencoder/VariableByteEncoderTest.java +++ b/byte-utils/src/test/java/org/lucares/utils/byteencoder/VariableByteEncoderTest.java @@ -14,97 +14,97 @@ import org.testng.annotations.Test; @Test public class VariableByteEncoderTest { - @DataProvider - public Object[][] providerEncodeDecode() { - return new Object[][] { // - // encoded into 1 byte - { 10, -5, 5 }, // - { 10, 0, 5 }, // - { 10, -63, 63 }, // - // encoded into 2 bytes - { 10, 130, 131 }, // - // encoded into 3 bytes - { 10, -8191, 8191 }, // - // encoded into n bytes - { 1, Long.MAX_VALUE / 2 - 4, Long.MAX_VALUE / 2 }, // - { 1, Long.MIN_VALUE / 2, Long.MAX_VALUE / 2 }, // - { 11, Long.MIN_VALUE / 2 + 1, Long.MIN_VALUE / 2 + 3 }, // - { 12, Long.MAX_VALUE / 2 - 3, Long.MAX_VALUE / 2 },// - }; - } + @DataProvider + public Object[][] providerEncodeDecode() { + return new Object[][] { // + // encoded into 1 byte + { 10, -5, 5 }, // + { 10, 0, 5 }, // + { 10, -63, 63 }, // + // encoded into 2 bytes + { 10, 130, 131 }, // + // encoded into 3 bytes + { 10, -8191, 8191 }, // + // encoded into n bytes + { 1, Long.MAX_VALUE / 2 - 4, Long.MAX_VALUE / 2 }, // + { 1, Long.MIN_VALUE / 2, Long.MAX_VALUE / 2 }, // + { 11, Long.MIN_VALUE / 2 + 1, Long.MIN_VALUE / 2 + 3 }, // + { 12, Long.MAX_VALUE / 2 - 3, 
Long.MAX_VALUE / 2 },// + }; + } - @Test(dataProvider = "providerEncodeDecode") - public void testEncodeDecode(final long numValues, final long minValue, final long maxValue) { + @Test(dataProvider = "providerEncodeDecode") + public void testEncodeDecode(final long numValues, final long minValue, final long maxValue) { - final LongList originalValues = new LongList(); - final byte[] buffer = new byte[1024]; - final AtomicInteger offsetInBuffer = new AtomicInteger(0); + final LongList originalValues = new LongList(); + final byte[] buffer = new byte[1024]; + final AtomicInteger offsetInBuffer = new AtomicInteger(0); - ThreadLocalRandom.current().longs(numValues, minValue, maxValue).forEachOrdered(value -> { - originalValues.add(value); - final int appendedBytes = VariableByteEncoder.encodeInto(value, buffer, offsetInBuffer.get()); - offsetInBuffer.addAndGet(appendedBytes); - }); + ThreadLocalRandom.current().longs(numValues, minValue, maxValue).forEachOrdered(value -> { + originalValues.add(value); + final int appendedBytes = VariableByteEncoder.encodeInto(value, buffer, offsetInBuffer.get()); + offsetInBuffer.addAndGet(appendedBytes); + }); - final LongList actualValues = VariableByteEncoder.decode(buffer); + final LongList actualValues = VariableByteEncoder.decode(buffer); - assertEquals(actualValues.toString(), originalValues.toString()); - } + assertEquals(actualValues.toString(), originalValues.toString()); + } - @DataProvider - public Object[][] providerEncodeDecodeOfTwoValues() { - return new Object[][] { // - { 12345, 67890, false, 1 }, // first value needs three bytes, it does not fit - { 12345, 67890, false, 2 }, // first value needs three bytes, it does not fit - { 12345, 67890, false, 3 }, // first value needs three bytes, second value does not fit - { 12345, 67890, false, 4 }, // first value needs three bytes, second value does not fit - { 12345, 67890, false, 5 }, // first value needs three bytes, second value does not fit - { 12345, 67890, true, 6 }, 
// both values need three bytes - { 12345, 67890, true, 10 }, // - }; - } + @DataProvider + public Object[][] providerEncodeDecodeOfTwoValues() { + return new Object[][] { // + { 12345, 67890, false, 1 }, // first value needs three bytes, it does not fit + { 12345, 67890, false, 2 }, // first value needs three bytes, it does not fit + { 12345, 67890, false, 3 }, // first value needs three bytes, second value does not fit + { 12345, 67890, false, 4 }, // first value needs three bytes, second value does not fit + { 12345, 67890, false, 5 }, // first value needs three bytes, second value does not fit + { 12345, 67890, true, 6 }, // both values need three bytes + { 12345, 67890, true, 10 }, // + }; + } - @Test(dataProvider = "providerEncodeDecodeOfTwoValues") - public void testEncodeDecodeOfTwoValues(final long value1, final long value2, final boolean fits, - final int bufferSize) { - final LongList originalValues = new LongList(); - final byte[] buffer = new byte[bufferSize]; + @Test(dataProvider = "providerEncodeDecodeOfTwoValues") + public void testEncodeDecodeOfTwoValues(final long value1, final long value2, final boolean fits, + final int bufferSize) { + final LongList originalValues = new LongList(); + final byte[] buffer = new byte[bufferSize]; - final int bytesAdded = VariableByteEncoder.encodeInto(value1, value2, buffer, 0); - Assert.assertEquals(bytesAdded > 0, fits); - if (fits) { - originalValues.addAll(value1, value2); - } else { - Assert.assertEquals(buffer, new byte[bufferSize], - "checks that buffer is resetted after it discovers the values do not fit"); - } + final int bytesAdded = VariableByteEncoder.encodeInto(value1, value2, buffer, 0); + Assert.assertEquals(bytesAdded > 0, fits); + if (fits) { + originalValues.addAll(value1, value2); + } else { + Assert.assertEquals(buffer, new byte[bufferSize], + "checks that buffer is resetted after it discovers the values do not fit"); + } - final LongList decodedValues = VariableByteEncoder.decode(buffer); - 
Assert.assertEquals(decodedValues, originalValues); - } + final LongList decodedValues = VariableByteEncoder.decode(buffer); + Assert.assertEquals(decodedValues, originalValues); + } - @DataProvider - public Object[][] providerNededBytes() { - return new Object[][] { // - { 0, 1 }, // - { -10, 1 }, // - { 10, 1 }, // - { -63, 1 }, // - { 63, 1 }, // - { -64, 2 }, // - { 64, 2 }, // - { -8191, 2 }, // - { 8191, 2 }, // - { -8192, 3 }, // - { 8192, 3 }, // - }; - } + @DataProvider + public Object[][] providerNededBytes() { + return new Object[][] { // + { 0, 1 }, // + { -10, 1 }, // + { 10, 1 }, // + { -63, 1 }, // + { 63, 1 }, // + { -64, 2 }, // + { 64, 2 }, // + { -8191, 2 }, // + { 8191, 2 }, // + { -8192, 3 }, // + { 8192, 3 }, // + }; + } - @Test(dataProvider = "providerNededBytes") - public void testNeededBytes(final long value, final int expectedNeededBytes) { + @Test(dataProvider = "providerNededBytes") + public void testNeededBytes(final long value, final int expectedNeededBytes) { - final int neededBytes = VariableByteEncoder.neededBytes(value); - final byte[] encoded = VariableByteEncoder.encode(value); - Assert.assertEquals(encoded.length, neededBytes); - } + final int neededBytes = VariableByteEncoder.neededBytes(value); + final byte[] encoded = VariableByteEncoder.encode(value); + Assert.assertEquals(encoded.length, neededBytes); + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/Doc.java b/data-store/src/main/java/org/lucares/pdb/datastore/Doc.java index c0b91ef..2bbdba3 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/Doc.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/Doc.java @@ -5,60 +5,60 @@ import org.lucares.pdb.blockstorage.BSFile; import org.lucares.pdb.datastore.internal.ParititionId; public class Doc { - private final Tags tags; + private final Tags tags; - /** - * the block number used by {@link BSFile} - */ - private final long rootBlockNumber; + /** + * the block number used by {@link 
BSFile} + */ + private final long rootBlockNumber; - private ParititionId partitionId; + private ParititionId partitionId; - /** - * Initializes a new document. - *

- * The path can be {@code null}. If path is {@code null}, then - * {@code offsetInListingFile} must be set. The path will be initialized lazily - * when needed. - *

- * This is used to reduce the memory footprint. - * - * @param tags - * @param offsetInListingFile must be set if {@code path} is {@code null} - * @param storageBasePath the storage base path. - * @param relativePath optional, can be {@code null}. This path is - * relative to {@code storageBasePath} - */ - public Doc(final ParititionId partitionId, final Tags tags, final long rootBlockNumber) { - this.partitionId = partitionId; - this.tags = tags; - this.rootBlockNumber = rootBlockNumber; - } + /** + * Initializes a new document. + *

+ * The path can be {@code null}. If path is {@code null}, then + * {@code offsetInListingFile} must be set. The path will be initialized lazily + * when needed. + *

+ * This is used to reduce the memory footprint. + * + * @param tags + * @param offsetInListingFile must be set if {@code path} is {@code null} + * @param storageBasePath the storage base path. + * @param relativePath optional, can be {@code null}. This path is + * relative to {@code storageBasePath} + */ + public Doc(final ParititionId partitionId, final Tags tags, final long rootBlockNumber) { + this.partitionId = partitionId; + this.tags = tags; + this.rootBlockNumber = rootBlockNumber; + } - public ParititionId getPartitionId() { - return partitionId; - } + public ParititionId getPartitionId() { + return partitionId; + } - public Tags getTags() { - return tags; - } + public Tags getTags() { + return tags; + } - /** - * the block number used by {@link BSFile} - * - * @return the root block number of this document - */ - public long getRootBlockNumber() { - return rootBlockNumber; - } + /** + * the block number used by {@link BSFile} + * + * @return the root block number of this document + */ + public long getRootBlockNumber() { + return rootBlockNumber; + } - public void setPartitionId(final ParititionId partitionId) { - this.partitionId = partitionId; - } + public void setPartitionId(final ParititionId partitionId) { + this.partitionId = partitionId; + } - @Override - public String toString() { - return "Doc [partitionId=" + partitionId + ", tags=" + tags + ", rootBlockNumber=" + rootBlockNumber + "]"; - } + @Override + public String toString() { + return "Doc [partitionId=" + partitionId + ", tags=" + tags + ", rootBlockNumber=" + rootBlockNumber + "]"; + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/InvalidValueException.java b/data-store/src/main/java/org/lucares/pdb/datastore/InvalidValueException.java index 020987d..047010b 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/InvalidValueException.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/InvalidValueException.java @@ -2,9 +2,9 @@ package 
org.lucares.pdb.datastore; public class InvalidValueException extends IllegalArgumentException { - private static final long serialVersionUID = -8707541995666127297L; + private static final long serialVersionUID = -8707541995666127297L; - public InvalidValueException(final String msg) { - super(msg); - } + public InvalidValueException(final String msg) { + super(msg); + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/PdbFile.java b/data-store/src/main/java/org/lucares/pdb/datastore/PdbFile.java index 364b41c..f7f8273 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/PdbFile.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/PdbFile.java @@ -14,86 +14,86 @@ import org.lucares.pdb.diskstorage.DiskStorage; public class PdbFile { - private static class PdbFileToLongStream implements Function> { + private static class PdbFileToLongStream implements Function> { - private final PartitionDiskStore partitionDiskStorage; + private final PartitionDiskStore partitionDiskStorage; - public PdbFileToLongStream(final PartitionDiskStore partitionDiskStorage) { - this.partitionDiskStorage = partitionDiskStorage; - } + public PdbFileToLongStream(final PartitionDiskStore partitionDiskStorage) { + this.partitionDiskStorage = partitionDiskStorage; + } - @Override - public Stream apply(final PdbFile pdbFile) { - final DiskStorage diskStorage = partitionDiskStorage.getExisting(pdbFile.getPartitionId()); - final TimeSeriesFile bsFile = TimeSeriesFile.existingFile(pdbFile.getRootBlockNumber(), diskStorage); - return bsFile.streamOfLongLists(); - } - } + @Override + public Stream apply(final PdbFile pdbFile) { + final DiskStorage diskStorage = partitionDiskStorage.getExisting(pdbFile.getPartitionId()); + final TimeSeriesFile bsFile = TimeSeriesFile.existingFile(pdbFile.getRootBlockNumber(), diskStorage); + return bsFile.streamOfLongLists(); + } + } - private final Tags tags; + private final Tags tags; - /** - * The rootBlockNumber to be used by 
{@link BSFile} - */ - private final long rootBlockNumber; + /** + * The rootBlockNumber to be used by {@link BSFile} + */ + private final long rootBlockNumber; - private final ParititionId partitionId; + private final ParititionId partitionId; - public PdbFile(final ParititionId partitionId, final long rootBlockNumber, final Tags tags) { - this.partitionId = partitionId; - this.rootBlockNumber = rootBlockNumber; - this.tags = tags; - } + public PdbFile(final ParititionId partitionId, final long rootBlockNumber, final Tags tags) { + this.partitionId = partitionId; + this.rootBlockNumber = rootBlockNumber; + this.tags = tags; + } - public Tags getTags() { - return tags; - } + public Tags getTags() { + return tags; + } - public long getRootBlockNumber() { - return rootBlockNumber; - } + public long getRootBlockNumber() { + return rootBlockNumber; + } - public ParititionId getPartitionId() { - return partitionId; - } + public ParititionId getPartitionId() { + return partitionId; + } - public static Stream toStream(final List pdbFiles, final PartitionDiskStore diskStorage) { + public static Stream toStream(final List pdbFiles, final PartitionDiskStore diskStorage) { - final Stream longStream = pdbFiles.stream().flatMap(new PdbFileToLongStream(diskStorage)); + final Stream longStream = pdbFiles.stream().flatMap(new PdbFileToLongStream(diskStorage)); - return longStream; - } + return longStream; + } - @Override - public String toString() { - return "PdbFile [tags=" + tags + ", rootBlockNumber=" + rootBlockNumber + ", partitionId="+partitionId+"]"; - } + @Override + public String toString() { + return "PdbFile [tags=" + tags + ", rootBlockNumber=" + rootBlockNumber + ", partitionId=" + partitionId + "]"; + } - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + (int) (rootBlockNumber ^ (rootBlockNumber >>> 32)); - result = prime * result + ((tags == null) ? 
0 : tags.hashCode()); - return result; - } + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + (int) (rootBlockNumber ^ (rootBlockNumber >>> 32)); + result = prime * result + ((tags == null) ? 0 : tags.hashCode()); + return result; + } - @Override - public boolean equals(final Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - final PdbFile other = (PdbFile) obj; - if (rootBlockNumber != other.rootBlockNumber) - return false; - if (tags == null) { - if (other.tags != null) - return false; - } else if (!tags.equals(other.tags)) - return false; - return true; - } + @Override + public boolean equals(final Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + final PdbFile other = (PdbFile) obj; + if (rootBlockNumber != other.rootBlockNumber) + return false; + if (tags == null) { + if (other.tags != null) + return false; + } else if (!tags.equals(other.tags)) + return false; + return true; + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/Proposal.java b/data-store/src/main/java/org/lucares/pdb/datastore/Proposal.java index 40137c7..57680fc 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/Proposal.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/Proposal.java @@ -1,105 +1,105 @@ package org.lucares.pdb.datastore; public class Proposal implements Comparable { - private final String proposedTag; + private final String proposedTag; - private final String proposedQuery; + private final String proposedQuery; - private final boolean hasResults; + private final boolean hasResults; - private final String newQuery; + private final String newQuery; - private final int newCaretPosition; + private final int newCaretPosition; - public Proposal(final String proposedTag, final String proposedQuery, final boolean 
hasResults, - final String newQuery, final int newCaretPosition) { - super(); - this.proposedTag = proposedTag; - this.proposedQuery = proposedQuery; - this.hasResults = hasResults; - this.newQuery = newQuery; - this.newCaretPosition = newCaretPosition; - } + public Proposal(final String proposedTag, final String proposedQuery, final boolean hasResults, + final String newQuery, final int newCaretPosition) { + super(); + this.proposedTag = proposedTag; + this.proposedQuery = proposedQuery; + this.hasResults = hasResults; + this.newQuery = newQuery; + this.newCaretPosition = newCaretPosition; + } - public Proposal(final Proposal proposal, final boolean hasResults) { - this.proposedTag = proposal.proposedTag; - this.proposedQuery = proposal.proposedQuery; - this.hasResults = hasResults; - this.newQuery = proposal.newQuery; - this.newCaretPosition = proposal.newCaretPosition; - } + public Proposal(final Proposal proposal, final boolean hasResults) { + this.proposedTag = proposal.proposedTag; + this.proposedQuery = proposal.proposedQuery; + this.hasResults = hasResults; + this.newQuery = proposal.newQuery; + this.newCaretPosition = proposal.newCaretPosition; + } - public String getProposedTag() { - return proposedTag; - } + public String getProposedTag() { + return proposedTag; + } - public String getProposedQuery() { - return proposedQuery; - } + public String getProposedQuery() { + return proposedQuery; + } - public boolean hasResults() { - return hasResults; - } + public boolean hasResults() { + return hasResults; + } - public String getNewQuery() { - return newQuery; - } + public String getNewQuery() { + return newQuery; + } - public int getNewCaretPosition() { - return newCaretPosition; - } + public int getNewCaretPosition() { + return newCaretPosition; + } - @Override - public String toString() { - return "Proposal [proposedTag=" + proposedTag + ", proposedQuery=" + proposedQuery + ", hasResults=" - + hasResults + ", newQuery=" + newQuery + ", newCaretPosition=" + 
newCaretPosition + "]"; - } + @Override + public String toString() { + return "Proposal [proposedTag=" + proposedTag + ", proposedQuery=" + proposedQuery + ", hasResults=" + + hasResults + ", newQuery=" + newQuery + ", newCaretPosition=" + newCaretPosition + "]"; + } - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + (hasResults ? 1231 : 1237); - result = prime * result + newCaretPosition; - result = prime * result + ((newQuery == null) ? 0 : newQuery.hashCode()); - result = prime * result + ((proposedQuery == null) ? 0 : proposedQuery.hashCode()); - result = prime * result + ((proposedTag == null) ? 0 : proposedTag.hashCode()); - return result; - } + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + (hasResults ? 1231 : 1237); + result = prime * result + newCaretPosition; + result = prime * result + ((newQuery == null) ? 0 : newQuery.hashCode()); + result = prime * result + ((proposedQuery == null) ? 0 : proposedQuery.hashCode()); + result = prime * result + ((proposedTag == null) ? 
0 : proposedTag.hashCode()); + return result; + } - @Override - public boolean equals(final Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - final Proposal other = (Proposal) obj; - if (hasResults != other.hasResults) - return false; - if (newCaretPosition != other.newCaretPosition) - return false; - if (newQuery == null) { - if (other.newQuery != null) - return false; - } else if (!newQuery.equals(other.newQuery)) - return false; - if (proposedQuery == null) { - if (other.proposedQuery != null) - return false; - } else if (!proposedQuery.equals(other.proposedQuery)) - return false; - if (proposedTag == null) { - if (other.proposedTag != null) - return false; - } else if (!proposedTag.equals(other.proposedTag)) - return false; - return true; - } + @Override + public boolean equals(final Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + final Proposal other = (Proposal) obj; + if (hasResults != other.hasResults) + return false; + if (newCaretPosition != other.newCaretPosition) + return false; + if (newQuery == null) { + if (other.newQuery != null) + return false; + } else if (!newQuery.equals(other.newQuery)) + return false; + if (proposedQuery == null) { + if (other.proposedQuery != null) + return false; + } else if (!proposedQuery.equals(other.proposedQuery)) + return false; + if (proposedTag == null) { + if (other.proposedTag != null) + return false; + } else if (!proposedTag.equals(other.proposedTag)) + return false; + return true; + } - @Override - public int compareTo(final Proposal o) { - return proposedTag.compareTo(o.getProposedTag()); - } + @Override + public int compareTo(final Proposal o) { + return proposedTag.compareTo(o.getProposedTag()); + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/ReadException.java 
b/data-store/src/main/java/org/lucares/pdb/datastore/ReadException.java index bb77891..34d82bd 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/ReadException.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/ReadException.java @@ -2,9 +2,9 @@ package org.lucares.pdb.datastore; public class ReadException extends RuntimeException { - private static final long serialVersionUID = 1L; + private static final long serialVersionUID = 1L; - public ReadException(final RuntimeException e) { - super(e); - } + public ReadException(final RuntimeException e) { + super(e); + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/ReadRuntimeException.java b/data-store/src/main/java/org/lucares/pdb/datastore/ReadRuntimeException.java index b67e448..f6532e7 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/ReadRuntimeException.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/ReadRuntimeException.java @@ -2,17 +2,17 @@ package org.lucares.pdb.datastore; public class ReadRuntimeException extends RuntimeException { - private static final long serialVersionUID = 1L; + private static final long serialVersionUID = 1L; - public ReadRuntimeException(final String message, final Throwable cause) { - super(message, cause); - } + public ReadRuntimeException(final String message, final Throwable cause) { + super(message, cause); + } - public ReadRuntimeException(final String message) { - super(message); - } + public ReadRuntimeException(final String message) { + super(message); + } - public ReadRuntimeException(final Throwable cause) { - super(cause); - } + public ReadRuntimeException(final Throwable cause) { + super(cause); + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/WriteException.java b/data-store/src/main/java/org/lucares/pdb/datastore/WriteException.java index 7a569a2..7d8d8bc 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/WriteException.java +++ 
b/data-store/src/main/java/org/lucares/pdb/datastore/WriteException.java @@ -2,14 +2,14 @@ package org.lucares.pdb.datastore; public class WriteException extends RuntimeException { - private static final long serialVersionUID = 1L; + private static final long serialVersionUID = 1L; - public WriteException(final String message, final Throwable cause) { - super(message, cause); - } + public WriteException(final String message, final Throwable cause) { + super(message, cause); + } - public WriteException(final Throwable cause) { - super(cause); - } + public WriteException(final Throwable cause) { + super(cause); + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/internal/DataStore.java b/data-store/src/main/java/org/lucares/pdb/datastore/internal/DataStore.java index 5eb705a..dba14ac 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/internal/DataStore.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/internal/DataStore.java @@ -39,381 +39,381 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class DataStore implements AutoCloseable { - private static final String ALL_DOCS_KEY = "\ue001allDocs"; // \ue001 is the second character in the private use - // area - private static final Logger EXECUTE_QUERY_LOGGER = LoggerFactory - .getLogger("org.lucares.metrics.dataStore.executeQuery"); - private static final Logger MAP_DOCS_TO_DOCID = LoggerFactory - .getLogger("org.lucares.metrics.dataStore.mapDocsToDocID"); - private final static Logger METRICS_LOGGER_NEW_WRITER = LoggerFactory - .getLogger("org.lucares.metrics.dataStore.newPdbWriter"); - private static final Logger LOGGER = LoggerFactory.getLogger(DataStore.class); + private static final String ALL_DOCS_KEY = "\ue001allDocs"; // \ue001 is the second character in the private use + // area + private static final Logger EXECUTE_QUERY_LOGGER = LoggerFactory + .getLogger("org.lucares.metrics.dataStore.executeQuery"); + private static final Logger MAP_DOCS_TO_DOCID 
= LoggerFactory + .getLogger("org.lucares.metrics.dataStore.mapDocsToDocID"); + private final static Logger METRICS_LOGGER_NEW_WRITER = LoggerFactory + .getLogger("org.lucares.metrics.dataStore.newPdbWriter"); + private static final Logger LOGGER = LoggerFactory.getLogger(DataStore.class); - public static final char LISTING_FILE_SEPARATOR = ','; + public static final char LISTING_FILE_SEPARATOR = ','; - public static final String SUBDIR_STORAGE = "storage"; + public static final String SUBDIR_STORAGE = "storage"; - // used to generate doc ids that are - // a) unique - // b) monotonically increasing (this is, so that we don't have to sort the doc - // ids when getting them from the BSFiles) - private static final AtomicLong NEXT_DOC_ID = new AtomicLong(System.currentTimeMillis()); + // used to generate doc ids that are + // a) unique + // b) monotonically increasing (this is, so that we don't have to sort the doc + // ids when getting them from the BSFiles) + private static final AtomicLong NEXT_DOC_ID = new AtomicLong(System.currentTimeMillis()); - public static Tag TAG_ALL_DOCS = null; + public static Tag TAG_ALL_DOCS = null; - private final PartitionPersistentMap docIdToDoc; + private final PartitionPersistentMap docIdToDoc; - private final PartitionPersistentMap tagsToDocId; + private final PartitionPersistentMap tagsToDocId; - private final PartitionPersistentMap tagToDocsId; + private final PartitionPersistentMap tagToDocsId; - private final QueryCompletionIndex queryCompletionIndex; + private final QueryCompletionIndex queryCompletionIndex; - // A Doc will never be changed once it is created. Therefore we can cache them - // easily. - private final HotEntryCache docIdToDocCache = new HotEntryCache<>(Duration.ofMinutes(30), 100_000); + // A Doc will never be changed once it is created. Therefore we can cache them + // easily. 
+ private final HotEntryCache docIdToDocCache = new HotEntryCache<>(Duration.ofMinutes(30), 100_000); - private final HotEntryCache writerCache; + private final HotEntryCache writerCache; - private final PartitionDiskStore diskStorage; - private final Path storageBasePath; + private final PartitionDiskStore diskStorage; + private final Path storageBasePath; - public DataStore(final Path dataDirectory) throws IOException { - storageBasePath = storageDirectory(dataDirectory); + public DataStore(final Path dataDirectory) throws IOException { + storageBasePath = storageDirectory(dataDirectory); - Tags.STRING_COMPRESSOR = StringCompressor.create(keyCompressionFile(storageBasePath)); - Tags.STRING_COMPRESSOR.put(ALL_DOCS_KEY); - Tags.STRING_COMPRESSOR.put(""); - TAG_ALL_DOCS = new Tag(ALL_DOCS_KEY, ""); // Tag(String, String) uses the StringCompressor internally, so it - // must be initialized after the string compressor has been created + Tags.STRING_COMPRESSOR = StringCompressor.create(keyCompressionFile(storageBasePath)); + Tags.STRING_COMPRESSOR.put(ALL_DOCS_KEY); + Tags.STRING_COMPRESSOR.put(""); + TAG_ALL_DOCS = new Tag(ALL_DOCS_KEY, ""); // Tag(String, String) uses the StringCompressor internally, so it + // must be initialized after the string compressor has been created - diskStorage = new PartitionDiskStore(storageBasePath, "data.bs"); + diskStorage = new PartitionDiskStore(storageBasePath, "data.bs"); - tagToDocsId = new PartitionPersistentMap<>(storageBasePath, "keyToValueToDocIdsIndex.bs", - new TagEncoderDecoder(), PartitionAwareWrapper.wrap(PersistentMap.LONG_CODER)); + tagToDocsId = new PartitionPersistentMap<>(storageBasePath, "keyToValueToDocIdsIndex.bs", + new TagEncoderDecoder(), PartitionAwareWrapper.wrap(PersistentMap.LONG_CODER)); - tagsToDocId = new PartitionPersistentMap<>(storageBasePath, "tagsToDocIdIndex.bs", new TagsEncoderDecoder(), - PartitionAwareWrapper.wrap(PersistentMap.LONG_CODER)); + tagsToDocId = new 
PartitionPersistentMap<>(storageBasePath, "tagsToDocIdIndex.bs", new TagsEncoderDecoder(), + PartitionAwareWrapper.wrap(PersistentMap.LONG_CODER)); - docIdToDoc = new PartitionPersistentMap<>(storageBasePath, "docIdToDocIndex.bs", PersistentMap.LONG_CODER, - new DocEncoderDecoder()); + docIdToDoc = new PartitionPersistentMap<>(storageBasePath, "docIdToDocIndex.bs", PersistentMap.LONG_CODER, + new DocEncoderDecoder()); - queryCompletionIndex = new QueryCompletionIndex(storageBasePath); + queryCompletionIndex = new QueryCompletionIndex(storageBasePath); - writerCache = new HotEntryCache<>(Duration.ofSeconds(10), 1000); - writerCache.addListener((key, value) -> value.close()); - } + writerCache = new HotEntryCache<>(Duration.ofSeconds(10), 1000); + writerCache.addListener((key, value) -> value.close()); + } - private Path keyCompressionFile(final Path dataDirectory) throws IOException { - return dataDirectory.resolve("keys.csv"); - } + private Path keyCompressionFile(final Path dataDirectory) throws IOException { + return dataDirectory.resolve("keys.csv"); + } - public static Path storageDirectory(final Path dataDirectory) throws IOException { - return dataDirectory.resolve(SUBDIR_STORAGE); - } + public static Path storageDirectory(final Path dataDirectory) throws IOException { + return dataDirectory.resolve(SUBDIR_STORAGE); + } - public void write(final long dateAsEpochMilli, final Tags tags, final long value) { - final ParititionId partitionId = DateIndexExtension.toPartitionId(dateAsEpochMilli); - final PdbWriter writer = getWriter(partitionId, tags); - writer.write(dateAsEpochMilli, value); - } + public void write(final long dateAsEpochMilli, final Tags tags, final long value) { + final ParititionId partitionId = DateIndexExtension.toPartitionId(dateAsEpochMilli); + final PdbWriter writer = getWriter(partitionId, tags); + writer.write(dateAsEpochMilli, value); + } - // visible for test - QueryCompletionIndex getQueryCompletionIndex() { - return 
queryCompletionIndex; - } + // visible for test + QueryCompletionIndex getQueryCompletionIndex() { + return queryCompletionIndex; + } - public long createNewFile(final ParititionId partitionId, final Tags tags) { - try { - final long newFilesRootBlockOffset = diskStorage.allocateBlock(partitionId, BSFile.BLOCK_SIZE); + public long createNewFile(final ParititionId partitionId, final Tags tags) { + try { + final long newFilesRootBlockOffset = diskStorage.allocateBlock(partitionId, BSFile.BLOCK_SIZE); - final long docId = createUniqueDocId(); - final Doc doc = new Doc(partitionId, tags, newFilesRootBlockOffset); - docIdToDoc.putValue(partitionId, docId, doc); + final long docId = createUniqueDocId(); + final Doc doc = new Doc(partitionId, tags, newFilesRootBlockOffset); + docIdToDoc.putValue(partitionId, docId, doc); - final Long oldDocId = tagsToDocId.putValue(partitionId, tags, docId); - Preconditions.checkNull(oldDocId, "There must be at most one document for tags: {0}", tags); + final Long oldDocId = tagsToDocId.putValue(partitionId, tags, docId); + Preconditions.checkNull(oldDocId, "There must be at most one document for tags: {0}", tags); - // store mapping from tag to docId, so that we can find all docs for a given tag - final List ts = new ArrayList<>(tags.toTags()); - ts.add(TAG_ALL_DOCS); - for (final Tag tag : ts) { + // store mapping from tag to docId, so that we can find all docs for a given tag + final List ts = new ArrayList<>(tags.toTags()); + ts.add(TAG_ALL_DOCS); + for (final Tag tag : ts) { - Long diskStoreOffsetForDocIdsOfTag = tagToDocsId.getValue(partitionId, tag); + Long diskStoreOffsetForDocIdsOfTag = tagToDocsId.getValue(partitionId, tag); - if (diskStoreOffsetForDocIdsOfTag == null) { - diskStoreOffsetForDocIdsOfTag = diskStorage.allocateBlock(partitionId, BSFile.BLOCK_SIZE); - tagToDocsId.putValue(partitionId, tag, diskStoreOffsetForDocIdsOfTag); - } + if (diskStoreOffsetForDocIdsOfTag == null) { + diskStoreOffsetForDocIdsOfTag = 
diskStorage.allocateBlock(partitionId, BSFile.BLOCK_SIZE); + tagToDocsId.putValue(partitionId, tag, diskStoreOffsetForDocIdsOfTag); + } - try (final LongStreamFile docIdsOfTag = diskStorage.streamExistingFile(diskStoreOffsetForDocIdsOfTag, - partitionId)) { - docIdsOfTag.append(docId); - } - } + try (final LongStreamFile docIdsOfTag = diskStorage.streamExistingFile(diskStoreOffsetForDocIdsOfTag, + partitionId)) { + docIdsOfTag.append(docId); + } + } - // index the tags, so that we can efficiently find all possible values for a - // field in a query - queryCompletionIndex.addTags(partitionId, tags); + // index the tags, so that we can efficiently find all possible values for a + // field in a query + queryCompletionIndex.addTags(partitionId, tags); - return newFilesRootBlockOffset; - } catch (final IOException e) { - throw new RuntimeIOException(e); - } - } + return newFilesRootBlockOffset; + } catch (final IOException e) { + throw new RuntimeIOException(e); + } + } - private long createUniqueDocId() { - return NEXT_DOC_ID.getAndIncrement(); - } + private long createUniqueDocId() { + return NEXT_DOC_ID.getAndIncrement(); + } - public List getFilesForQuery(final Query query) { + public List getFilesForQuery(final Query query) { - final List searchResult = search(query); - if (searchResult.size() > 500_000) { - throw new IllegalStateException("Too many results."); - } + final List searchResult = search(query); + if (searchResult.size() > 500_000) { + throw new IllegalStateException("Too many results."); + } - final List result = toPdbFiles(searchResult); - return result; - } + final List result = toPdbFiles(searchResult); + return result; + } - private List toPdbFiles(final List searchResult) { - final List result = new ArrayList<>(searchResult.size()); - for (final Doc document : searchResult) { + private List toPdbFiles(final List searchResult) { + final List result = new ArrayList<>(searchResult.size()); + for (final Doc document : searchResult) { - final 
ParititionId partitionId = document.getPartitionId(); - final long rootBlockNumber = document.getRootBlockNumber(); - final Tags tags = document.getTags(); - final PdbFile pdbFile = new PdbFile(partitionId, rootBlockNumber, tags); + final ParititionId partitionId = document.getPartitionId(); + final long rootBlockNumber = document.getRootBlockNumber(); + final Tags tags = document.getTags(); + final PdbFile pdbFile = new PdbFile(partitionId, rootBlockNumber, tags); - result.add(pdbFile); - } - return result; - } + result.add(pdbFile); + } + return result; + } - public List search(final Query query) { - try { - final List result = new ArrayList<>(); + public List search(final Query query) { + try { + final List result = new ArrayList<>(); - final PartitionLongList docIdsList = executeQuery(query); - LOGGER.trace("query {} found {} docs", query, docIdsList.size()); - final List docs = mapDocIdsToDocs(docIdsList); - result.addAll(docs); - - return result; - } catch (final IOException e) { - throw new RuntimeIOException(e); - } - } - - public int count(final Query query) { - final PartitionLongList docIdsList = executeQuery(query); - return docIdsList.size(); - } - - public List getAvailableFields(final DateTimeRange dateRange) { - - final Set keys = new HashSet<>(); - - final Tag keyPrefix = new Tag("", ""); // will find everything - - final PartitionIdSource partitionIdSource = new DatePartitioner(dateRange); - tagToDocsId.visitValues(partitionIdSource, keyPrefix, (tags, __) -> keys.add(tags.getKeyAsString())); - - keys.remove(ALL_DOCS_KEY); - final List result = new ArrayList<>(keys); - Collections.sort(result); - return result; - - } - - private PartitionLongList executeQuery(final Query query) { - final long start = System.nanoTime(); - synchronized (docIdToDoc) { - final Expression expression = QueryLanguageParser.parse(query.getQuery()); - final ExpressionToDocIdVisitor visitor = new ExpressionToDocIdVisitor(query.getDateRange(), tagToDocsId, - diskStorage); - 
final PartitionLongList docIdsList = expression.visit(visitor); - EXECUTE_QUERY_LOGGER.debug("executeQuery({}) took {}ms returned {} results ", query, - (System.nanoTime() - start) / 1_000_000.0, docIdsList.size()); - return docIdsList; - } - } - - private List mapDocIdsToDocs(final PartitionLongList docIdsList) throws IOException { - final List result = new ArrayList<>(docIdsList.size()); - - synchronized (docIdToDoc) { - final long start = System.nanoTime(); - - for (final ParititionId partitionId : docIdsList) { - final LongList docIds = docIdsList.get(partitionId); - - for (int i = 0; i < docIds.size(); i++) { - final long docId = docIds.get(i); - - final Doc doc = getDocByDocId(partitionId, docId); - Objects.requireNonNull(doc, "Doc with id " + docId + " did not exist."); - - result.add(doc); - } - } - - MAP_DOCS_TO_DOCID.debug("mapDocIdsToDocs({}): {}ms", docIdsList.size(), - (System.nanoTime() - start) / 1_000_000.0); - } - return result; - } - - public Optional getByTags(final ParititionId partitionId, final Tags tags) { - - final Long docId = tagsToDocId.getValue(partitionId, tags); - - if (docId != null) { - final Doc doc = getDocByDocId(partitionId, docId); - return Optional.of(doc); - } - - return Optional.empty(); - } - - public List getByTags(final DateTimeRange dateRange, final Tags tags) { - - final List result = new ArrayList<>(); - final DatePartitioner datePartitioner = new DatePartitioner(dateRange); - final List docIds = tagsToDocId.getValues(datePartitioner, tags); - for (final Long docId : docIds) { - - if (docId != null) { - final Doc doc = getDocByDocId(dateRange, docId); - result.add(doc); - } - } - - return result; - } - - private Doc getDocByDocId(final ParititionId partitionId, final Long docId) { - return docIdToDocCache.putIfAbsent(docId, documentId -> { - return docIdToDoc.getValue(partitionId, documentId); - }); - } - - private Doc getDocByDocId(final DateTimeRange dateRange, final Long docId) { - return 
docIdToDocCache.putIfAbsent(docId, documentId -> { - - final DatePartitioner datePartitioner = new DatePartitioner(dateRange); - final List docIds = docIdToDoc.getValues(datePartitioner, documentId); - if (docIds.size() == 1) { - return docIds.get(0); - } else if (docIds.size() > 1) { - throw new IllegalStateException( - "Found multiple documents for " + dateRange + " and docId " + documentId + ": " + docIds); - } - throw new IllegalStateException("Found no documents for " + dateRange + " and docId " + documentId); - }); - } - - public List propose(final QueryWithCaretMarker query) { - - final NewProposerParser newProposerParser = new NewProposerParser(queryCompletionIndex); - final List proposals = newProposerParser.propose(query); - LOGGER.debug("Proposals for query {}: {}", query, proposals); - return proposals; - } - - public PartitionDiskStore getDiskStorage() { - return diskStorage; - } - - private PdbWriter getWriter(final ParititionId partitionId, final Tags tags) throws ReadException, WriteException { - - return writerCache.putIfAbsent(tags, t -> getWriterInternal(partitionId, tags)); - } - - // visible for test - long sizeWriterCache() { - return writerCache.size(); - } - - private PdbWriter getWriterInternal(final ParititionId partitionId, final Tags tags) { - final Optional docsForTags = getByTags(partitionId, tags); - PdbWriter writer; - if (docsForTags.isPresent()) { - try { - final Doc doc = docsForTags.get(); - final PdbFile pdbFile = new PdbFile(partitionId, doc.getRootBlockNumber(), tags); - writer = new PdbWriter(pdbFile, diskStorage.getExisting(partitionId)); - } catch (final RuntimeException e) { - throw new ReadException(e); - } - } else { - writer = newPdbWriter(partitionId, tags); - } - return writer; - } - - private PdbWriter newPdbWriter(final ParititionId partitionId, final Tags tags) { - final long start = System.nanoTime(); - try { - final PdbFile pdbFile = createNewPdbFile(partitionId, tags); - final PdbWriter result = new 
PdbWriter(pdbFile, diskStorage.getExisting(partitionId)); - - METRICS_LOGGER_NEW_WRITER.debug("newPdbWriter took {}ms tags: {}", - (System.nanoTime() - start) / 1_000_000.0, tags); - return result; - } catch (final RuntimeException e) { - throw new WriteException(e); - } - } - - private PdbFile createNewPdbFile(final ParititionId partitionId, final Tags tags) { - - final long rootBlockNumber = createNewFile(partitionId, tags); - - final PdbFile result = new PdbFile(partitionId, rootBlockNumber, tags); - return result; - } - - @Override - public void close() throws RuntimeIOException { - try { - // we cannot simply clear the cache, because the cache implementation (Guava at - // the time of writing) handles eviction events asynchronously. - forEachWriter(cachedWriter -> { - try { - cachedWriter.close(); - } catch (final Exception e) { - throw new WriteException(e); - } - }); - } finally { - try { - diskStorage.close(); - } finally { - tagToDocsId.close(); - } - } - } - - private void forEachWriter(final Consumer consumer) { - writerCache.forEach(writer -> { - try { - consumer.accept(writer); - } catch (final RuntimeException e) { - LOGGER.warn("Exception while applying consumer to PdbWriter for " + writer.getPdbFile(), e); - } - }); - } - - public void flush() { - forEachWriter(t -> { - try { - t.flush(); - } catch (final Exception e) { - throw new WriteException(e); - } - }); - } + final PartitionLongList docIdsList = executeQuery(query); + LOGGER.trace("query {} found {} docs", query, docIdsList.size()); + final List docs = mapDocIdsToDocs(docIdsList); + result.addAll(docs); + + return result; + } catch (final IOException e) { + throw new RuntimeIOException(e); + } + } + + public int count(final Query query) { + final PartitionLongList docIdsList = executeQuery(query); + return docIdsList.size(); + } + + public List getAvailableFields(final DateTimeRange dateRange) { + + final Set keys = new HashSet<>(); + + final Tag keyPrefix = new Tag("", ""); // will find 
everything + + final PartitionIdSource partitionIdSource = new DatePartitioner(dateRange); + tagToDocsId.visitValues(partitionIdSource, keyPrefix, (tags, __) -> keys.add(tags.getKeyAsString())); + + keys.remove(ALL_DOCS_KEY); + final List result = new ArrayList<>(keys); + Collections.sort(result); + return result; + + } + + private PartitionLongList executeQuery(final Query query) { + final long start = System.nanoTime(); + synchronized (docIdToDoc) { + final Expression expression = QueryLanguageParser.parse(query.getQuery()); + final ExpressionToDocIdVisitor visitor = new ExpressionToDocIdVisitor(query.getDateRange(), tagToDocsId, + diskStorage); + final PartitionLongList docIdsList = expression.visit(visitor); + EXECUTE_QUERY_LOGGER.debug("executeQuery({}) took {}ms returned {} results ", query, + (System.nanoTime() - start) / 1_000_000.0, docIdsList.size()); + return docIdsList; + } + } + + private List mapDocIdsToDocs(final PartitionLongList docIdsList) throws IOException { + final List result = new ArrayList<>(docIdsList.size()); + + synchronized (docIdToDoc) { + final long start = System.nanoTime(); + + for (final ParititionId partitionId : docIdsList) { + final LongList docIds = docIdsList.get(partitionId); + + for (int i = 0; i < docIds.size(); i++) { + final long docId = docIds.get(i); + + final Doc doc = getDocByDocId(partitionId, docId); + Objects.requireNonNull(doc, "Doc with id " + docId + " did not exist."); + + result.add(doc); + } + } + + MAP_DOCS_TO_DOCID.debug("mapDocIdsToDocs({}): {}ms", docIdsList.size(), + (System.nanoTime() - start) / 1_000_000.0); + } + return result; + } + + public Optional getByTags(final ParititionId partitionId, final Tags tags) { + + final Long docId = tagsToDocId.getValue(partitionId, tags); + + if (docId != null) { + final Doc doc = getDocByDocId(partitionId, docId); + return Optional.of(doc); + } + + return Optional.empty(); + } + + public List getByTags(final DateTimeRange dateRange, final Tags tags) { + + final List 
result = new ArrayList<>(); + final DatePartitioner datePartitioner = new DatePartitioner(dateRange); + final List docIds = tagsToDocId.getValues(datePartitioner, tags); + for (final Long docId : docIds) { + + if (docId != null) { + final Doc doc = getDocByDocId(dateRange, docId); + result.add(doc); + } + } + + return result; + } + + private Doc getDocByDocId(final ParititionId partitionId, final Long docId) { + return docIdToDocCache.putIfAbsent(docId, documentId -> { + return docIdToDoc.getValue(partitionId, documentId); + }); + } + + private Doc getDocByDocId(final DateTimeRange dateRange, final Long docId) { + return docIdToDocCache.putIfAbsent(docId, documentId -> { + + final DatePartitioner datePartitioner = new DatePartitioner(dateRange); + final List docIds = docIdToDoc.getValues(datePartitioner, documentId); + if (docIds.size() == 1) { + return docIds.get(0); + } else if (docIds.size() > 1) { + throw new IllegalStateException( + "Found multiple documents for " + dateRange + " and docId " + documentId + ": " + docIds); + } + throw new IllegalStateException("Found no documents for " + dateRange + " and docId " + documentId); + }); + } + + public List propose(final QueryWithCaretMarker query) { + + final NewProposerParser newProposerParser = new NewProposerParser(queryCompletionIndex); + final List proposals = newProposerParser.propose(query); + LOGGER.debug("Proposals for query {}: {}", query, proposals); + return proposals; + } + + public PartitionDiskStore getDiskStorage() { + return diskStorage; + } + + private PdbWriter getWriter(final ParititionId partitionId, final Tags tags) throws ReadException, WriteException { + + return writerCache.putIfAbsent(tags, t -> getWriterInternal(partitionId, tags)); + } + + // visible for test + long sizeWriterCache() { + return writerCache.size(); + } + + private PdbWriter getWriterInternal(final ParititionId partitionId, final Tags tags) { + final Optional docsForTags = getByTags(partitionId, tags); + PdbWriter writer; 
+ if (docsForTags.isPresent()) { + try { + final Doc doc = docsForTags.get(); + final PdbFile pdbFile = new PdbFile(partitionId, doc.getRootBlockNumber(), tags); + writer = new PdbWriter(pdbFile, diskStorage.getExisting(partitionId)); + } catch (final RuntimeException e) { + throw new ReadException(e); + } + } else { + writer = newPdbWriter(partitionId, tags); + } + return writer; + } + + private PdbWriter newPdbWriter(final ParititionId partitionId, final Tags tags) { + final long start = System.nanoTime(); + try { + final PdbFile pdbFile = createNewPdbFile(partitionId, tags); + final PdbWriter result = new PdbWriter(pdbFile, diskStorage.getExisting(partitionId)); + + METRICS_LOGGER_NEW_WRITER.debug("newPdbWriter took {}ms tags: {}", + (System.nanoTime() - start) / 1_000_000.0, tags); + return result; + } catch (final RuntimeException e) { + throw new WriteException(e); + } + } + + private PdbFile createNewPdbFile(final ParititionId partitionId, final Tags tags) { + + final long rootBlockNumber = createNewFile(partitionId, tags); + + final PdbFile result = new PdbFile(partitionId, rootBlockNumber, tags); + return result; + } + + @Override + public void close() throws RuntimeIOException { + try { + // we cannot simply clear the cache, because the cache implementation (Guava at + // the time of writing) handles eviction events asynchronously. 
+ forEachWriter(cachedWriter -> { + try { + cachedWriter.close(); + } catch (final Exception e) { + throw new WriteException(e); + } + }); + } finally { + try { + diskStorage.close(); + } finally { + tagToDocsId.close(); + } + } + } + + private void forEachWriter(final Consumer consumer) { + writerCache.forEach(writer -> { + try { + consumer.accept(writer); + } catch (final RuntimeException e) { + LOGGER.warn("Exception while applying consumer to PdbWriter for " + writer.getPdbFile(), e); + } + }); + } + + public void flush() { + forEachWriter(t -> { + try { + t.flush(); + } catch (final Exception e) { + throw new WriteException(e); + } + }); + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/internal/DateIndexExtension.java b/data-store/src/main/java/org/lucares/pdb/datastore/internal/DateIndexExtension.java index 6611330..8f498b7 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/internal/DateIndexExtension.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/internal/DateIndexExtension.java @@ -19,178 +19,178 @@ import org.lucares.pdb.api.DateTimeRange; public class DateIndexExtension { - /** - * This date pattern defines the resolution of the date index - */ - private static final DateTimeFormatter DATE_PATTERN = DateTimeFormatter.ofPattern("yyyyMM"); + /** + * This date pattern defines the resolution of the date index + */ + private static final DateTimeFormatter DATE_PATTERN = DateTimeFormatter.ofPattern("yyyyMM"); - // visible for test - static final ConcurrentNavigableMap DATE_PREFIX_CACHE = new ConcurrentSkipListMap<>(); + // visible for test + static final ConcurrentNavigableMap DATE_PREFIX_CACHE = new ConcurrentSkipListMap<>(); - private static final AtomicReference LAST_ACCESSED = new AtomicReference<>(null); + private static final AtomicReference LAST_ACCESSED = new AtomicReference<>(null); - static Set toDateIndexPrefix(final DateTimeRange dateRange) { - final Set result = new TreeSet<>(); + static Set 
toDateIndexPrefix(final DateTimeRange dateRange) { + final Set result = new TreeSet<>(); - OffsetDateTime current = dateRange.getStart(); - while (current.isBefore(dateRange.getEnd())) { + OffsetDateTime current = dateRange.getStart(); + while (current.isBefore(dateRange.getEnd())) { - result.add(toDateIndexPrefix(current)); - current = current.plusMonths(1); + result.add(toDateIndexPrefix(current)); + current = current.plusMonths(1); - } - result.add(toDateIndexPrefix(dateRange.getEnd())); + } + result.add(toDateIndexPrefix(dateRange.getEnd())); - return result; - } + return result; + } - static String toDateIndexPrefix(final OffsetDateTime time) { - return time.format(DATE_PATTERN); - } + static String toDateIndexPrefix(final OffsetDateTime time) { + return time.format(DATE_PATTERN); + } - public static ParititionId toPartitionId(final long epochMilli) { - String result; - final DatePrefixAndRange lastAccessed = LAST_ACCESSED.get(); - if (lastAccessed != null && lastAccessed.getMinEpochMilli() <= epochMilli - && lastAccessed.getMaxEpochMilli() >= epochMilli) { - result = lastAccessed.getDatePrefix(); - } else { - final Entry value = DATE_PREFIX_CACHE.floorEntry(epochMilli); + public static ParititionId toPartitionId(final long epochMilli) { + String result; + final DatePrefixAndRange lastAccessed = LAST_ACCESSED.get(); + if (lastAccessed != null && lastAccessed.getMinEpochMilli() <= epochMilli + && lastAccessed.getMaxEpochMilli() >= epochMilli) { + result = lastAccessed.getDatePrefix(); + } else { + final Entry value = DATE_PREFIX_CACHE.floorEntry(epochMilli); - if (value == null || !value.getValue().contains(epochMilli)) { - final DatePrefixAndRange newValue = toDatePrefixAndRange(epochMilli); - DATE_PREFIX_CACHE.put(newValue.getMinEpochMilli(), newValue); - result = newValue.getDatePrefix(); - LAST_ACCESSED.set(newValue); - } else { - result = value.getValue().getDatePrefix(); - LAST_ACCESSED.set(value.getValue()); - } - } - return new ParititionId(result); - } 
+ if (value == null || !value.getValue().contains(epochMilli)) { + final DatePrefixAndRange newValue = toDatePrefixAndRange(epochMilli); + DATE_PREFIX_CACHE.put(newValue.getMinEpochMilli(), newValue); + result = newValue.getDatePrefix(); + LAST_ACCESSED.set(newValue); + } else { + result = value.getValue().getDatePrefix(); + LAST_ACCESSED.set(value.getValue()); + } + } + return new ParititionId(result); + } - public static String toDateIndexPrefix(final long epochMilli) { + public static String toDateIndexPrefix(final long epochMilli) { - final Entry value = DATE_PREFIX_CACHE.floorEntry(epochMilli); + final Entry value = DATE_PREFIX_CACHE.floorEntry(epochMilli); - String result; - if (value == null || !value.getValue().contains(epochMilli)) { - final DatePrefixAndRange newValue = toDatePrefixAndRange(epochMilli); - DATE_PREFIX_CACHE.put(newValue.getMinEpochMilli(), newValue); - result = newValue.getDatePrefix(); - } else { - result = value.getValue().getDatePrefix(); - } + String result; + if (value == null || !value.getValue().contains(epochMilli)) { + final DatePrefixAndRange newValue = toDatePrefixAndRange(epochMilli); + DATE_PREFIX_CACHE.put(newValue.getMinEpochMilli(), newValue); + result = newValue.getDatePrefix(); + } else { + result = value.getValue().getDatePrefix(); + } - return result; - } + return result; + } - /** - * only for tests, use toPartitionIds(final DateTimeRange dateRange,final - * Collection availablePartitionIds) instead - * - * @param dateRange - * @return - */ - static List toPartitionIds(final DateTimeRange dateRange) { - final List result = new ArrayList<>(); + /** + * only for tests, use toPartitionIds(final DateTimeRange dateRange,final + * Collection availablePartitionIds) instead + * + * @param dateRange + * @return + */ + static List toPartitionIds(final DateTimeRange dateRange) { + final List result = new ArrayList<>(); - OffsetDateTime current = dateRange.getStart(); - final OffsetDateTime end = dateRange.getEnd(); - current = 
current.withOffsetSameInstant(ZoneOffset.UTC).withDayOfMonth(1).withHour(0).withMinute(0) - .withSecond(0).withNano(0); + OffsetDateTime current = dateRange.getStart(); + final OffsetDateTime end = dateRange.getEnd(); + current = current.withOffsetSameInstant(ZoneOffset.UTC).withDayOfMonth(1).withHour(0).withMinute(0) + .withSecond(0).withNano(0); - while (!current.isAfter(end)) { - final String id = current.format(DATE_PATTERN); - final ParititionId partitionId = new ParititionId(id); - result.add(partitionId); - current = current.plusMonths(1); - } + while (!current.isAfter(end)) { + final String id = current.format(DATE_PATTERN); + final ParititionId partitionId = new ParititionId(id); + result.add(partitionId); + current = current.plusMonths(1); + } - return result; - } + return result; + } - public static Set toPartitionIds(final DateTimeRange dateRange, - final Collection availablePartitionIds) { - final Set result = new LinkedHashSet<>(); + public static Set toPartitionIds(final DateTimeRange dateRange, + final Collection availablePartitionIds) { + final Set result = new LinkedHashSet<>(); - final ParititionId start = toPartitionId(dateRange.getStart().toInstant().toEpochMilli()); - final ParititionId end = toPartitionId(dateRange.getEnd().toInstant().toEpochMilli()); + final ParititionId start = toPartitionId(dateRange.getStart().toInstant().toEpochMilli()); + final ParititionId end = toPartitionId(dateRange.getEnd().toInstant().toEpochMilli()); - for (final ParititionId partitionId : availablePartitionIds) { - if (start.compareTo(partitionId) <= 0 && end.compareTo(partitionId) >= 0) { - result.add(partitionId); - } - } + for (final ParititionId partitionId : availablePartitionIds) { + if (start.compareTo(partitionId) <= 0 && end.compareTo(partitionId) >= 0) { + result.add(partitionId); + } + } - return result; - } + return result; + } - public static DatePrefixAndRange toDatePrefixAndRange(final long epochMilli) { - final OffsetDateTime date = 
Instant.ofEpochMilli(epochMilli).atOffset(ZoneOffset.UTC); - final OffsetDateTime beginOfMonth = date.withDayOfMonth(1).withHour(0).withMinute(0).withSecond(0).withNano(0); - final OffsetDateTime endOfMonth = beginOfMonth.plusMonths(1).minusNanos(1); + public static DatePrefixAndRange toDatePrefixAndRange(final long epochMilli) { + final OffsetDateTime date = Instant.ofEpochMilli(epochMilli).atOffset(ZoneOffset.UTC); + final OffsetDateTime beginOfMonth = date.withDayOfMonth(1).withHour(0).withMinute(0).withSecond(0).withNano(0); + final OffsetDateTime endOfMonth = beginOfMonth.plusMonths(1).minusNanos(1); - final String datePrefix = date.format(DATE_PATTERN); - final long minEpochMilli = beginOfMonth.toInstant().toEpochMilli(); - final long maxEpochMilli = endOfMonth.toInstant().toEpochMilli(); + final String datePrefix = date.format(DATE_PATTERN); + final long minEpochMilli = beginOfMonth.toInstant().toEpochMilli(); + final long maxEpochMilli = endOfMonth.toInstant().toEpochMilli(); - return new DatePrefixAndRange(datePrefix, minEpochMilli, maxEpochMilli); - } + return new DatePrefixAndRange(datePrefix, minEpochMilli, maxEpochMilli); + } - public static List toDateIndexEpochMillis(final DateTimeRange dateRange) { - final List result = new ArrayList<>(); + public static List toDateIndexEpochMillis(final DateTimeRange dateRange) { + final List result = new ArrayList<>(); - OffsetDateTime current = dateRange.getStart(); - final OffsetDateTime end = dateRange.getEnd(); - current = current.withOffsetSameInstant(ZoneOffset.UTC).withDayOfMonth(1).withHour(0).withMinute(0) - .withSecond(0).withNano(0); + OffsetDateTime current = dateRange.getStart(); + final OffsetDateTime end = dateRange.getEnd(); + current = current.withOffsetSameInstant(ZoneOffset.UTC).withDayOfMonth(1).withHour(0).withMinute(0) + .withSecond(0).withNano(0); - while (!current.isAfter(end)) { - result.add(current.toInstant().toEpochMilli()); - current = current.plusMonths(1); - } + while 
(!current.isAfter(end)) { + result.add(current.toInstant().toEpochMilli()); + current = current.plusMonths(1); + } - return result; - } + return result; + } - public static ParititionId now() { - return toPartitionId(System.currentTimeMillis()); - } + public static ParititionId now() { + return toPartitionId(System.currentTimeMillis()); + } } class DatePrefixAndRange { - private final String datePrefix; - private final long minEpochMilli; - private final long maxEpochMilli; + private final String datePrefix; + private final long minEpochMilli; + private final long maxEpochMilli; - public DatePrefixAndRange(final String datePrefix, final long minEpochMilli, final long maxEpochMilli) { - super(); - this.datePrefix = datePrefix; - this.minEpochMilli = minEpochMilli; - this.maxEpochMilli = maxEpochMilli; - } + public DatePrefixAndRange(final String datePrefix, final long minEpochMilli, final long maxEpochMilli) { + super(); + this.datePrefix = datePrefix; + this.minEpochMilli = minEpochMilli; + this.maxEpochMilli = maxEpochMilli; + } - public String getDatePrefix() { - return datePrefix; - } + public String getDatePrefix() { + return datePrefix; + } - public long getMinEpochMilli() { - return minEpochMilli; - } + public long getMinEpochMilli() { + return minEpochMilli; + } - public long getMaxEpochMilli() { - return maxEpochMilli; - } + public long getMaxEpochMilli() { + return maxEpochMilli; + } - public boolean contains(final long epochMilli) { - return minEpochMilli <= epochMilli && epochMilli <= maxEpochMilli; - } + public boolean contains(final long epochMilli) { + return minEpochMilli <= epochMilli && epochMilli <= maxEpochMilli; + } - @Override - public String toString() { - return datePrefix + " (" + minEpochMilli + " - " + maxEpochMilli + ")"; - } + @Override + public String toString() { + return datePrefix + " (" + minEpochMilli + " - " + maxEpochMilli + ")"; + } } \ No newline at end of file diff --git 
a/data-store/src/main/java/org/lucares/pdb/datastore/internal/DatePartitioner.java b/data-store/src/main/java/org/lucares/pdb/datastore/internal/DatePartitioner.java index 9fbc4d3..65bb4a4 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/internal/DatePartitioner.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/internal/DatePartitioner.java @@ -6,14 +6,14 @@ import org.lucares.pdb.api.DateTimeRange; public class DatePartitioner implements PartitionIdSource { - private final DateTimeRange dateRange; + private final DateTimeRange dateRange; - public DatePartitioner(final DateTimeRange dateRange) { - this.dateRange = dateRange; - } + public DatePartitioner(final DateTimeRange dateRange) { + this.dateRange = dateRange; + } - @Override - public Set toPartitionIds(final Set availablePartitions) { - return DateIndexExtension.toPartitionIds(dateRange, availablePartitions); - } + @Override + public Set toPartitionIds(final Set availablePartitions) { + return DateIndexExtension.toPartitionIds(dateRange, availablePartitions); + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/internal/DocEncoderDecoder.java b/data-store/src/main/java/org/lucares/pdb/datastore/internal/DocEncoderDecoder.java index dd005fc..1bfaf62 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/internal/DocEncoderDecoder.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/internal/DocEncoderDecoder.java @@ -8,43 +8,43 @@ import org.lucares.utils.byteencoder.VariableByteEncoder; class DocEncoderDecoder implements PartitionAwareEncoderDecoder { - @Override - public byte[] encode(final Doc doc) { + @Override + public byte[] encode(final Doc doc) { - final byte[] rootBlockNumber = VariableByteEncoder.encode(doc.getRootBlockNumber()); - final byte[] tags = doc.getTags().toBytes(); + final byte[] rootBlockNumber = VariableByteEncoder.encode(doc.getRootBlockNumber()); + final byte[] tags = doc.getTags().toBytes(); - final byte[] result = new 
byte[rootBlockNumber.length + tags.length]; + final byte[] result = new byte[rootBlockNumber.length + tags.length]; - System.arraycopy(rootBlockNumber, 0, result, 0, rootBlockNumber.length); - System.arraycopy(tags, 0, result, rootBlockNumber.length, tags.length); + System.arraycopy(rootBlockNumber, 0, result, 0, rootBlockNumber.length); + System.arraycopy(tags, 0, result, rootBlockNumber.length, tags.length); - return result; - } + return result; + } - @Override - public Doc decode(final byte[] bytes) { + @Override + public Doc decode(final byte[] bytes) { - final long rootBlockNumber = VariableByteEncoder.decodeFirstValue(bytes); - final int bytesRootBlockNumber = VariableByteEncoder.neededBytes(rootBlockNumber); - final Tags tags = Tags.fromBytes(Arrays.copyOfRange(bytes, bytesRootBlockNumber, bytes.length)); - return new Doc(null, tags, rootBlockNumber); - } + final long rootBlockNumber = VariableByteEncoder.decodeFirstValue(bytes); + final int bytesRootBlockNumber = VariableByteEncoder.neededBytes(rootBlockNumber); + final Tags tags = Tags.fromBytes(Arrays.copyOfRange(bytes, bytesRootBlockNumber, bytes.length)); + return new Doc(null, tags, rootBlockNumber); + } - @Override - public Doc encodeValue(final Doc v) { - return v; - } + @Override + public Doc encodeValue(final Doc v) { + return v; + } - @Override - public Doc decodeValue(final ParititionId partitionId, final Doc t) { - if (t != null) { - t.setPartitionId(partitionId); - } - return t; - } - - public byte[] getEmptyValue() { - return new byte[] {0}; - } + @Override + public Doc decodeValue(final ParititionId partitionId, final Doc t) { + if (t != null) { + t.setPartitionId(partitionId); + } + return t; + } + + public byte[] getEmptyValue() { + return new byte[] { 0 }; + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/internal/GlobMatcher.java b/data-store/src/main/java/org/lucares/pdb/datastore/internal/GlobMatcher.java index 9a97294..3ae1b58 100644 --- 
a/data-store/src/main/java/org/lucares/pdb/datastore/internal/GlobMatcher.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/internal/GlobMatcher.java @@ -7,18 +7,18 @@ import org.lucares.pdb.datastore.lang.GloblikePattern; public class GlobMatcher { - private final Pattern pattern; + private final Pattern pattern; - public GlobMatcher(final String globlike) { - pattern = GloblikePattern.globlikeToRegex(globlike); - } + public GlobMatcher(final String globlike) { + pattern = GloblikePattern.globlikeToRegex(globlike); + } - public GlobMatcher(final Iterable globlikes) { - pattern = GloblikePattern.globlikeToRegex(globlikes); - } + public GlobMatcher(final Iterable globlikes) { + pattern = GloblikePattern.globlikeToRegex(globlikes); + } - public boolean matches(final String s) { - final Matcher matcher = pattern.matcher(s); - return matcher.find(); - } + public boolean matches(final String s) { + final Matcher matcher = pattern.matcher(s); + return matcher.find(); + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/internal/ParititionId.java b/data-store/src/main/java/org/lucares/pdb/datastore/internal/ParititionId.java index 97ef75a..fe15791 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/internal/ParititionId.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/internal/ParititionId.java @@ -1,65 +1,65 @@ package org.lucares.pdb.datastore.internal; public class ParititionId implements Comparable { - private final String partitionId; + private final String partitionId; - /** - * Create a new partition id. - * - * @param partitionId the id, e.g. a time like 201902 (partition for entries of - * February 2019) - */ - public ParititionId(final String partitionId) { - super(); - this.partitionId = partitionId; - } + /** + * Create a new partition id. + * + * @param partitionId the id, e.g. 
a time like 201902 (partition for entries of + * February 2019) + */ + public ParititionId(final String partitionId) { + super(); + this.partitionId = partitionId; + } - public static ParititionId of(final String partitionId) { - return new ParititionId(partitionId); - } + public static ParititionId of(final String partitionId) { + return new ParititionId(partitionId); + } - @Override - public int compareTo(final ParititionId other) { - return partitionId.compareTo(other.getPartitionId()); - } + @Override + public int compareTo(final ParititionId other) { + return partitionId.compareTo(other.getPartitionId()); + } - /** - * @return the id, e.g. a time like 201902 (partition for entries of February - * 2019) - */ - public String getPartitionId() { - return partitionId; - } + /** + * @return the id, e.g. a time like 201902 (partition for entries of February + * 2019) + */ + public String getPartitionId() { + return partitionId; + } - @Override - public String toString() { - return partitionId; - } + @Override + public String toString() { + return partitionId; + } - /* - * non-standard hashcode implementation! This class is just a wrapper for - * string, so we delegate directly to String.hashCode(). - */ - @Override - public int hashCode() { - return partitionId.hashCode(); - } + /* + * non-standard hashcode implementation! This class is just a wrapper for + * string, so we delegate directly to String.hashCode(). 
+ */ + @Override + public int hashCode() { + return partitionId.hashCode(); + } - @Override - public boolean equals(final Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - final ParititionId other = (ParititionId) obj; - if (partitionId == null) { - if (other.partitionId != null) - return false; - } else if (!partitionId.equals(other.partitionId)) - return false; - return true; - } + @Override + public boolean equals(final Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + final ParititionId other = (ParititionId) obj; + if (partitionId == null) { + if (other.partitionId != null) + return false; + } else if (!partitionId.equals(other.partitionId)) + return false; + return true; + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/internal/PartitionAwareEncoderDecoder.java b/data-store/src/main/java/org/lucares/pdb/datastore/internal/PartitionAwareEncoderDecoder.java index 051cbff..5f71a17 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/internal/PartitionAwareEncoderDecoder.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/internal/PartitionAwareEncoderDecoder.java @@ -4,7 +4,7 @@ import org.lucares.pdb.map.PersistentMap.EncoderDecoder; public interface PartitionAwareEncoderDecoder extends EncoderDecoder

{ - public P encodeValue(V v); + public P encodeValue(V v); - public V decodeValue(ParititionId partitionId, P p); + public V decodeValue(ParititionId partitionId, P p); } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/internal/PartitionAwareWrapper.java b/data-store/src/main/java/org/lucares/pdb/datastore/internal/PartitionAwareWrapper.java index 0ba33fc..a3e0d58 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/internal/PartitionAwareWrapper.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/internal/PartitionAwareWrapper.java @@ -4,37 +4,37 @@ import org.lucares.pdb.map.PersistentMap.EncoderDecoder; public final class PartitionAwareWrapper implements PartitionAwareEncoderDecoder { - private final EncoderDecoder delegate; + private final EncoderDecoder delegate; - public PartitionAwareWrapper(final EncoderDecoder delegate) { - this.delegate = delegate; - } + public PartitionAwareWrapper(final EncoderDecoder delegate) { + this.delegate = delegate; + } - @Override - public byte[] encode(final O object) { - return delegate.encode(object); - } + @Override + public byte[] encode(final O object) { + return delegate.encode(object); + } - @Override - public O decode(final byte[] bytes) { - return delegate.decode(bytes); - } + @Override + public O decode(final byte[] bytes) { + return delegate.decode(bytes); + } - @Override - public O encodeValue(final O v) { - return v; - } + @Override + public O encodeValue(final O v) { + return v; + } - @Override - public O decodeValue(final ParititionId partitionId, final O p) { - return p; - } + @Override + public O decodeValue(final ParititionId partitionId, final O p) { + return p; + } - public static PartitionAwareEncoderDecoder wrap(final EncoderDecoder encoder) { - return new PartitionAwareWrapper<>(encoder); - } - - public byte[] getEmptyValue() { - return delegate.getEmptyValue(); - } + public static PartitionAwareEncoderDecoder wrap(final EncoderDecoder encoder) { + return new 
PartitionAwareWrapper<>(encoder); + } + + public byte[] getEmptyValue() { + return delegate.getEmptyValue(); + } } \ No newline at end of file diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/internal/PartitionDiskStore.java b/data-store/src/main/java/org/lucares/pdb/datastore/internal/PartitionDiskStore.java index b2df65e..68af164 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/internal/PartitionDiskStore.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/internal/PartitionDiskStore.java @@ -14,68 +14,68 @@ import org.lucares.pdb.blockstorage.LongStreamFile; import org.lucares.pdb.diskstorage.DiskStorage; public class PartitionDiskStore { - private final ConcurrentHashMap diskStorages = new ConcurrentHashMap<>(); + private final ConcurrentHashMap diskStorages = new ConcurrentHashMap<>(); - private final Function creator; - private final Function supplier; + private final Function creator; + private final Function supplier; - public PartitionDiskStore(final Path storageBasePath, final String filename) { + public PartitionDiskStore(final Path storageBasePath, final String filename) { - creator = partitionId -> { - final Path file = storageBasePath.resolve(partitionId.getPartitionId()).resolve(filename); - final boolean isNew = !Files.exists(file); - final DiskStorage diskStorage = new DiskStorage(file, storageBasePath); - if (isNew) { - diskStorage.ensureAlignmentForNewBlocks(BSFile.BLOCK_SIZE); - } - return diskStorage; - }; - supplier = partitionId -> { - final Path file = storageBasePath.resolve(partitionId.getPartitionId()).resolve(filename); - if (Files.exists(file)) { - return new DiskStorage(file, storageBasePath); - } - return null; - }; - } + creator = partitionId -> { + final Path file = storageBasePath.resolve(partitionId.getPartitionId()).resolve(filename); + final boolean isNew = !Files.exists(file); + final DiskStorage diskStorage = new DiskStorage(file, storageBasePath); + if (isNew) { + 
diskStorage.ensureAlignmentForNewBlocks(BSFile.BLOCK_SIZE); + } + return diskStorage; + }; + supplier = partitionId -> { + final Path file = storageBasePath.resolve(partitionId.getPartitionId()).resolve(filename); + if (Files.exists(file)) { + return new DiskStorage(file, storageBasePath); + } + return null; + }; + } - public DiskStorage getExisting(final ParititionId partitionId) { - return diskStorages.computeIfAbsent(partitionId, supplier); - } + public DiskStorage getExisting(final ParititionId partitionId) { + return diskStorages.computeIfAbsent(partitionId, supplier); + } - public DiskStorage getCreateIfNotExists(final ParititionId partitionId) { - return diskStorages.computeIfAbsent(partitionId, creator); - } + public DiskStorage getCreateIfNotExists(final ParititionId partitionId) { + return diskStorages.computeIfAbsent(partitionId, creator); + } - public long allocateBlock(final ParititionId partitionId, final int blockSize) { - final DiskStorage diskStorage = getCreateIfNotExists(partitionId); - return diskStorage.allocateBlock(blockSize); - } + public long allocateBlock(final ParititionId partitionId, final int blockSize) { + final DiskStorage diskStorage = getCreateIfNotExists(partitionId); + return diskStorage.allocateBlock(blockSize); + } - public LongStreamFile streamExistingFile(final Long diskStoreOffsetForDocIdsOfTag, final ParititionId partitionId) { - try { - final DiskStorage diskStorage = getExisting(partitionId); - return LongStreamFile.existingFile(diskStoreOffsetForDocIdsOfTag, diskStorage); - } catch (final IOException e) { - throw new RuntimeIOException(e); - } - } + public LongStreamFile streamExistingFile(final Long diskStoreOffsetForDocIdsOfTag, final ParititionId partitionId) { + try { + final DiskStorage diskStorage = getExisting(partitionId); + return LongStreamFile.existingFile(diskStoreOffsetForDocIdsOfTag, diskStorage); + } catch (final IOException e) { + throw new RuntimeIOException(e); + } + } - public void close() { - final 
List throwables = new ArrayList<>(); + public void close() { + final List throwables = new ArrayList<>(); - for (final DiskStorage diskStorage : diskStorages.values()) { - try { - diskStorage.close(); - } catch (final RuntimeException e) { - throwables.add(e); - } - } - if (!throwables.isEmpty()) { - final RuntimeException ex = new RuntimeException(); - throwables.forEach(ex::addSuppressed); - throw ex; - } + for (final DiskStorage diskStorage : diskStorages.values()) { + try { + diskStorage.close(); + } catch (final RuntimeException e) { + throwables.add(e); + } + } + if (!throwables.isEmpty()) { + final RuntimeException ex = new RuntimeException(); + throwables.forEach(ex::addSuppressed); + throw ex; + } - } + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/internal/PartitionIdSource.java b/data-store/src/main/java/org/lucares/pdb/datastore/internal/PartitionIdSource.java index ffe36d0..5bcc214 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/internal/PartitionIdSource.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/internal/PartitionIdSource.java @@ -3,5 +3,5 @@ package org.lucares.pdb.datastore.internal; import java.util.Set; public interface PartitionIdSource { - Set toPartitionIds(Set availablePartitions); + Set toPartitionIds(Set availablePartitions); } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/internal/PartitionLongList.java b/data-store/src/main/java/org/lucares/pdb/datastore/internal/PartitionLongList.java index 32a6f19..e16377c 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/internal/PartitionLongList.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/internal/PartitionLongList.java @@ -9,87 +9,87 @@ import java.util.Set; import org.lucares.collections.LongList; public class PartitionLongList implements Iterable { - private final Map lists = new HashMap<>(); + private final Map lists = new HashMap<>(); - public LongList put(final ParititionId partitionId, 
final LongList longList) { - return lists.put(partitionId, longList); - } + public LongList put(final ParititionId partitionId, final LongList longList) { + return lists.put(partitionId, longList); + } - public LongList get(final ParititionId partitionId) { - return lists.get(partitionId); - } + public LongList get(final ParititionId partitionId) { + return lists.get(partitionId); + } - @Override - public Iterator iterator() { - return lists.keySet().iterator(); - } + @Override + public Iterator iterator() { + return lists.keySet().iterator(); + } - public static PartitionLongList intersection(final PartitionLongList a, final PartitionLongList b) { - final PartitionLongList result = new PartitionLongList(); - final Set partitionIds = new HashSet<>(); - partitionIds.addAll(a.lists.keySet()); - partitionIds.addAll(b.lists.keySet()); + public static PartitionLongList intersection(final PartitionLongList a, final PartitionLongList b) { + final PartitionLongList result = new PartitionLongList(); + final Set partitionIds = new HashSet<>(); + partitionIds.addAll(a.lists.keySet()); + partitionIds.addAll(b.lists.keySet()); - for (final ParititionId partitionId : partitionIds) { - final LongList x = a.get(partitionId); - final LongList y = b.get(partitionId); + for (final ParititionId partitionId : partitionIds) { + final LongList x = a.get(partitionId); + final LongList y = b.get(partitionId); - if (x != null && y != null) { - final LongList intersection = LongList.intersection(x, y); - result.put(partitionId, intersection); - } else { - // one list is empty => the intersection is empty - } - } - return result; - } + if (x != null && y != null) { + final LongList intersection = LongList.intersection(x, y); + result.put(partitionId, intersection); + } else { + // one list is empty => the intersection is empty + } + } + return result; + } - public static PartitionLongList union(final PartitionLongList a, final PartitionLongList b) { - final PartitionLongList result = new 
PartitionLongList(); - final Set partitionIds = new HashSet<>(); - partitionIds.addAll(a.lists.keySet()); - partitionIds.addAll(b.lists.keySet()); - for (final ParititionId partitionId : partitionIds) { - final LongList x = a.get(partitionId); - final LongList y = b.get(partitionId); + public static PartitionLongList union(final PartitionLongList a, final PartitionLongList b) { + final PartitionLongList result = new PartitionLongList(); + final Set partitionIds = new HashSet<>(); + partitionIds.addAll(a.lists.keySet()); + partitionIds.addAll(b.lists.keySet()); + for (final ParititionId partitionId : partitionIds) { + final LongList x = a.get(partitionId); + final LongList y = b.get(partitionId); - if (x != null && y != null) { - final LongList intersection = LongList.union(x, y); - result.put(partitionId, intersection); - } else if (x != null) { - result.put(partitionId, x.clone()); - } else if (y != null) { - result.put(partitionId, y.clone()); - } - } - return result; - } + if (x != null && y != null) { + final LongList intersection = LongList.union(x, y); + result.put(partitionId, intersection); + } else if (x != null) { + result.put(partitionId, x.clone()); + } else if (y != null) { + result.put(partitionId, y.clone()); + } + } + return result; + } - public int size() { - int size = 0; + public int size() { + int size = 0; - for (final LongList longList : lists.values()) { - size += longList.size(); - } + for (final LongList longList : lists.values()) { + size += longList.size(); + } - return size; - } + return size; + } - public boolean isSorted() { - for (final LongList longList : lists.values()) { - if (!longList.isSorted()) { - return false; - } - } - return true; - } + public boolean isSorted() { + for (final LongList longList : lists.values()) { + if (!longList.isSorted()) { + return false; + } + } + return true; + } - public void removeAll(final PartitionLongList remove) { - for (final ParititionId partitionId : lists.keySet()) { - final LongList 
removeLongList = remove.get(partitionId); - if (removeLongList != null) { - lists.get(partitionId).removeAll(removeLongList); - } - } - } + public void removeAll(final PartitionLongList remove) { + for (final ParititionId partitionId : lists.keySet()) { + final LongList removeLongList = remove.get(partitionId); + if (removeLongList != null) { + lists.get(partitionId).removeAll(removeLongList); + } + } + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/internal/PartitionPersistentMap.java b/data-store/src/main/java/org/lucares/pdb/datastore/internal/PartitionPersistentMap.java index 5e8ba43..899dfc1 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/internal/PartitionPersistentMap.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/internal/PartitionPersistentMap.java @@ -25,130 +25,130 @@ import org.lucares.pdb.map.Visitor; */ public class PartitionPersistentMap implements AutoCloseable { - private final ConcurrentHashMap> maps = new ConcurrentHashMap<>(); + private final ConcurrentHashMap> maps = new ConcurrentHashMap<>(); - private final Function> creator; - private final Function> supplier; + private final Function> creator; + private final Function> supplier; - private final PartitionAwareEncoderDecoder valueEncoder; + private final PartitionAwareEncoderDecoder valueEncoder; - public PartitionPersistentMap(final Path storageBasePath, final String filename, final EncoderDecoder keyEncoder, - final PartitionAwareEncoderDecoder valueEncoder) { + public PartitionPersistentMap(final Path storageBasePath, final String filename, final EncoderDecoder keyEncoder, + final PartitionAwareEncoderDecoder valueEncoder) { - this.valueEncoder = valueEncoder; - creator = partitionId -> { - final Path file = storageBasePath.resolve(partitionId.getPartitionId()).resolve(filename); - return new PersistentMap<>(file, storageBasePath, keyEncoder, valueEncoder); - }; - supplier = partitionId -> { - final Path file = 
storageBasePath.resolve(partitionId.getPartitionId()).resolve(filename); - if (Files.exists(file)) { - return new PersistentMap<>(file, storageBasePath, keyEncoder, valueEncoder); - } - return null; - }; - preload(storageBasePath); - } + this.valueEncoder = valueEncoder; + creator = partitionId -> { + final Path file = storageBasePath.resolve(partitionId.getPartitionId()).resolve(filename); + return new PersistentMap<>(file, storageBasePath, keyEncoder, valueEncoder); + }; + supplier = partitionId -> { + final Path file = storageBasePath.resolve(partitionId.getPartitionId()).resolve(filename); + if (Files.exists(file)) { + return new PersistentMap<>(file, storageBasePath, keyEncoder, valueEncoder); + } + return null; + }; + preload(storageBasePath); + } - private void preload(final Path storageBasePath) { - try { - Files.list(storageBasePath)// - .filter(Files::isDirectory)// - .map(Path::getFileName)// - .map(Path::toString)// - .map(ParititionId::of)// - .forEach(partitionId -> maps.computeIfAbsent(partitionId, supplier)); - } catch (final IOException e) { - throw new RuntimeIOException(e); - } - } + private void preload(final Path storageBasePath) { + try { + Files.list(storageBasePath)// + .filter(Files::isDirectory)// + .map(Path::getFileName)// + .map(Path::toString)// + .map(ParititionId::of)// + .forEach(partitionId -> maps.computeIfAbsent(partitionId, supplier)); + } catch (final IOException e) { + throw new RuntimeIOException(e); + } + } - private Set getAllPartitionIds() { - return maps.keySet(); - } + private Set getAllPartitionIds() { + return maps.keySet(); + } - public Set getAvailablePartitionIds(final PartitionIdSource partitionIdSource) { - return partitionIdSource.toPartitionIds(getAllPartitionIds()); - } + public Set getAvailablePartitionIds(final PartitionIdSource partitionIdSource) { + return partitionIdSource.toPartitionIds(getAllPartitionIds()); + } - private PersistentMap getExistingPersistentMap(final ParititionId partitionId) { - return 
maps.computeIfAbsent(partitionId, supplier); - } + private PersistentMap getExistingPersistentMap(final ParititionId partitionId) { + return maps.computeIfAbsent(partitionId, supplier); + } - private PersistentMap getPersistentMapCreateIfNotExists(final ParititionId partitionId) { - return maps.computeIfAbsent(partitionId, creator); - } + private PersistentMap getPersistentMapCreateIfNotExists(final ParititionId partitionId) { + return maps.computeIfAbsent(partitionId, creator); + } - public V getValue(final ParititionId partitionId, final K key) { - final PersistentMap map = getExistingPersistentMap(partitionId); - final P persistedValue = map != null ? map.getValue(key) : null; - return valueEncoder.decodeValue(partitionId, persistedValue); - } + public V getValue(final ParititionId partitionId, final K key) { + final PersistentMap map = getExistingPersistentMap(partitionId); + final P persistedValue = map != null ? map.getValue(key) : null; + return valueEncoder.decodeValue(partitionId, persistedValue); + } - public List getValues(final PartitionIdSource partitionIdSource, final K key) { - final List result = new ArrayList<>(); - final Set partitionIds = partitionIdSource.toPartitionIds(getAllPartitionIds()); + public List getValues(final PartitionIdSource partitionIdSource, final K key) { + final List result = new ArrayList<>(); + final Set partitionIds = partitionIdSource.toPartitionIds(getAllPartitionIds()); - for (final ParititionId partitionId : partitionIds) { - final PersistentMap map = getPersistentMapCreateIfNotExists(partitionId); - if (map != null) { - final V value = valueEncoder.decodeValue(partitionId, map.getValue(key)); - if (value != null) { - result.add(value); - } - } - } + for (final ParititionId partitionId : partitionIds) { + final PersistentMap map = getPersistentMapCreateIfNotExists(partitionId); + if (map != null) { + final V value = valueEncoder.decodeValue(partitionId, map.getValue(key)); + if (value != null) { + result.add(value); + } 
+ } + } - return result; - } + return result; + } - public V putValue(final ParititionId partitionId, final K key, final V value) { - final PersistentMap map = getPersistentMapCreateIfNotExists(partitionId); - final P persistedValue = valueEncoder.encodeValue(value); - final P previousPersistedValue = map.putValue(key, persistedValue); - return valueEncoder.decodeValue(partitionId, previousPersistedValue); - } + public V putValue(final ParititionId partitionId, final K key, final V value) { + final PersistentMap map = getPersistentMapCreateIfNotExists(partitionId); + final P persistedValue = valueEncoder.encodeValue(value); + final P previousPersistedValue = map.putValue(key, persistedValue); + return valueEncoder.decodeValue(partitionId, previousPersistedValue); + } - public void visitValues(final ParititionId partitionId, final K keyPrefix, final Visitor visitor) { - final PersistentMap map = getExistingPersistentMap(partitionId); - if (map != null) { - map.visitValues(keyPrefix, (k, p) -> { - final V value = valueEncoder.decodeValue(partitionId, p); - visitor.visit(k, value); - }); - } - } + public void visitValues(final ParititionId partitionId, final K keyPrefix, final Visitor visitor) { + final PersistentMap map = getExistingPersistentMap(partitionId); + if (map != null) { + map.visitValues(keyPrefix, (k, p) -> { + final V value = valueEncoder.decodeValue(partitionId, p); + visitor.visit(k, value); + }); + } + } - public void visitValues(final PartitionIdSource partitionIdSource, final K keyPrefix, final Visitor visitor) { - final Set partitionIds = partitionIdSource.toPartitionIds(getAllPartitionIds()); + public void visitValues(final PartitionIdSource partitionIdSource, final K keyPrefix, final Visitor visitor) { + final Set partitionIds = partitionIdSource.toPartitionIds(getAllPartitionIds()); - for (final ParititionId partitionId : partitionIds) { - final PersistentMap map = getExistingPersistentMap(partitionId); - if (map != null) { - 
map.visitValues(keyPrefix, (k, p) -> { - final V value = valueEncoder.decodeValue(partitionId, p); - visitor.visit(k, value); - }); - } - } - } + for (final ParititionId partitionId : partitionIds) { + final PersistentMap map = getExistingPersistentMap(partitionId); + if (map != null) { + map.visitValues(keyPrefix, (k, p) -> { + final V value = valueEncoder.decodeValue(partitionId, p); + visitor.visit(k, value); + }); + } + } + } - @Override - public void close() { - final List throwables = new ArrayList<>(); + @Override + public void close() { + final List throwables = new ArrayList<>(); - for (final PersistentMap map : maps.values()) { - try { - map.close(); - } catch (final RuntimeException e) { - throwables.add(e); - } - } - if (!throwables.isEmpty()) { - final RuntimeException ex = new RuntimeException(); - throwables.forEach(ex::addSuppressed); - throw ex; - } - } + for (final PersistentMap map : maps.values()) { + try { + map.close(); + } catch (final RuntimeException e) { + throwables.add(e); + } + } + if (!throwables.isEmpty()) { + final RuntimeException ex = new RuntimeException(); + throwables.forEach(ex::addSuppressed); + throw ex; + } + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/internal/PdbWriter.java b/data-store/src/main/java/org/lucares/pdb/datastore/internal/PdbWriter.java index ecb1a59..59263ee 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/internal/PdbWriter.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/internal/PdbWriter.java @@ -17,62 +17,62 @@ import org.slf4j.LoggerFactory; */ class PdbWriter implements AutoCloseable, Flushable { - private static final Logger LOGGER = LoggerFactory.getLogger(PdbWriter.class); + private static final Logger LOGGER = LoggerFactory.getLogger(PdbWriter.class); - private final PdbFile pdbFile; - private long lastEpochMilli; + private final PdbFile pdbFile; + private long lastEpochMilli; - private final TimeSeriesFile timeSeriesFile; + private final 
TimeSeriesFile timeSeriesFile; - public PdbWriter(final PdbFile pdbFile, final DiskStorage diskStorage) { - this.pdbFile = pdbFile; + public PdbWriter(final PdbFile pdbFile, final DiskStorage diskStorage) { + this.pdbFile = pdbFile; - timeSeriesFile = TimeSeriesFile.existingFile(pdbFile.getRootBlockNumber(), diskStorage); - final Optional optionalLastValue = timeSeriesFile.getLastValue(); // TODO is this last value correct? + timeSeriesFile = TimeSeriesFile.existingFile(pdbFile.getRootBlockNumber(), diskStorage); + final Optional optionalLastValue = timeSeriesFile.getLastValue(); // TODO is this last value correct? - lastEpochMilli = optionalLastValue.orElse(0L); - } + lastEpochMilli = optionalLastValue.orElse(0L); + } - public PdbFile getPdbFile() { - return pdbFile; - } + public PdbFile getPdbFile() { + return pdbFile; + } - public long getDateOffsetAsEpochMilli() { - return lastEpochMilli; - } + public long getDateOffsetAsEpochMilli() { + return lastEpochMilli; + } - public void write(final long epochMilli, final long value) throws WriteException, InvalidValueException { - try { - timeSeriesFile.appendTimeValue(epochMilli, value); + public void write(final long epochMilli, final long value) throws WriteException, InvalidValueException { + try { + timeSeriesFile.appendTimeValue(epochMilli, value); - lastEpochMilli = epochMilli; - } catch (final RuntimeException e) { - throw new WriteException(e); - } - } + lastEpochMilli = epochMilli; + } catch (final RuntimeException e) { + throw new WriteException(e); + } + } - @Override - public void close() { + @Override + public void close() { - LOGGER.debug("close PdbWriter {}", pdbFile); - timeSeriesFile.close(); - } + LOGGER.debug("close PdbWriter {}", pdbFile); + timeSeriesFile.close(); + } - @Override - public void flush() { - timeSeriesFile.flush(); - } + @Override + public void flush() { + timeSeriesFile.flush(); + } - public static void writeEntry(final PdbFile pdbFile, final DiskStorage diskStorage, final Entry... 
entries) { - try (PdbWriter writer = new PdbWriter(pdbFile, diskStorage)) { - for (final Entry entry : entries) { - writer.write(entry.getEpochMilli(), entry.getValue()); - } - } - } + public static void writeEntry(final PdbFile pdbFile, final DiskStorage diskStorage, final Entry... entries) { + try (PdbWriter writer = new PdbWriter(pdbFile, diskStorage)) { + for (final Entry entry : entries) { + writer.write(entry.getEpochMilli(), entry.getValue()); + } + } + } - @Override - public String toString() { - return "PdbWriter [pdbFile=" + pdbFile + ", lastEpochMilli=" + lastEpochMilli + "]"; - } + @Override + public String toString() { + return "PdbWriter [pdbFile=" + pdbFile + ", lastEpochMilli=" + lastEpochMilli + "]"; + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/internal/QueryCompletionIndex.java b/data-store/src/main/java/org/lucares/pdb/datastore/internal/QueryCompletionIndex.java index 22269b9..709f9f0 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/internal/QueryCompletionIndex.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/internal/QueryCompletionIndex.java @@ -143,321 +143,321 @@ import org.lucares.utils.byteencoder.VariableByteEncoder; * */ public class QueryCompletionIndex implements AutoCloseable { - private static final class TwoTags { - private final Tag tagA; - private final Tag tagB; + private static final class TwoTags { + private final Tag tagA; + private final Tag tagB; - public TwoTags(final Tag tagA, final Tag tagB) { - this.tagA = tagA; - this.tagB = tagB; - } + public TwoTags(final Tag tagA, final Tag tagB) { + this.tagA = tagA; + this.tagB = tagB; + } - public TwoTags(final String fieldB, final String fieldA, final String valueA, final String valueB) { + public TwoTags(final String fieldB, final String fieldA, final String valueA, final String valueB) { - tagA = new Tag(fieldA, valueA); - tagB = new Tag(fieldB, valueB); - } + tagA = new Tag(fieldA, valueA); + tagB = new Tag(fieldB, valueB); 
+ } - public Tag getTagA() { - return tagA; - } + public Tag getTagA() { + return tagA; + } - public Tag getTagB() { - return tagB; - } + public Tag getTagB() { + return tagB; + } - @Override - public String toString() { - return tagA + "::" + tagB; - } - } + @Override + public String toString() { + return tagA + "::" + tagB; + } + } - public static final class FieldField { - private final int fieldA; - private final int fieldB; + public static final class FieldField { + private final int fieldA; + private final int fieldB; - public FieldField(final int fieldA, final int fieldB) { - this.fieldA = fieldA; - this.fieldB = fieldB; - } + public FieldField(final int fieldA, final int fieldB) { + this.fieldA = fieldA; + this.fieldB = fieldB; + } - public int getFieldA() { - return fieldA; - } + public int getFieldA() { + return fieldA; + } - public int getFieldB() { - return fieldB; - } + public int getFieldB() { + return fieldB; + } - @Override - public String toString() { - return fieldA + "::" + fieldB; - } - } + @Override + public String toString() { + return fieldA + "::" + fieldB; + } + } - private static final class EncoderTwoTags implements EncoderDecoder { + private static final class EncoderTwoTags implements EncoderDecoder { - @Override - public byte[] encode(final TwoTags tagAndField) { - final LongList tmp = new LongList(4); - final Tag tagA = tagAndField.getTagA(); - final Tag tagB = tagAndField.getTagB(); + @Override + public byte[] encode(final TwoTags tagAndField) { + final LongList tmp = new LongList(4); + final Tag tagA = tagAndField.getTagA(); + final Tag tagB = tagAndField.getTagB(); - tmp.add(tagB.getKey()); - tmp.add(tagA.getKey()); + tmp.add(tagB.getKey()); + tmp.add(tagA.getKey()); - if (tagA.getValue() >= 0) { - tmp.add(tagA.getValue()); + if (tagA.getValue() >= 0) { + tmp.add(tagA.getValue()); - // A query for tagA.key and tagA.value and tagB.key is done by setting - // tagB.value==-1. - // The query is then executed as a prefix search. 
Thus tagB.value must not be - // part of the byte array that is returned. - if (tagB.getValue() >= 0) { - tmp.add(tagB.getValue()); - } - } else { - Preconditions.checkSmaller(tagB.getValue(), 0, - "if no value for tagA is given, then tagB must also be empty"); - } + // A query for tagA.key and tagA.value and tagB.key is done by setting + // tagB.value==-1. + // The query is then executed as a prefix search. Thus tagB.value must not be + // part of the byte array that is returned. + if (tagB.getValue() >= 0) { + tmp.add(tagB.getValue()); + } + } else { + Preconditions.checkSmaller(tagB.getValue(), 0, + "if no value for tagA is given, then tagB must also be empty"); + } - return VariableByteEncoder.encode(tmp); - } + return VariableByteEncoder.encode(tmp); + } - @Override - public TwoTags decode(final byte[] bytes) { + @Override + public TwoTags decode(final byte[] bytes) { - final LongList tmp = VariableByteEncoder.decode(bytes); - final int tagBKey = (int) tmp.get(0); - final int tagAKey = (int) tmp.get(1); - final int tagAValue = (int) tmp.get(2); - final int tagBValue = (int) tmp.get(3); + final LongList tmp = VariableByteEncoder.decode(bytes); + final int tagBKey = (int) tmp.get(0); + final int tagAKey = (int) tmp.get(1); + final int tagAValue = (int) tmp.get(2); + final int tagBValue = (int) tmp.get(3); - final Tag tagA = new Tag(tagAKey, tagAValue); - final Tag tagB = new Tag(tagBKey, tagBValue); + final Tag tagA = new Tag(tagAKey, tagAValue); + final Tag tagB = new Tag(tagBKey, tagBValue); - return new TwoTags(tagA, tagB); - } + return new TwoTags(tagA, tagB); + } - @Override - public byte[] getEmptyValue() { - return new byte[] { 0, 0, 0, 0 }; - } - } + @Override + public byte[] getEmptyValue() { + return new byte[] { 0, 0, 0, 0 }; + } + } - private static final class EncoderTag implements EncoderDecoder { + private static final class EncoderTag implements EncoderDecoder { - @Override - public byte[] encode(final Tag tag) { + @Override + public byte[] 
encode(final Tag tag) { - final LongList longList = new LongList(2); - longList.add(tag.getKey()); + final LongList longList = new LongList(2); + longList.add(tag.getKey()); - if (tag.getValue() >= 0) { - longList.add(tag.getValue()); - } - return VariableByteEncoder.encode(longList); - } + if (tag.getValue() >= 0) { + longList.add(tag.getValue()); + } + return VariableByteEncoder.encode(longList); + } - @Override - public Tag decode(final byte[] bytes) { - final LongList tmp = VariableByteEncoder.decode(bytes); - final int key = (int) tmp.get(0); - final int value = (int) tmp.get(1); - return new Tag(key, value); - } + @Override + public Tag decode(final byte[] bytes) { + final LongList tmp = VariableByteEncoder.decode(bytes); + final int key = (int) tmp.get(0); + final int value = (int) tmp.get(1); + return new Tag(key, value); + } - @Override - public byte[] getEmptyValue() { - return new byte[] { 0 }; - } - } + @Override + public byte[] getEmptyValue() { + return new byte[] { 0 }; + } + } - private static final class EncoderField implements EncoderDecoder { + private static final class EncoderField implements EncoderDecoder { - @Override - public byte[] encode(final String field) { + @Override + public byte[] encode(final String field) { - if (field.isEmpty()) { - return new byte[0]; - } + if (field.isEmpty()) { + return new byte[0]; + } - return VariableByteEncoder.encode(Tags.STRING_COMPRESSOR.put(field)); - } + return VariableByteEncoder.encode(Tags.STRING_COMPRESSOR.put(field)); + } - @Override - public String decode(final byte[] bytes) { - final long compressedString = VariableByteEncoder.decodeFirstValue(bytes); - return Tags.STRING_COMPRESSOR.get((int) compressedString); - } + @Override + public String decode(final byte[] bytes) { + final long compressedString = VariableByteEncoder.decodeFirstValue(bytes); + return Tags.STRING_COMPRESSOR.get((int) compressedString); + } - @Override - public byte[] getEmptyValue() { - return new byte[] { 0 }; - } - } + 
@Override + public byte[] getEmptyValue() { + return new byte[] { 0 }; + } + } - private final PartitionPersistentMap tagToTagIndex; - private final PartitionPersistentMap fieldToValueIndex; - private final PartitionPersistentMap fieldIndex; + private final PartitionPersistentMap tagToTagIndex; + private final PartitionPersistentMap fieldToValueIndex; + private final PartitionPersistentMap fieldIndex; - public QueryCompletionIndex(final Path basePath) throws IOException { - tagToTagIndex = new PartitionPersistentMap<>(basePath, "queryCompletionTagToTagIndex.bs", new EncoderTwoTags(), - PartitionAwareWrapper.wrap(PersistentMap.EMPTY_ENCODER)); + public QueryCompletionIndex(final Path basePath) throws IOException { + tagToTagIndex = new PartitionPersistentMap<>(basePath, "queryCompletionTagToTagIndex.bs", new EncoderTwoTags(), + PartitionAwareWrapper.wrap(PersistentMap.EMPTY_ENCODER)); - fieldToValueIndex = new PartitionPersistentMap<>(basePath, "queryCompletionFieldToValueIndex.bs", - new EncoderTag(), PartitionAwareWrapper.wrap(PersistentMap.EMPTY_ENCODER)); + fieldToValueIndex = new PartitionPersistentMap<>(basePath, "queryCompletionFieldToValueIndex.bs", + new EncoderTag(), PartitionAwareWrapper.wrap(PersistentMap.EMPTY_ENCODER)); - fieldIndex = new PartitionPersistentMap<>(basePath, "queryCompletionFieldIndex.bs", new EncoderField(), - PartitionAwareWrapper.wrap(PersistentMap.EMPTY_ENCODER)); - } + fieldIndex = new PartitionPersistentMap<>(basePath, "queryCompletionFieldIndex.bs", new EncoderField(), + PartitionAwareWrapper.wrap(PersistentMap.EMPTY_ENCODER)); + } - public void addTags(final ParititionId partitionId, final Tags tags) throws IOException { - final List listOfTagsA = tags.toTags(); - final List listOfTagsB = tags.toTags(); + public void addTags(final ParititionId partitionId, final Tags tags) throws IOException { + final List listOfTagsA = tags.toTags(); + final List listOfTagsB = tags.toTags(); - // index all combinations of tagA and tagB and 
fieldA to fieldB - for (final Tag tagA : listOfTagsA) { - for (final Tag tagB : listOfTagsB) { - final TwoTags key = new TwoTags(tagA, tagB); - tagToTagIndex.putValue(partitionId, key, Empty.INSTANCE); - } - } + // index all combinations of tagA and tagB and fieldA to fieldB + for (final Tag tagA : listOfTagsA) { + for (final Tag tagB : listOfTagsB) { + final TwoTags key = new TwoTags(tagA, tagB); + tagToTagIndex.putValue(partitionId, key, Empty.INSTANCE); + } + } - // create indices of all tags and all fields - for (final Tag tag : listOfTagsA) { - fieldToValueIndex.putValue(partitionId, tag, Empty.INSTANCE); - fieldIndex.putValue(partitionId, tag.getKeyAsString(), Empty.INSTANCE); - } - } + // create indices of all tags and all fields + for (final Tag tag : listOfTagsA) { + fieldToValueIndex.putValue(partitionId, tag, Empty.INSTANCE); + fieldIndex.putValue(partitionId, tag.getKeyAsString(), Empty.INSTANCE); + } + } - @Override - public void close() throws IOException { - tagToTagIndex.close(); - } + @Override + public void close() throws IOException { + tagToTagIndex.close(); + } - /** - * Find values for fieldB that are yield results when executing the query - * "fieldA=valueA and fieldB=???" - * - * @param dateRange the date range - * @param fieldA the other field of the and expression - * @param valueA {@link GlobMatcher} for the value of the other field - * @param fieldB the field we are searching values for - * @return values of fieldB - */ - public SortedSet find(final DateTimeRange dateRange, final String fieldA, final GlobMatcher valueA, - final String fieldB) { + /** + * Find values for fieldB that are yield results when executing the query + * "fieldA=valueA and fieldB=???" 
+ * + * @param dateRange the date range + * @param fieldA the other field of the and expression + * @param valueA {@link GlobMatcher} for the value of the other field + * @param fieldB the field we are searching values for + * @return values of fieldB + */ + public SortedSet find(final DateTimeRange dateRange, final String fieldA, final GlobMatcher valueA, + final String fieldB) { - final SortedSet result = new TreeSet<>(); + final SortedSet result = new TreeSet<>(); - final TwoTags keyPrefix = new TwoTags(fieldB, fieldA, null, null); + final TwoTags keyPrefix = new TwoTags(fieldB, fieldA, null, null); - final PartitionIdSource partitionIdSource = new DatePartitioner(dateRange); - tagToTagIndex.visitValues(partitionIdSource, keyPrefix, (k, v) -> { + final PartitionIdSource partitionIdSource = new DatePartitioner(dateRange); + tagToTagIndex.visitValues(partitionIdSource, keyPrefix, (k, v) -> { - final String vA = k.getTagA().getValueAsString(); + final String vA = k.getTagA().getValueAsString(); - if (valueA.matches(vA)) { - result.add(k.getTagB().getValueAsString()); - } - }); + if (valueA.matches(vA)) { + result.add(k.getTagB().getValueAsString()); + } + }); - return result; - } + return result; + } - /** - * Find values for fieldB that are yield results when executing the query - * "tag.field=tag.value and fieldB=???" - * - * @param dateRange the date range - * @param tag the other tag - * @param field the field we are searching values for - * @return values for the field - */ - public SortedSet find(final DateTimeRange dateRange, final Tag tag, final String field) { + /** + * Find values for fieldB that are yield results when executing the query + * "tag.field=tag.value and fieldB=???" 
+ * + * @param dateRange the date range + * @param tag the other tag + * @param field the field we are searching values for + * @return values for the field + */ + public SortedSet find(final DateTimeRange dateRange, final Tag tag, final String field) { - final SortedSet result = new TreeSet<>(); - final int tagBKey = Tags.STRING_COMPRESSOR.put(field); - final Tag tagB = new Tag(tagBKey, -1); // the value must be negative for the prefix search to work. See - // EncoderTwoTags - final TwoTags keyPrefix = new TwoTags(tag, tagB); + final SortedSet result = new TreeSet<>(); + final int tagBKey = Tags.STRING_COMPRESSOR.put(field); + final Tag tagB = new Tag(tagBKey, -1); // the value must be negative for the prefix search to work. See + // EncoderTwoTags + final TwoTags keyPrefix = new TwoTags(tag, tagB); - final PartitionIdSource partitionIdSource = new DatePartitioner(dateRange); - tagToTagIndex.visitValues(partitionIdSource, keyPrefix, (k, v) -> { - result.add(k.getTagB().getValueAsString()); - }); + final PartitionIdSource partitionIdSource = new DatePartitioner(dateRange); + tagToTagIndex.visitValues(partitionIdSource, keyPrefix, (k, v) -> { + result.add(k.getTagB().getValueAsString()); + }); - return result; - } + return result; + } - /** - * Find all values for the given field. - * - * @param dateRange the date range - * @param field the field - * @return the values - */ - public SortedSet findAllValuesForField(final DateTimeRange dateRange, final String field) { + /** + * Find all values for the given field. + * + * @param dateRange the date range + * @param field the field + * @return the values + */ + public SortedSet findAllValuesForField(final DateTimeRange dateRange, final String field) { - final SortedSet result = new TreeSet<>(); - final int tagKey = Tags.STRING_COMPRESSOR.put(field); - final Tag keyPrefix = new Tag(tagKey, -1); // the value must be negative for the prefix search to work. 
See + final SortedSet result = new TreeSet<>(); + final int tagKey = Tags.STRING_COMPRESSOR.put(field); + final Tag keyPrefix = new Tag(tagKey, -1); // the value must be negative for the prefix search to work. See - final PartitionIdSource partitionIdSource = new DatePartitioner(dateRange); - fieldToValueIndex.visitValues(partitionIdSource, keyPrefix, (k, v) -> { - result.add(k.getValueAsString()); - }); + final PartitionIdSource partitionIdSource = new DatePartitioner(dateRange); + fieldToValueIndex.visitValues(partitionIdSource, keyPrefix, (k, v) -> { + result.add(k.getValueAsString()); + }); - return result; - } + return result; + } - /** - * Find values for {@code field} that will yield results for the query - * "tag.field=tag.value and not field=???". - *

- * - * @param dateRange the date range - * @param tag the other tag - * @param field the field we are searching values for - * @return the values - */ - public SortedSet findAllValuesNotForField(final DateTimeRange dateRange, final Tag tag, - final String field) { - final SortedSet result = new TreeSet<>(); + /** + * Find values for {@code field} that will yield results for the query + * "tag.field=tag.value and not field=???". + *

+ * + * @param dateRange the date range + * @param tag the other tag + * @param field the field we are searching values for + * @return the values + */ + public SortedSet findAllValuesNotForField(final DateTimeRange dateRange, final Tag tag, + final String field) { + final SortedSet result = new TreeSet<>(); - final TwoTags keyPrefix = new TwoTags(field, tag.getKeyAsString(), null, null); + final TwoTags keyPrefix = new TwoTags(field, tag.getKeyAsString(), null, null); - final int negatedValueA = tag.getValue(); - final PartitionIdSource partitionIdSource = new DatePartitioner(dateRange); - tagToTagIndex.visitValues(partitionIdSource, keyPrefix, (k, v) -> { + final int negatedValueA = tag.getValue(); + final PartitionIdSource partitionIdSource = new DatePartitioner(dateRange); + tagToTagIndex.visitValues(partitionIdSource, keyPrefix, (k, v) -> { - final int valueA = k.getTagA().getValue(); - if (valueA != negatedValueA) { - result.add(k.getTagB().getValueAsString()); - } - }); + final int valueA = k.getTagA().getValue(); + if (valueA != negatedValueA) { + result.add(k.getTagB().getValueAsString()); + } + }); - return result; - } + return result; + } - public SortedSet findAllFields(final DateTimeRange dateRange) { - final SortedSet result = new TreeSet<>(); - final PartitionIdSource partitionIdSource = new DatePartitioner(dateRange); - fieldIndex.visitValues(partitionIdSource, "", (k, v) -> { - result.add(k); - }); - return result; - } + public SortedSet findAllFields(final DateTimeRange dateRange) { + final SortedSet result = new TreeSet<>(); + final PartitionIdSource partitionIdSource = new DatePartitioner(dateRange); + fieldIndex.visitValues(partitionIdSource, "", (k, v) -> { + result.add(k); + }); + return result; + } - public boolean hasField(final DateTimeRange dateRange, final String field) { - final AtomicBoolean found = new AtomicBoolean(false); - final PartitionIdSource partitionIdSource = new DatePartitioner(dateRange); - 
fieldIndex.visitValues(partitionIdSource, "", (k, v) -> { - if (k.equals(field)) { - found.set(true); - } - }); - return found.get(); - } + public boolean hasField(final DateTimeRange dateRange, final String field) { + final AtomicBoolean found = new AtomicBoolean(false); + final PartitionIdSource partitionIdSource = new DatePartitioner(dateRange); + fieldIndex.visitValues(partitionIdSource, "", (k, v) -> { + if (k.equals(field)) { + found.set(true); + } + }); + return found.get(); + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/internal/TagEncoderDecoder.java b/data-store/src/main/java/org/lucares/pdb/datastore/internal/TagEncoderDecoder.java index d807eb1..c1ba7ec 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/internal/TagEncoderDecoder.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/internal/TagEncoderDecoder.java @@ -7,58 +7,58 @@ import org.lucares.pdb.map.PersistentMap.EncoderDecoder; import org.lucares.utils.byteencoder.VariableByteEncoder; class TagEncoderDecoder implements EncoderDecoder { - @Override - public byte[] encode(final Tag tag) { + @Override + public byte[] encode(final Tag tag) { - final LongList keyAndValueCompressed = new LongList(2); + final LongList keyAndValueCompressed = new LongList(2); - final String key = tag.getKeyAsString(); - final byte[] result; - if (!key.isEmpty()) { - final Integer keyAsLong = Tags.STRING_COMPRESSOR.put(key); - keyAndValueCompressed.add(keyAsLong); + final String key = tag.getKeyAsString(); + final byte[] result; + if (!key.isEmpty()) { + final Integer keyAsLong = Tags.STRING_COMPRESSOR.put(key); + keyAndValueCompressed.add(keyAsLong); - final String value = tag.getValueAsString(); - if (!value.isEmpty()) { - final Integer valueAsLong = Tags.STRING_COMPRESSOR.put(value); - keyAndValueCompressed.add(valueAsLong); - } - result = VariableByteEncoder.encode(keyAndValueCompressed); - } else { - result = new byte[0]; - } + final String value = 
tag.getValueAsString(); + if (!value.isEmpty()) { + final Integer valueAsLong = Tags.STRING_COMPRESSOR.put(value); + keyAndValueCompressed.add(valueAsLong); + } + result = VariableByteEncoder.encode(keyAndValueCompressed); + } else { + result = new byte[0]; + } - return result; - } + return result; + } - @Override - public Tag decode(final byte[] bytes) { - final LongList compressedStrings = VariableByteEncoder.decode(bytes); - final Tag result; - switch (compressedStrings.size()) { - case 0: + @Override + public Tag decode(final byte[] bytes) { + final LongList compressedStrings = VariableByteEncoder.decode(bytes); + final Tag result; + switch (compressedStrings.size()) { + case 0: - result = new Tag("", ""); - break; - case 1: - final String k = Tags.STRING_COMPRESSOR.get((int) compressedStrings.get(0)); - result = new Tag(k, ""); + result = new Tag("", ""); + break; + case 1: + final String k = Tags.STRING_COMPRESSOR.get((int) compressedStrings.get(0)); + result = new Tag(k, ""); - break; - case 2: - final String key = Tags.STRING_COMPRESSOR.get((int) compressedStrings.get(0)); - final String value = Tags.STRING_COMPRESSOR.get((int) compressedStrings.get(1)); - result = new Tag(key, value); - break; - default: - throw new IllegalStateException("too many values: " + compressedStrings); - } + break; + case 2: + final String key = Tags.STRING_COMPRESSOR.get((int) compressedStrings.get(0)); + final String value = Tags.STRING_COMPRESSOR.get((int) compressedStrings.get(1)); + result = new Tag(key, value); + break; + default: + throw new IllegalStateException("too many values: " + compressedStrings); + } - return result; - } - - @Override - public byte[] getEmptyValue() { - return new byte[] {0}; - } + return result; + } + + @Override + public byte[] getEmptyValue() { + return new byte[] { 0 }; + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/internal/TagsEncoderDecoder.java 
b/data-store/src/main/java/org/lucares/pdb/datastore/internal/TagsEncoderDecoder.java index f2933d5..f25f700 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/internal/TagsEncoderDecoder.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/internal/TagsEncoderDecoder.java @@ -4,18 +4,18 @@ import org.lucares.pdb.api.Tags; import org.lucares.pdb.map.PersistentMap.EncoderDecoder; class TagsEncoderDecoder implements EncoderDecoder { - @Override - public byte[] encode(final Tags tags) { - return tags.toBytes(); - } + @Override + public byte[] encode(final Tags tags) { + return tags.toBytes(); + } - @Override - public Tags decode(final byte[] bytes) { - return Tags.fromBytes(bytes); - } - - @Override - public byte[] getEmptyValue() { - return new byte[] {}; - } + @Override + public Tags decode(final byte[] bytes) { + return Tags.fromBytes(bytes); + } + + @Override + public byte[] getEmptyValue() { + return new byte[] {}; + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/lang/CandidateGrouper.java b/data-store/src/main/java/org/lucares/pdb/datastore/lang/CandidateGrouper.java index 9e876aa..627144e 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/lang/CandidateGrouper.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/lang/CandidateGrouper.java @@ -9,43 +9,43 @@ import java.util.TreeSet; import java.util.regex.Pattern; public class CandidateGrouper { - public SortedSet group(final Collection values, final String queryWithCaretMarker) { + public SortedSet group(final Collection values, final String queryWithCaretMarker) { - final TreeSet result = new TreeSet<>(); - final int numDotsInValue = countDotsInValue(queryWithCaretMarker); + final TreeSet result = new TreeSet<>(); + final int numDotsInValue = countDotsInValue(queryWithCaretMarker); - for (final String value : values) { - // keep everything up to the (numDotsInValue+1)-th - final String[] token = value.split(Pattern.quote(".")); - final List 
tokenlist = new ArrayList<>(Arrays.asList(token)); - final List prefix = tokenlist.subList(0, numDotsInValue + 1); - String shortenedValue = String.join(".", prefix); - if (tokenlist.size() > numDotsInValue + 1) { - shortenedValue += "."; - } - result.add(shortenedValue); - } + for (final String value : values) { + // keep everything up to the (numDotsInValue+1)-th + final String[] token = value.split(Pattern.quote(".")); + final List tokenlist = new ArrayList<>(Arrays.asList(token)); + final List prefix = tokenlist.subList(0, numDotsInValue + 1); + String shortenedValue = String.join(".", prefix); + if (tokenlist.size() > numDotsInValue + 1) { + shortenedValue += "."; + } + result.add(shortenedValue); + } - return result; - } + return result; + } - private int countDotsInValue(final String queryWithCaretMarker) { + private int countDotsInValue(final String queryWithCaretMarker) { - int count = 0; - int index = queryWithCaretMarker.indexOf(NewProposerParser.CARET_MARKER) - 1; - final String delimiter = " (),=!"; + int count = 0; + int index = queryWithCaretMarker.indexOf(NewProposerParser.CARET_MARKER) - 1; + final String delimiter = " (),=!"; - while (index >= 0) { - final char c = queryWithCaretMarker.charAt(index); - if (delimiter.indexOf(c) >= 0) { - break; - } - if (c == '.') { - count++; - } - index--; - } + while (index >= 0) { + final char c = queryWithCaretMarker.charAt(index); + if (delimiter.indexOf(c) >= 0) { + break; + } + if (c == '.') { + count++; + } + index--; + } - return count; - } + return count; + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/lang/ErrorListener.java b/data-store/src/main/java/org/lucares/pdb/datastore/lang/ErrorListener.java index 7afc64e..571d50e 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/lang/ErrorListener.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/lang/ErrorListener.java @@ -6,14 +6,14 @@ import org.antlr.v4.runtime.Recognizer; public class ErrorListener extends 
BaseErrorListener { - @Override - public void syntaxError(final Recognizer recognizer, final Object offendingSymbol, final int line, - final int charPositionInLine, final String msg, final RecognitionException e) { + @Override + public void syntaxError(final Recognizer recognizer, final Object offendingSymbol, final int line, + final int charPositionInLine, final String msg, final RecognitionException e) { - final int lineStart = line; - final int startIndex = charPositionInLine; - final int lineStop = line; - final int stopIndex = charPositionInLine; - throw new SyntaxException(msg, lineStart, startIndex, lineStop, stopIndex); - } + final int lineStart = line; + final int startIndex = charPositionInLine; + final int lineStop = line; + final int stopIndex = charPositionInLine; + throw new SyntaxException(msg, lineStart, startIndex, lineStop, stopIndex); + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/lang/Expression.java b/data-store/src/main/java/org/lucares/pdb/datastore/lang/Expression.java index aa320f4..5ffe8fa 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/lang/Expression.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/lang/Expression.java @@ -9,717 +9,717 @@ import org.lucares.utils.Preconditions; abstract public class Expression { - public T visit(final ExpressionVisitor visitor) { - throw new UnsupportedOperationException(); - } - - boolean containsCaret() { - throw new UnsupportedOperationException(); - } - - abstract static class TemporaryExpression extends Expression { - - abstract Expression toExpression(Expression left, Expression right); - } - - public static MatchAll matchAll() { - return MatchAll.INSTANCE; - } - - static class OrTemporary extends TemporaryExpression { - - @Override - Expression toExpression(final Expression left, final Expression right) { - return new Or(left, right); - } - - @Override - public String toString() { - return "OrTemporary"; - } - } - - static class AndTemporary 
extends TemporaryExpression { - @Override - Expression toExpression(final Expression left, final Expression right) { - return new And(left, right); - } - - @Override - public String toString() { - return "AndTemporary"; - } - } - - static class Not extends Expression { - private final Expression expression; - - Not(final Expression expression) { - this.expression = expression; - } - - @Override - public T visit(final ExpressionVisitor visitor) { - return visitor.visit(this); - } - - Expression getExpression() { - return expression; - } - - @Override - public String toString() { - return "!" + expression; - } - - @Override - boolean containsCaret() { - return expression.containsCaret(); - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((expression == null) ? 0 : expression.hashCode()); - return result; - } - - @Override - public boolean equals(final Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - final Not other = (Not) obj; - if (expression == null) { - if (other.expression != null) { - return false; - } - } else if (!expression.equals(other.expression)) { - return false; - } - return true; - } - - } - - static class Or extends Expression { - private final Expression left; - private final Expression right; - - Or(final Expression left, final Expression right) { - this.left = left; - this.right = right; - } - - @Override - public T visit(final ExpressionVisitor visitor) { - return visitor.visit(this); - } - - Expression getLeft() { - return left; - } - - Expression getRight() { - return right; - } - - @Override - public String toString() { - - return "(" + left + " or " + right + ")"; - } - - @Override - boolean containsCaret() { - return left.containsCaret() || right.containsCaret(); - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + 
((left == null) ? 0 : left.hashCode()); - result = prime * result + ((right == null) ? 0 : right.hashCode()); - return result; - } - - @Override - public boolean equals(final Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - final Or other = (Or) obj; - if (left == null) { - if (other.left != null) { - return false; - } - } else if (!left.equals(other.left)) { - return false; - } - if (right == null) { - if (other.right != null) { - return false; - } - } else if (!right.equals(other.right)) { - return false; - } - return true; - } - - public static Expression create(final List or) { - - if (or.size() == 1) { - return or.get(0); - } else { - Or result = new Or(or.get(0), or.get(1)); - for (int i = 2; i < or.size(); i++) { - result = new Or(result, or.get(i)); - } - return result; - } - } - - } - - static class And extends Expression { - private final Expression left; - private final Expression right; - - And(final Expression left, final Expression right) { - this.left = left; - this.right = right; - } - - @Override - public T visit(final ExpressionVisitor visitor) { - return visitor.visit(this); - } - - Expression getLeft() { - return left; - } - - Expression getRight() { - return right; - } - - @Override - public String toString() { - - return "(" + left + " and " + right + ")"; - } - - @Override - boolean containsCaret() { - return left.containsCaret() || right.containsCaret(); - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((left == null) ? 0 : left.hashCode()); - result = prime * result + ((right == null) ? 
0 : right.hashCode()); - return result; - } - - @Override - public boolean equals(final Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - final And other = (And) obj; - if (left == null) { - if (other.left != null) { - return false; - } - } else if (!left.equals(other.left)) { - return false; - } - if (right == null) { - if (other.right != null) { - return false; - } - } else if (!right.equals(other.right)) { - return false; - } - return true; - } - - } - - static class MatchAll extends Expression { - - public static final MatchAll INSTANCE = new MatchAll(); - - private MatchAll() { - // - } - - @Override - public T visit(final ExpressionVisitor visitor) { - return visitor.visit(this); - } - - @Override - public String toString() { - - return "true"; - } - } - - static class Terminal extends Expression { - private final String value; - - Terminal(final String value) { - - this.value = value; - } - - @Override - public T visit(final ExpressionVisitor visitor) { - return visitor.visit(this); - } - - @Override - public String toString() { - - return value; - } - - @Override - boolean containsCaret() { - return value.contains(NewProposerParser.CARET_MARKER); - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((value == null) ? 
0 : value.hashCode()); - return result; - } - - @Override - public boolean equals(final Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - final Terminal other = (Terminal) obj; - if (value == null) { - if (other.value != null) { - return false; - } - } else if (!value.equals(other.value)) { - return false; - } - return true; - } - - public String getValue() { - return value; - } - } - - static class Property extends Expression { - final String field; - final Terminal value; - - public Property(final String field, final Terminal value) { - this.field = field; - this.value = value; - } - - @Override - public T visit(final ExpressionVisitor visitor) { - return visitor.visit(this); - } - - @Override - public String toString() { - - return field + " = " + value.getValue(); - } - - @Override - boolean containsCaret() { - return value.containsCaret(); - } - - public String getField() { - return field; - } - - public Terminal getValue() { - return value; - } - - public String getValueAsString() { - return value.getValue(); - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((field == null) ? 0 : field.hashCode()); - result = prime * result + ((value == null) ? 
0 : value.hashCode()); - return result; - } - - @Override - public boolean equals(final Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - final Property other = (Property) obj; - if (field == null) { - if (other.field != null) - return false; - } else if (!field.equals(other.field)) - return false; - if (value == null) { - if (other.value != null) - return false; - } else if (!value.equals(other.value)) - return false; - return true; - } - - } - - static class Parentheses extends Expression { - private final Expression expression; - - Parentheses(final Expression expression) { - this.expression = expression; - } - - @Override - public T visit(final ExpressionVisitor visitor) { - return visitor.visit(this); - } - - public Expression getExpression() { - return expression; - } - - @Override - public String toString() { - - return "[ " + expression + " ]"; - } - - @Override - boolean containsCaret() { - return expression.containsCaret(); - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((expression == null) ? 
0 : expression.hashCode()); - return result; - } - - @Override - public boolean equals(final Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - final Parentheses other = (Parentheses) obj; - if (expression == null) { - if (other.expression != null) { - return false; - } - } else if (!expression.equals(other.expression)) { - return false; - } - return true; - } - } - - static class ListOfPropertyValues extends Expression { - private final List propertyValues = new ArrayList<>(); - - public ListOfPropertyValues(final Terminal propertyValue) { - propertyValues.add(propertyValue); - } - - public ListOfPropertyValues(final Terminal propertyValue, final ListOfPropertyValues listOfPropertyValues) { - propertyValues.addAll(listOfPropertyValues.propertyValues); - propertyValues.add(propertyValue); - } - - public List getValues() { - return CollectionUtils.map(propertyValues, Terminal::getValue); - } - - @Override - public String toString() { - return "(" + String.join(", ", getValues()) + ")"; - } - - @Override - boolean containsCaret() { - for (final Terminal terminal : propertyValues) { - if (terminal.containsCaret()) { - return true; - } - } - return false; - } - } - - static class InExpression extends Expression { - private final String field; - private final List values; - - public InExpression(final String field, final String value) { - this(field, Arrays.asList(value)); - } - - public InExpression(final String field, final List values) { - this.field = field; - this.values = values; - } - - @Override - public String toString() { - return field + " in (" + String.join(", ", values) + ")"; - } - - @Override - public T visit(final ExpressionVisitor visitor) { - return visitor.visit(this); - } - - public String getProperty() { - return field; - } - - public List getValues() { - return values; - } - - @Override - boolean containsCaret() { - for (final String value : 
values) { - if (value.contains(NewProposerParser.CARET_MARKER)) { - return true; - } - } - return false; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((field == null) ? 0 : field.hashCode()); - result = prime * result + ((values == null) ? 0 : values.hashCode()); - return result; - } - - @Override - public boolean equals(final Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - final InExpression other = (InExpression) obj; - if (field == null) { - if (other.field != null) - return false; - } else if (!field.equals(other.field)) - return false; - if (values == null) { - if (other.values != null) - return false; - } else if (!values.equals(other.values)) - return false; - return true; - } - } - - public static final class AndCaretExpression extends Expression { - Property caretExpression; - Expression expression; - - public AndCaretExpression(final Property caretExpression, final Expression expression) { - Preconditions.checkTrue(caretExpression.containsCaret(), "the expression '{0}' must contain the caret", - caretExpression); - Preconditions.checkFalse(expression.containsCaret(), "the expression '{0}' must not contain the caret", - caretExpression); - this.caretExpression = caretExpression; - this.expression = expression; - } - - @Override - public T visit(final ExpressionVisitor visitor) { - return visitor.visit(this); - } - - @Override - boolean containsCaret() { - - return caretExpression.containsCaret(); - } - - public Property getCaretExpression() { - return caretExpression; - } - - public Expression getExpression() { - return expression; - } - - @Override - public String toString() { - return "(" + caretExpression + " and " + expression + ")"; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((caretExpression == null) ? 
0 : caretExpression.hashCode()); - result = prime * result + ((expression == null) ? 0 : expression.hashCode()); - return result; - } - - @Override - public boolean equals(final Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - final AndCaretExpression other = (AndCaretExpression) obj; - if (caretExpression == null) { - if (other.caretExpression != null) - return false; - } else if (!caretExpression.equals(other.caretExpression)) - return false; - if (expression == null) { - if (other.expression != null) - return false; - } else if (!expression.equals(other.expression)) - return false; - return true; - } - } - - public static final class AndNotCaretExpression extends Expression { - Property negatedCaretExpression; - Expression expression; - - public AndNotCaretExpression(final Property negatedCaretExpression, final Expression expression) { - Preconditions.checkTrue(negatedCaretExpression.containsCaret(), - "the expression '{0}' must contain the caret", negatedCaretExpression); - Preconditions.checkFalse(expression.containsCaret(), "the expression '{0}' must not contain the caret", - expression); - this.negatedCaretExpression = negatedCaretExpression; - this.expression = expression; - } - - @Override - public T visit(final ExpressionVisitor visitor) { - return visitor.visit(this); - } - - @Override - boolean containsCaret() { - - return negatedCaretExpression.containsCaret(); - } - - public Property getCaretExpression() { - return negatedCaretExpression; - } - - public Expression getExpression() { - return expression; - } - - @Override - public String toString() { - return "(!" 
+ negatedCaretExpression + " and " + expression + ")"; - } - } - - public static final class CaretAndExpression extends Expression { - - private final Property caretExpression; - private final Property otherExpression; - - public CaretAndExpression(final Property caretExpression, final Property otherExpression) { - this.caretExpression = caretExpression; - this.otherExpression = otherExpression; - } - - @Override - public T visit(final ExpressionVisitor visitor) { - return super.visit(visitor); - } - - @Override - boolean containsCaret() { - Preconditions.checkTrue(caretExpression.containsCaret(), - "CaretAndExpression must contain the caret, but was: {0}", this); - return caretExpression.containsCaret(); - } - - @Override - public String toString() { - return "(caretAnd: " + caretExpression + " and " + otherExpression + ")"; - } - - public Property getCaretExpression() { - return caretExpression; - } - - public Property getOtherExpression() { - return otherExpression; - } - } + public T visit(final ExpressionVisitor visitor) { + throw new UnsupportedOperationException(); + } + + boolean containsCaret() { + throw new UnsupportedOperationException(); + } + + abstract static class TemporaryExpression extends Expression { + + abstract Expression toExpression(Expression left, Expression right); + } + + public static MatchAll matchAll() { + return MatchAll.INSTANCE; + } + + static class OrTemporary extends TemporaryExpression { + + @Override + Expression toExpression(final Expression left, final Expression right) { + return new Or(left, right); + } + + @Override + public String toString() { + return "OrTemporary"; + } + } + + static class AndTemporary extends TemporaryExpression { + @Override + Expression toExpression(final Expression left, final Expression right) { + return new And(left, right); + } + + @Override + public String toString() { + return "AndTemporary"; + } + } + + static class Not extends Expression { + private final Expression expression; + + Not(final 
Expression expression) { + this.expression = expression; + } + + @Override + public T visit(final ExpressionVisitor visitor) { + return visitor.visit(this); + } + + Expression getExpression() { + return expression; + } + + @Override + public String toString() { + return "!" + expression; + } + + @Override + boolean containsCaret() { + return expression.containsCaret(); + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((expression == null) ? 0 : expression.hashCode()); + return result; + } + + @Override + public boolean equals(final Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final Not other = (Not) obj; + if (expression == null) { + if (other.expression != null) { + return false; + } + } else if (!expression.equals(other.expression)) { + return false; + } + return true; + } + + } + + static class Or extends Expression { + private final Expression left; + private final Expression right; + + Or(final Expression left, final Expression right) { + this.left = left; + this.right = right; + } + + @Override + public T visit(final ExpressionVisitor visitor) { + return visitor.visit(this); + } + + Expression getLeft() { + return left; + } + + Expression getRight() { + return right; + } + + @Override + public String toString() { + + return "(" + left + " or " + right + ")"; + } + + @Override + boolean containsCaret() { + return left.containsCaret() || right.containsCaret(); + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((left == null) ? 0 : left.hashCode()); + result = prime * result + ((right == null) ? 
0 : right.hashCode()); + return result; + } + + @Override + public boolean equals(final Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final Or other = (Or) obj; + if (left == null) { + if (other.left != null) { + return false; + } + } else if (!left.equals(other.left)) { + return false; + } + if (right == null) { + if (other.right != null) { + return false; + } + } else if (!right.equals(other.right)) { + return false; + } + return true; + } + + public static Expression create(final List or) { + + if (or.size() == 1) { + return or.get(0); + } else { + Or result = new Or(or.get(0), or.get(1)); + for (int i = 2; i < or.size(); i++) { + result = new Or(result, or.get(i)); + } + return result; + } + } + + } + + static class And extends Expression { + private final Expression left; + private final Expression right; + + And(final Expression left, final Expression right) { + this.left = left; + this.right = right; + } + + @Override + public T visit(final ExpressionVisitor visitor) { + return visitor.visit(this); + } + + Expression getLeft() { + return left; + } + + Expression getRight() { + return right; + } + + @Override + public String toString() { + + return "(" + left + " and " + right + ")"; + } + + @Override + boolean containsCaret() { + return left.containsCaret() || right.containsCaret(); + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((left == null) ? 0 : left.hashCode()); + result = prime * result + ((right == null) ? 
0 : right.hashCode()); + return result; + } + + @Override + public boolean equals(final Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final And other = (And) obj; + if (left == null) { + if (other.left != null) { + return false; + } + } else if (!left.equals(other.left)) { + return false; + } + if (right == null) { + if (other.right != null) { + return false; + } + } else if (!right.equals(other.right)) { + return false; + } + return true; + } + + } + + static class MatchAll extends Expression { + + public static final MatchAll INSTANCE = new MatchAll(); + + private MatchAll() { + // + } + + @Override + public T visit(final ExpressionVisitor visitor) { + return visitor.visit(this); + } + + @Override + public String toString() { + + return "true"; + } + } + + static class Terminal extends Expression { + private final String value; + + Terminal(final String value) { + + this.value = value; + } + + @Override + public T visit(final ExpressionVisitor visitor) { + return visitor.visit(this); + } + + @Override + public String toString() { + + return value; + } + + @Override + boolean containsCaret() { + return value.contains(NewProposerParser.CARET_MARKER); + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((value == null) ? 
0 : value.hashCode()); + return result; + } + + @Override + public boolean equals(final Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final Terminal other = (Terminal) obj; + if (value == null) { + if (other.value != null) { + return false; + } + } else if (!value.equals(other.value)) { + return false; + } + return true; + } + + public String getValue() { + return value; + } + } + + static class Property extends Expression { + final String field; + final Terminal value; + + public Property(final String field, final Terminal value) { + this.field = field; + this.value = value; + } + + @Override + public T visit(final ExpressionVisitor visitor) { + return visitor.visit(this); + } + + @Override + public String toString() { + + return field + " = " + value.getValue(); + } + + @Override + boolean containsCaret() { + return value.containsCaret(); + } + + public String getField() { + return field; + } + + public Terminal getValue() { + return value; + } + + public String getValueAsString() { + return value.getValue(); + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((field == null) ? 0 : field.hashCode()); + result = prime * result + ((value == null) ? 
0 : value.hashCode()); + return result; + } + + @Override + public boolean equals(final Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + final Property other = (Property) obj; + if (field == null) { + if (other.field != null) + return false; + } else if (!field.equals(other.field)) + return false; + if (value == null) { + if (other.value != null) + return false; + } else if (!value.equals(other.value)) + return false; + return true; + } + + } + + static class Parentheses extends Expression { + private final Expression expression; + + Parentheses(final Expression expression) { + this.expression = expression; + } + + @Override + public T visit(final ExpressionVisitor visitor) { + return visitor.visit(this); + } + + public Expression getExpression() { + return expression; + } + + @Override + public String toString() { + + return "[ " + expression + " ]"; + } + + @Override + boolean containsCaret() { + return expression.containsCaret(); + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((expression == null) ? 
0 : expression.hashCode()); + return result; + } + + @Override + public boolean equals(final Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final Parentheses other = (Parentheses) obj; + if (expression == null) { + if (other.expression != null) { + return false; + } + } else if (!expression.equals(other.expression)) { + return false; + } + return true; + } + } + + static class ListOfPropertyValues extends Expression { + private final List propertyValues = new ArrayList<>(); + + public ListOfPropertyValues(final Terminal propertyValue) { + propertyValues.add(propertyValue); + } + + public ListOfPropertyValues(final Terminal propertyValue, final ListOfPropertyValues listOfPropertyValues) { + propertyValues.addAll(listOfPropertyValues.propertyValues); + propertyValues.add(propertyValue); + } + + public List getValues() { + return CollectionUtils.map(propertyValues, Terminal::getValue); + } + + @Override + public String toString() { + return "(" + String.join(", ", getValues()) + ")"; + } + + @Override + boolean containsCaret() { + for (final Terminal terminal : propertyValues) { + if (terminal.containsCaret()) { + return true; + } + } + return false; + } + } + + static class InExpression extends Expression { + private final String field; + private final List values; + + public InExpression(final String field, final String value) { + this(field, Arrays.asList(value)); + } + + public InExpression(final String field, final List values) { + this.field = field; + this.values = values; + } + + @Override + public String toString() { + return field + " in (" + String.join(", ", values) + ")"; + } + + @Override + public T visit(final ExpressionVisitor visitor) { + return visitor.visit(this); + } + + public String getProperty() { + return field; + } + + public List getValues() { + return values; + } + + @Override + boolean containsCaret() { + for (final String value : 
values) { + if (value.contains(NewProposerParser.CARET_MARKER)) { + return true; + } + } + return false; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((field == null) ? 0 : field.hashCode()); + result = prime * result + ((values == null) ? 0 : values.hashCode()); + return result; + } + + @Override + public boolean equals(final Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + final InExpression other = (InExpression) obj; + if (field == null) { + if (other.field != null) + return false; + } else if (!field.equals(other.field)) + return false; + if (values == null) { + if (other.values != null) + return false; + } else if (!values.equals(other.values)) + return false; + return true; + } + } + + public static final class AndCaretExpression extends Expression { + Property caretExpression; + Expression expression; + + public AndCaretExpression(final Property caretExpression, final Expression expression) { + Preconditions.checkTrue(caretExpression.containsCaret(), "the expression '{0}' must contain the caret", + caretExpression); + Preconditions.checkFalse(expression.containsCaret(), "the expression '{0}' must not contain the caret", + caretExpression); + this.caretExpression = caretExpression; + this.expression = expression; + } + + @Override + public T visit(final ExpressionVisitor visitor) { + return visitor.visit(this); + } + + @Override + boolean containsCaret() { + + return caretExpression.containsCaret(); + } + + public Property getCaretExpression() { + return caretExpression; + } + + public Expression getExpression() { + return expression; + } + + @Override + public String toString() { + return "(" + caretExpression + " and " + expression + ")"; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((caretExpression == null) ? 
0 : caretExpression.hashCode()); + result = prime * result + ((expression == null) ? 0 : expression.hashCode()); + return result; + } + + @Override + public boolean equals(final Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + final AndCaretExpression other = (AndCaretExpression) obj; + if (caretExpression == null) { + if (other.caretExpression != null) + return false; + } else if (!caretExpression.equals(other.caretExpression)) + return false; + if (expression == null) { + if (other.expression != null) + return false; + } else if (!expression.equals(other.expression)) + return false; + return true; + } + } + + public static final class AndNotCaretExpression extends Expression { + Property negatedCaretExpression; + Expression expression; + + public AndNotCaretExpression(final Property negatedCaretExpression, final Expression expression) { + Preconditions.checkTrue(negatedCaretExpression.containsCaret(), + "the expression '{0}' must contain the caret", negatedCaretExpression); + Preconditions.checkFalse(expression.containsCaret(), "the expression '{0}' must not contain the caret", + expression); + this.negatedCaretExpression = negatedCaretExpression; + this.expression = expression; + } + + @Override + public T visit(final ExpressionVisitor visitor) { + return visitor.visit(this); + } + + @Override + boolean containsCaret() { + + return negatedCaretExpression.containsCaret(); + } + + public Property getCaretExpression() { + return negatedCaretExpression; + } + + public Expression getExpression() { + return expression; + } + + @Override + public String toString() { + return "(!" 
+ negatedCaretExpression + " and " + expression + ")"; + } + } + + public static final class CaretAndExpression extends Expression { + + private final Property caretExpression; + private final Property otherExpression; + + public CaretAndExpression(final Property caretExpression, final Property otherExpression) { + this.caretExpression = caretExpression; + this.otherExpression = otherExpression; + } + + @Override + public T visit(final ExpressionVisitor visitor) { + return super.visit(visitor); + } + + @Override + boolean containsCaret() { + Preconditions.checkTrue(caretExpression.containsCaret(), + "CaretAndExpression must contain the caret, but was: {0}", this); + return caretExpression.containsCaret(); + } + + @Override + public String toString() { + return "(caretAnd: " + caretExpression + " and " + otherExpression + ")"; + } + + public Property getCaretExpression() { + return caretExpression; + } + + public Property getOtherExpression() { + return otherExpression; + } + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/lang/ExpressionToDocIdVisitor.java b/data-store/src/main/java/org/lucares/pdb/datastore/lang/ExpressionToDocIdVisitor.java index f2c82c2..4c4187d 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/lang/ExpressionToDocIdVisitor.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/lang/ExpressionToDocIdVisitor.java @@ -26,179 +26,180 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class ExpressionToDocIdVisitor extends ExpressionVisitor { - private static final Logger LOGGER = LoggerFactory.getLogger(ExpressionToDocIdVisitor.class); + private static final Logger LOGGER = LoggerFactory.getLogger(ExpressionToDocIdVisitor.class); - private final PartitionPersistentMap keyToValueToDocId; - private final PartitionDiskStore diskStorage; + private final PartitionPersistentMap keyToValueToDocId; + private final PartitionDiskStore diskStorage; - private final DatePartitioner datePartitioner; + 
private final DatePartitioner datePartitioner; - public ExpressionToDocIdVisitor(final DateTimeRange dateRange, - final PartitionPersistentMap keyToValueToDocsId, final PartitionDiskStore diskStorage) { - this.datePartitioner = new DatePartitioner(dateRange); - this.keyToValueToDocId = keyToValueToDocsId; - this.diskStorage = diskStorage; - } + public ExpressionToDocIdVisitor(final DateTimeRange dateRange, + final PartitionPersistentMap keyToValueToDocsId, final PartitionDiskStore diskStorage) { + this.datePartitioner = new DatePartitioner(dateRange); + this.keyToValueToDocId = keyToValueToDocsId; + this.diskStorage = diskStorage; + } - @Override - public PartitionLongList visit(final And expression) { - final Expression left = expression.getLeft(); - final Expression right = expression.getRight(); + @Override + public PartitionLongList visit(final And expression) { + final Expression left = expression.getLeft(); + final Expression right = expression.getRight(); - final PartitionLongList leftFiles = left.visit(this); - final PartitionLongList rightFiles = right.visit(this); + final PartitionLongList leftFiles = left.visit(this); + final PartitionLongList rightFiles = right.visit(this); - final long start = System.nanoTime(); - final PartitionLongList result = PartitionLongList.intersection(leftFiles, rightFiles); - LOGGER.trace("and: {} took {} ms results={}", expression, (System.nanoTime() - start) / 1_000_000.0, - result.size()); - assert result.isSorted(); + final long start = System.nanoTime(); + final PartitionLongList result = PartitionLongList.intersection(leftFiles, rightFiles); + LOGGER.trace("and: {} took {} ms results={}", expression, (System.nanoTime() - start) / 1_000_000.0, + result.size()); + assert result.isSorted(); - return result; - } + return result; + } - @Override - public PartitionLongList visit(final Or expression) { - final Expression left = expression.getLeft(); - final Expression right = expression.getRight(); + @Override + public 
PartitionLongList visit(final Or expression) { + final Expression left = expression.getLeft(); + final Expression right = expression.getRight(); - final PartitionLongList leftFiles = left.visit(this); - final PartitionLongList rightFiles = right.visit(this); - final long start = System.nanoTime(); - final PartitionLongList result = PartitionLongList.union(leftFiles, rightFiles); - LOGGER.trace("or: {} took {} ms results={}", expression, (System.nanoTime() - start) / 1_000_000.0, - result.size()); - assert result.isSorted(); + final PartitionLongList leftFiles = left.visit(this); + final PartitionLongList rightFiles = right.visit(this); + final long start = System.nanoTime(); + final PartitionLongList result = PartitionLongList.union(leftFiles, rightFiles); + LOGGER.trace("or: {} took {} ms results={}", expression, (System.nanoTime() - start) / 1_000_000.0, + result.size()); + assert result.isSorted(); - return result; - } + return result; + } - @Override - public PartitionLongList visit(final Not expression) { + @Override + public PartitionLongList visit(final Not expression) { - final Expression negatedExpression = expression.getExpression(); - final PartitionLongList docIdsToBeNegated = negatedExpression.visit(this); - final long start = System.nanoTime(); + final Expression negatedExpression = expression.getExpression(); + final PartitionLongList docIdsToBeNegated = negatedExpression.visit(this); + final long start = System.nanoTime(); - final PartitionLongList result = getAllDocIds(); - result.removeAll(docIdsToBeNegated); + final PartitionLongList result = getAllDocIds(); + result.removeAll(docIdsToBeNegated); - LOGGER.trace("not: {} took {} ms results={}", expression, (System.nanoTime() - start) / 1_000_000.0, - result.size()); + LOGGER.trace("not: {} took {} ms results={}", expression, (System.nanoTime() - start) / 1_000_000.0, + result.size()); - return result; - } + return result; + } - @Override - public PartitionLongList visit(final Parentheses 
parentheses) { + @Override + public PartitionLongList visit(final Parentheses parentheses) { - throw new UnsupportedOperationException( - "Parenthesis not supported. The correct order should come from the parser."); - } + throw new UnsupportedOperationException( + "Parenthesis not supported. The correct order should come from the parser."); + } - @Override - public PartitionLongList visit(final Expression.MatchAll expression) { - final long start = System.nanoTime(); - final PartitionLongList result = getAllDocIds(); - LOGGER.trace("matchAll: {} took {} ms results={}", expression, (System.nanoTime() - start) / 1_000_000.0, - result.size()); - return result; - } + @Override + public PartitionLongList visit(final Expression.MatchAll expression) { + final long start = System.nanoTime(); + final PartitionLongList result = getAllDocIds(); + LOGGER.trace("matchAll: {} took {} ms results={}", expression, (System.nanoTime() - start) / 1_000_000.0, + result.size()); + return result; + } - @Override - public PartitionLongList visit(final Expression.InExpression expression) { - final long start = System.nanoTime(); + @Override + public PartitionLongList visit(final Expression.InExpression expression) { + final long start = System.nanoTime(); - final String propertyName = expression.getProperty(); - final List values = expression.getValues(); + final String propertyName = expression.getProperty(); + final List values = expression.getValues(); - PartitionLongList result = new PartitionLongList(); + PartitionLongList result = new PartitionLongList(); - for (final String value : values) { + for (final String value : values) { - final PartitionLongList docIds = filterByWildcard(propertyName, GloblikePattern.globlikeToRegex(value)); - result = PartitionLongList.union(result, docIds); - } + final PartitionLongList docIds = filterByWildcard(propertyName, GloblikePattern.globlikeToRegex(value)); + result = PartitionLongList.union(result, docIds); + } - LOGGER.trace("in: {} took {} ms 
results={}", expression, (System.nanoTime() - start) / 1_000_000.0, - result.size()); - return result; - } + LOGGER.trace("in: {} took {} ms results={}", expression, (System.nanoTime() - start) / 1_000_000.0, + result.size()); + return result; + } - private PartitionLongList getAllDocIds() { - final PartitionLongList result = new PartitionLongList(); - final Set availablePartitionIds = keyToValueToDocId.getAvailablePartitionIds(datePartitioner); - for (final ParititionId partitionId : availablePartitionIds) { + private PartitionLongList getAllDocIds() { + final PartitionLongList result = new PartitionLongList(); + final Set availablePartitionIds = keyToValueToDocId.getAvailablePartitionIds(datePartitioner); + for (final ParititionId partitionId : availablePartitionIds) { - final Long blockOffset = keyToValueToDocId.getValue(partitionId, DataStore.TAG_ALL_DOCS); + final Long blockOffset = keyToValueToDocId.getValue(partitionId, DataStore.TAG_ALL_DOCS); - if (blockOffset != null) { - final LongStreamFile bsFile = diskStorage.streamExistingFile(blockOffset, partitionId); - final LongList tmp = bsFile.asLongList(); - result.put(partitionId, tmp); - } - } - return result; - } + if (blockOffset != null) { + final LongStreamFile bsFile = diskStorage.streamExistingFile(blockOffset, partitionId); + final LongList tmp = bsFile.asLongList(); + result.put(partitionId, tmp); + } + } + return result; + } - private PartitionLongList filterByWildcard(final String propertyName, final Pattern valuePattern) { - final PartitionLongList result = new PartitionLongList(); + private PartitionLongList filterByWildcard(final String propertyName, final Pattern valuePattern) { + final PartitionLongList result = new PartitionLongList(); - final long start = System.nanoTime(); - final Set availablePartitionIds = keyToValueToDocId.getAvailablePartitionIds(datePartitioner); - for (final ParititionId partitionId : availablePartitionIds) { - final List docIdsForPartition = new ArrayList<>(); - 
keyToValueToDocId.visitValues(partitionId, new Tag(propertyName, ""), (tags, blockOffsetToDocIds) -> { - if (valuePattern.matcher(tags.getValueAsString()).matches()) { - try (final LongStreamFile bsFile = diskStorage.streamExistingFile(blockOffsetToDocIds, partitionId)) { + final long start = System.nanoTime(); + final Set availablePartitionIds = keyToValueToDocId.getAvailablePartitionIds(datePartitioner); + for (final ParititionId partitionId : availablePartitionIds) { + final List docIdsForPartition = new ArrayList<>(); + keyToValueToDocId.visitValues(partitionId, new Tag(propertyName, ""), (tags, blockOffsetToDocIds) -> { + if (valuePattern.matcher(tags.getValueAsString()).matches()) { + try (final LongStreamFile bsFile = diskStorage.streamExistingFile(blockOffsetToDocIds, + partitionId)) { - // We know that all LongLists coming from a BSFile are sorted, non-overlapping - // and increasing, that means we can just concatenate them and get a sorted - // list. - final List longLists = bsFile.streamOfLongLists().collect(Collectors.toList()); - final LongList concatenatedLists = concatenateLists(longLists); + // We know that all LongLists coming from a BSFile are sorted, non-overlapping + // and increasing, that means we can just concatenate them and get a sorted + // list. + final List longLists = bsFile.streamOfLongLists().collect(Collectors.toList()); + final LongList concatenatedLists = concatenateLists(longLists); - Preconditions.checkTrue(concatenatedLists.isSorted(), - "The LongLists containing document ids must be sorted, " - + "non-overlapping and increasing, so that the concatenation " - + "is sorted. This is guaranteed by the fact that document ids " - + "are generated in monotonically increasing order."); + Preconditions.checkTrue(concatenatedLists.isSorted(), + "The LongLists containing document ids must be sorted, " + + "non-overlapping and increasing, so that the concatenation " + + "is sorted. 
This is guaranteed by the fact that document ids " + + "are generated in monotonically increasing order."); - docIdsForPartition.add(concatenatedLists); - } - } - }); + docIdsForPartition.add(concatenatedLists); + } + } + }); - final LongList mergedDocsIdsForPartition = merge(docIdsForPartition); - result.put(partitionId, mergedDocsIdsForPartition); - } + final LongList mergedDocsIdsForPartition = merge(docIdsForPartition); + result.put(partitionId, mergedDocsIdsForPartition); + } - LOGGER.trace("filterByWildcard: for key {} took {}ms", propertyName, (System.nanoTime() - start) / 1_000_000.0); + LOGGER.trace("filterByWildcard: for key {} took {}ms", propertyName, (System.nanoTime() - start) / 1_000_000.0); - return result; - } + return result; + } - private LongList merge(final Collection lists) { + private LongList merge(final Collection lists) { - LongList result = new LongList(); + LongList result = new LongList(); - for (final LongList list : lists) { - result = LongList.union(result, list); - } + for (final LongList list : lists) { + result = LongList.union(result, list); + } - return result; - } + return result; + } - private static LongList concatenateLists(final Collection lists) { + private static LongList concatenateLists(final Collection lists) { - final int totalSize = lists.stream().mapToInt(LongList::size).sum(); - final LongList result = new LongList(totalSize); + final int totalSize = lists.stream().mapToInt(LongList::size).sum(); + final LongList result = new LongList(totalSize); - for (final LongList list : lists) { - result.addAll(list); - } + for (final LongList list : lists) { + result.addAll(list); + } - return result; + return result; - } + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/lang/ExpressionVisitor.java b/data-store/src/main/java/org/lucares/pdb/datastore/lang/ExpressionVisitor.java index 7447609..828318c 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/lang/ExpressionVisitor.java +++ 
b/data-store/src/main/java/org/lucares/pdb/datastore/lang/ExpressionVisitor.java @@ -1,47 +1,47 @@ package org.lucares.pdb.datastore.lang; public abstract class ExpressionVisitor { - public T visit(final Expression.And expression) { - throw new UnsupportedOperationException(); - } + public T visit(final Expression.And expression) { + throw new UnsupportedOperationException(); + } - public T visit(final Expression.Or expression) { - throw new UnsupportedOperationException(); - } + public T visit(final Expression.Or expression) { + throw new UnsupportedOperationException(); + } - public T visit(final Expression.Not expression) { - throw new UnsupportedOperationException(); - } + public T visit(final Expression.Not expression) { + throw new UnsupportedOperationException(); + } - public T visit(final Expression.Property expression) { - throw new UnsupportedOperationException(); - } + public T visit(final Expression.Property expression) { + throw new UnsupportedOperationException(); + } - public T visit(final Expression.Terminal expression) { - throw new UnsupportedOperationException(); - } + public T visit(final Expression.Terminal expression) { + throw new UnsupportedOperationException(); + } - public T visit(final Expression.MatchAll expression) { - throw new UnsupportedOperationException(); - } + public T visit(final Expression.MatchAll expression) { + throw new UnsupportedOperationException(); + } - public T visit(final Expression.InExpression expression) { - throw new UnsupportedOperationException(); - } + public T visit(final Expression.InExpression expression) { + throw new UnsupportedOperationException(); + } - public T visit(final Expression.Parentheses parentheses) { - throw new UnsupportedOperationException(); - } + public T visit(final Expression.Parentheses parentheses) { + throw new UnsupportedOperationException(); + } - public T visit(final Expression.AndCaretExpression expression) { - throw new UnsupportedOperationException(); - } + public T visit(final 
Expression.AndCaretExpression expression) { + throw new UnsupportedOperationException(); + } - public T visit(final Expression.AndNotCaretExpression expression) { - throw new UnsupportedOperationException(); - } + public T visit(final Expression.AndNotCaretExpression expression) { + throw new UnsupportedOperationException(); + } - public T visit(final Expression.CaretAndExpression expression) { - throw new UnsupportedOperationException(); - } + public T visit(final Expression.CaretAndExpression expression) { + throw new UnsupportedOperationException(); + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/lang/FindValuesForQueryCompletion.java b/data-store/src/main/java/org/lucares/pdb/datastore/lang/FindValuesForQueryCompletion.java index a5cc922..91d24d4 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/lang/FindValuesForQueryCompletion.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/lang/FindValuesForQueryCompletion.java @@ -22,278 +22,278 @@ import org.slf4j.LoggerFactory; public class FindValuesForQueryCompletion extends ExpressionVisitor> { - private static final Logger METRIC_AND_CARET_LOGGER = LoggerFactory - .getLogger("org.lucares.metrics.queryCompletion.expressionEvaluation.andCaret"); + private static final Logger METRIC_AND_CARET_LOGGER = LoggerFactory + .getLogger("org.lucares.metrics.queryCompletion.expressionEvaluation.andCaret"); - private static final Logger METRIC_LOGGER = LoggerFactory - .getLogger("org.lucares.metrics.queryCompletion.expressionEvaluation"); + private static final Logger METRIC_LOGGER = LoggerFactory + .getLogger("org.lucares.metrics.queryCompletion.expressionEvaluation"); - private static final class AndCaretExpressionVisitor extends ExpressionVisitor> { - private final QueryCompletionIndex index; - private final String field; - private final DateTimeRange dateTimeRange; + private static final class AndCaretExpressionVisitor extends ExpressionVisitor> { + private final 
QueryCompletionIndex index; + private final String field; + private final DateTimeRange dateTimeRange; - public AndCaretExpressionVisitor(final DateTimeRange dateTimeRange, - final QueryCompletionIndex queryCompletionIndex, final String field) { - this.dateTimeRange = dateTimeRange; - index = queryCompletionIndex; - this.field = field; - } + public AndCaretExpressionVisitor(final DateTimeRange dateTimeRange, + final QueryCompletionIndex queryCompletionIndex, final String field) { + this.dateTimeRange = dateTimeRange; + index = queryCompletionIndex; + this.field = field; + } - @Override - public SortedSet visit(final Property property) { - final long start = System.nanoTime(); - final SortedSet result; + @Override + public SortedSet visit(final Property property) { + final long start = System.nanoTime(); + final SortedSet result; - final String fieldA = property.getField(); - final String valueA = property.getValue().getValue(); + final String fieldA = property.getField(); + final String valueA = property.getValue().getValue(); - final boolean hasField = index.hasField(dateTimeRange, fieldA); - if (hasField) { + final boolean hasField = index.hasField(dateTimeRange, fieldA); + if (hasField) { - final SortedSet allValuesForField = index.findAllValuesForField(dateTimeRange, fieldA); - final SortedSet valuesA = GloblikePattern.filterValues(allValuesForField, valueA, TreeSet::new); + final SortedSet allValuesForField = index.findAllValuesForField(dateTimeRange, fieldA); + final SortedSet valuesA = GloblikePattern.filterValues(allValuesForField, valueA, TreeSet::new); - final double valueInFieldAMatchPercentage = valuesA.size() / (double) allValuesForField.size(); - final boolean useMultiFetch = valuesA.size() <= 1 || valueInFieldAMatchPercentage < 0.5; // 50% was - // chosen - // arbitrarily - if (useMultiFetch) { - result = new TreeSet<>(); + final double valueInFieldAMatchPercentage = valuesA.size() / (double) allValuesForField.size(); + final boolean useMultiFetch = 
valuesA.size() <= 1 || valueInFieldAMatchPercentage < 0.5; // 50% was + // chosen + // arbitrarily + if (useMultiFetch) { + result = new TreeSet<>(); - for (final String v : valuesA) { - final Tag tagA = new Tag(fieldA, v); - final SortedSet tmp = index.find(dateTimeRange, tagA, field); - result.addAll(tmp); - } - } else { - result = index.find(dateTimeRange, fieldA, new GlobMatcher(valueA), field); - } + for (final String v : valuesA) { + final Tag tagA = new Tag(fieldA, v); + final SortedSet tmp = index.find(dateTimeRange, tagA, field); + result.addAll(tmp); + } + } else { + result = index.find(dateTimeRange, fieldA, new GlobMatcher(valueA), field); + } - METRIC_AND_CARET_LOGGER.debug("{}: {} and {}=???: {}ms matches in fieldA {} ({}%)", - useMultiFetch ? "multi-fetch" : "single-fetch", property, field, - (System.nanoTime() - start) / 1_000_000.0, valuesA.size(), valueInFieldAMatchPercentage * 100); + METRIC_AND_CARET_LOGGER.debug("{}: {} and {}=???: {}ms matches in fieldA {} ({}%)", + useMultiFetch ? 
"multi-fetch" : "single-fetch", property, field, + (System.nanoTime() - start) / 1_000_000.0, valuesA.size(), valueInFieldAMatchPercentage * 100); - } else { - result = new TreeSet<>(); - } - return result; - } + } else { + result = new TreeSet<>(); + } + return result; + } - @Override - public SortedSet visit(final InExpression expression) { - final long start = System.nanoTime(); - final SortedSet result; - final String fieldA = expression.getProperty(); - final List values = expression.getValues(); + @Override + public SortedSet visit(final InExpression expression) { + final long start = System.nanoTime(); + final SortedSet result; + final String fieldA = expression.getProperty(); + final List values = expression.getValues(); - result = index.find(dateTimeRange, fieldA, new GlobMatcher(values), field); + result = index.find(dateTimeRange, fieldA, new GlobMatcher(values), field); - METRIC_AND_CARET_LOGGER.debug("{}: {}ms", expression, (System.nanoTime() - start) / 1_000_000.0); - return result; - } + METRIC_AND_CARET_LOGGER.debug("{}: {}ms", expression, (System.nanoTime() - start) / 1_000_000.0); + return result; + } - @Override - public SortedSet visit(final And expression) { - final long start = System.nanoTime(); - try { - final Expression left = expression.getLeft(); - final Expression right = expression.getRight(); + @Override + public SortedSet visit(final And expression) { + final long start = System.nanoTime(); + try { + final Expression left = expression.getLeft(); + final Expression right = expression.getRight(); - if (left instanceof Property && right instanceof Not) { - final Property leftProperty = (Property) left; + if (left instanceof Property && right instanceof Not) { + final Property leftProperty = (Property) left; - final SortedSet allValuesForField = leftProperty.visit(this); + final SortedSet allValuesForField = leftProperty.visit(this); - final Expression rightInnerExpression = ((Not) right).getExpression(); - final SortedSet rightResult = 
rightInnerExpression.visit(this); + final Expression rightInnerExpression = ((Not) right).getExpression(); + final SortedSet rightResult = rightInnerExpression.visit(this); - return CollectionUtils.removeAll(allValuesForField, rightResult, TreeSet::new); + return CollectionUtils.removeAll(allValuesForField, rightResult, TreeSet::new); - } else { + } else { - final SortedSet result = left.visit(this); - final SortedSet rightResult = right.visit(this); + final SortedSet result = left.visit(this); + final SortedSet rightResult = right.visit(this); - result.retainAll(rightResult); + result.retainAll(rightResult); - return result; - } - } finally { - METRIC_AND_CARET_LOGGER.debug("{}: {}ms", expression, (System.nanoTime() - start) / 1_000_000.0); - } - } + return result; + } + } finally { + METRIC_AND_CARET_LOGGER.debug("{}: {}ms", expression, (System.nanoTime() - start) / 1_000_000.0); + } + } - @Override - public SortedSet visit(final Or expression) { + @Override + public SortedSet visit(final Or expression) { - final long start = System.nanoTime(); - final Expression left = expression.getLeft(); - final Expression right = expression.getRight(); + final long start = System.nanoTime(); + final Expression left = expression.getLeft(); + final Expression right = expression.getRight(); - final SortedSet result = left.visit(this); - final SortedSet rightResult = right.visit(this); + final SortedSet result = left.visit(this); + final SortedSet rightResult = right.visit(this); - result.addAll(rightResult); + result.addAll(rightResult); - METRIC_AND_CARET_LOGGER.debug("{}: {}ms", expression, (System.nanoTime() - start) / 1_000_000.0); - return result; - } + METRIC_AND_CARET_LOGGER.debug("{}: {}ms", expression, (System.nanoTime() - start) / 1_000_000.0); + return result; + } - @Override - public SortedSet visit(final Not expression) { + @Override + public SortedSet visit(final Not expression) { - final long start = System.nanoTime(); - if (!(expression.getExpression() 
instanceof Property)) { - throw new UnsupportedOperationException("NOT expressions like '" + expression - + "' are not supported. Only 'NOT property=value' expressions are supported."); - } + final long start = System.nanoTime(); + if (!(expression.getExpression() instanceof Property)) { + throw new UnsupportedOperationException("NOT expressions like '" + expression + + "' are not supported. Only 'NOT property=value' expressions are supported."); + } - final Property property = (Property) expression.getExpression(); - final Tag tag = new Tag(property.getField(), property.getValueAsString()); + final Property property = (Property) expression.getExpression(); + final Tag tag = new Tag(property.getField(), property.getValueAsString()); - final SortedSet valuesNotForField = index.findAllValuesNotForField(dateTimeRange, tag, field); - final SortedSet valuesForField = index.find(dateTimeRange, tag, field); - final SortedSet valuesOnlyAvailableInField = CollectionUtils.removeAll(valuesForField, - valuesNotForField, TreeSet::new); + final SortedSet valuesNotForField = index.findAllValuesNotForField(dateTimeRange, tag, field); + final SortedSet valuesForField = index.find(dateTimeRange, tag, field); + final SortedSet valuesOnlyAvailableInField = CollectionUtils.removeAll(valuesForField, + valuesNotForField, TreeSet::new); - final SortedSet result = CollectionUtils.removeAll(valuesNotForField, valuesOnlyAvailableInField, - TreeSet::new); + final SortedSet result = CollectionUtils.removeAll(valuesNotForField, valuesOnlyAvailableInField, + TreeSet::new); - METRIC_AND_CARET_LOGGER.debug("{}: {}ms", expression, (System.nanoTime() - start) / 1_000_000.0); - return result; - } - } - - private final QueryCompletionIndex queryCompletionIndex; - - private final DateTimeRange dateRange; - - public FindValuesForQueryCompletion(final DateTimeRange dateRange, - final QueryCompletionIndex queryCompletionIndex) { - this.dateRange = dateRange; - this.queryCompletionIndex = 
queryCompletionIndex; - } - - @Override - public SortedSet visit(final Property property) { - - final long start = System.nanoTime(); - final String field = property.getField(); - final String value = property.getValue().getValue(); - - final SortedSet allValuesForField = queryCompletionIndex.findAllValuesForField(dateRange, field); - - final String valuePrefix; - - if (value.indexOf(NewProposerParser.CARET_MARKER) >= 0) { - valuePrefix = value.substring(0, value.indexOf(NewProposerParser.CARET_MARKER)); - } else { - valuePrefix = value; + METRIC_AND_CARET_LOGGER.debug("{}: {}ms", expression, (System.nanoTime() - start) / 1_000_000.0); + return result; + } } - final TreeSet result = GloblikePattern.filterValues(allValuesForField, valuePrefix, TreeSet::new); - METRIC_LOGGER.debug("{}: {}ms", property, (System.nanoTime() - start) / 1_000_000.0); - return result; - } + private final QueryCompletionIndex queryCompletionIndex; - @Override - public SortedSet visit(final AndCaretExpression expression) { + private final DateTimeRange dateRange; - final long start = System.nanoTime(); - final Property caretExpression = expression.getCaretExpression(); - final String field = caretExpression.getField(); - final String valueWithCaretMarker = caretExpression.getValue().getValue(); - final String valuePrefix = valueWithCaretMarker.substring(0, - valueWithCaretMarker.indexOf(NewProposerParser.CARET_MARKER)); + public FindValuesForQueryCompletion(final DateTimeRange dateRange, + final QueryCompletionIndex queryCompletionIndex) { + this.dateRange = dateRange; + this.queryCompletionIndex = queryCompletionIndex; + } - final Expression rightHandExpression = expression.getExpression(); + @Override + public SortedSet visit(final Property property) { - final SortedSet candidateValues = rightHandExpression - .visit(new AndCaretExpressionVisitor(dateRange, queryCompletionIndex, field)); + final long start = System.nanoTime(); + final String field = property.getField(); + final String value 
= property.getValue().getValue(); - final TreeSet result = GloblikePattern.filterValues(candidateValues, valuePrefix, TreeSet::new); + final SortedSet allValuesForField = queryCompletionIndex.findAllValuesForField(dateRange, field); - METRIC_LOGGER.debug("{}: {}ms", expression, (System.nanoTime() - start) / 1_000_000.0); - return result; - } + final String valuePrefix; - @Override - public SortedSet visit(final AndNotCaretExpression expression) { + if (value.indexOf(NewProposerParser.CARET_MARKER) >= 0) { + valuePrefix = value.substring(0, value.indexOf(NewProposerParser.CARET_MARKER)); + } else { + valuePrefix = value; + } - final long start = System.nanoTime(); - final Property caretExpression = expression.getCaretExpression(); - final String field = caretExpression.getField(); - final String valueWithCaretMarker = caretExpression.getValue().getValue(); - final String valuePattern = valueWithCaretMarker.substring(0, - valueWithCaretMarker.indexOf(NewProposerParser.CARET_MARKER)); + final TreeSet result = GloblikePattern.filterValues(allValuesForField, valuePrefix, TreeSet::new); + METRIC_LOGGER.debug("{}: {}ms", property, (System.nanoTime() - start) / 1_000_000.0); + return result; + } - final SortedSet allValuesForField = queryCompletionIndex.findAllValuesForField(dateRange, - caretExpression.getField()); - final SortedSet valuesForFieldMatchingCaretExpression = GloblikePattern.filterValues(allValuesForField, - valuePattern, TreeSet::new); + @Override + public SortedSet visit(final AndCaretExpression expression) { - final Expression rightHandExpression = expression.getExpression(); + final long start = System.nanoTime(); + final Property caretExpression = expression.getCaretExpression(); + final String field = caretExpression.getField(); + final String valueWithCaretMarker = caretExpression.getValue().getValue(); + final String valuePrefix = valueWithCaretMarker.substring(0, + valueWithCaretMarker.indexOf(NewProposerParser.CARET_MARKER)); - final SortedSet 
rightHandValues = rightHandExpression - .visit(new AndCaretExpressionVisitor(dateRange, queryCompletionIndex, field)); + final Expression rightHandExpression = expression.getExpression(); - if (rightHandValues.size() == 1) { - // there is only one alternative and that one must not be chosen - return Collections.emptySortedSet(); - } - final SortedSet result = CollectionUtils.retainAll(rightHandValues, - valuesForFieldMatchingCaretExpression, TreeSet::new); - METRIC_LOGGER.debug("{}: {}ms", expression, (System.nanoTime() - start) / 1_000_000.0); - return result; - } + final SortedSet candidateValues = rightHandExpression + .visit(new AndCaretExpressionVisitor(dateRange, queryCompletionIndex, field)); - @Override - public SortedSet visit(final Not expression) { + final TreeSet result = GloblikePattern.filterValues(candidateValues, valuePrefix, TreeSet::new); - final String field; - final Expression innerExpression = expression.getExpression(); - if (innerExpression instanceof Property) { - final long start = System.nanoTime(); - field = ((Property) innerExpression).getField(); - final SortedSet allValuesForField = queryCompletionIndex.findAllValuesForField(dateRange, field); - final String valueWithCaretMarker = ((Property) innerExpression).getValue().getValue(); - final String valuePrefix = valueWithCaretMarker.substring(0, - valueWithCaretMarker.indexOf(NewProposerParser.CARET_MARKER)); - final TreeSet result = GloblikePattern.filterValues(allValuesForField, valuePrefix + "*", - TreeSet::new); - METRIC_LOGGER.debug("{}: {}ms", expression, (System.nanoTime() - start) / 1_000_000.0); - return result; - } else { - throw new UnsupportedOperationException(); - } - } + METRIC_LOGGER.debug("{}: {}ms", expression, (System.nanoTime() - start) / 1_000_000.0); + return result; + } - @Override - public SortedSet visit(final Or expression) { - final long start = System.nanoTime(); - final Expression left = expression.getLeft(); - final Expression right = expression.getRight(); 
+ @Override + public SortedSet visit(final AndNotCaretExpression expression) { - final SortedSet result = left.visit(this); - final SortedSet rightResult = right.visit(this); + final long start = System.nanoTime(); + final Property caretExpression = expression.getCaretExpression(); + final String field = caretExpression.getField(); + final String valueWithCaretMarker = caretExpression.getValue().getValue(); + final String valuePattern = valueWithCaretMarker.substring(0, + valueWithCaretMarker.indexOf(NewProposerParser.CARET_MARKER)); - result.addAll(rightResult); - METRIC_LOGGER.debug("{}: {}ms", expression, (System.nanoTime() - start) / 1_000_000.0); - return result; - } + final SortedSet allValuesForField = queryCompletionIndex.findAllValuesForField(dateRange, + caretExpression.getField()); + final SortedSet valuesForFieldMatchingCaretExpression = GloblikePattern.filterValues(allValuesForField, + valuePattern, TreeSet::new); - @Override - public SortedSet visit(final And expression) { - final long start = System.nanoTime(); - final Expression left = expression.getLeft(); - final Expression right = expression.getRight(); + final Expression rightHandExpression = expression.getExpression(); - final SortedSet result = left.visit(this); - final SortedSet rightResult = right.visit(this); + final SortedSet rightHandValues = rightHandExpression + .visit(new AndCaretExpressionVisitor(dateRange, queryCompletionIndex, field)); - result.retainAll(rightResult); - METRIC_LOGGER.debug("{}: {}ms", expression, (System.nanoTime() - start) / 1_000_000.0); - return result; - } + if (rightHandValues.size() == 1) { + // there is only one alternative and that one must not be chosen + return Collections.emptySortedSet(); + } + final SortedSet result = CollectionUtils.retainAll(rightHandValues, + valuesForFieldMatchingCaretExpression, TreeSet::new); + METRIC_LOGGER.debug("{}: {}ms", expression, (System.nanoTime() - start) / 1_000_000.0); + return result; + } + + @Override + public 
SortedSet visit(final Not expression) { + + final String field; + final Expression innerExpression = expression.getExpression(); + if (innerExpression instanceof Property) { + final long start = System.nanoTime(); + field = ((Property) innerExpression).getField(); + final SortedSet allValuesForField = queryCompletionIndex.findAllValuesForField(dateRange, field); + final String valueWithCaretMarker = ((Property) innerExpression).getValue().getValue(); + final String valuePrefix = valueWithCaretMarker.substring(0, + valueWithCaretMarker.indexOf(NewProposerParser.CARET_MARKER)); + final TreeSet result = GloblikePattern.filterValues(allValuesForField, valuePrefix + "*", + TreeSet::new); + METRIC_LOGGER.debug("{}: {}ms", expression, (System.nanoTime() - start) / 1_000_000.0); + return result; + } else { + throw new UnsupportedOperationException(); + } + } + + @Override + public SortedSet visit(final Or expression) { + final long start = System.nanoTime(); + final Expression left = expression.getLeft(); + final Expression right = expression.getRight(); + + final SortedSet result = left.visit(this); + final SortedSet rightResult = right.visit(this); + + result.addAll(rightResult); + METRIC_LOGGER.debug("{}: {}ms", expression, (System.nanoTime() - start) / 1_000_000.0); + return result; + } + + @Override + public SortedSet visit(final And expression) { + final long start = System.nanoTime(); + final Expression left = expression.getLeft(); + final Expression right = expression.getRight(); + + final SortedSet result = left.visit(this); + final SortedSet rightResult = right.visit(this); + + result.retainAll(rightResult); + METRIC_LOGGER.debug("{}: {}ms", expression, (System.nanoTime() - start) / 1_000_000.0); + return result; + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/lang/GloblikePattern.java b/data-store/src/main/java/org/lucares/pdb/datastore/lang/GloblikePattern.java index 1632b09..971b6cd 100644 --- 
a/data-store/src/main/java/org/lucares/pdb/datastore/lang/GloblikePattern.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/lang/GloblikePattern.java @@ -12,70 +12,70 @@ import org.slf4j.LoggerFactory; public class GloblikePattern { - private static final Logger LOGGER = LoggerFactory.getLogger(GloblikePattern.class); + private static final Logger LOGGER = LoggerFactory.getLogger(GloblikePattern.class); - enum FilterMode { - KEEP_EQUAL - } + enum FilterMode { + KEEP_EQUAL + } - public static Pattern globlikeToRegex(final String globlike) { + public static Pattern globlikeToRegex(final String globlike) { - final String valueRegex = "^" + globlikeToPattern(globlike); + final String valueRegex = "^" + globlikeToPattern(globlike); - LOGGER.trace(">{}< -> >{}<", globlike, valueRegex); + LOGGER.trace(">{}< -> >{}<", globlike, valueRegex); - return Pattern.compile(valueRegex); - } + return Pattern.compile(valueRegex); + } - public static Pattern globlikeToRegex(final Iterable globlikes) { + public static Pattern globlikeToRegex(final Iterable globlikes) { - final List regex = new ArrayList<>(); + final List regex = new ArrayList<>(); - for (final String globlike : globlikes) { - regex.add(globlikeToPattern(globlike)); - } - final StringBuilder fullRegex = new StringBuilder("^("); - fullRegex.append(String.join("|", regex)); - fullRegex.append(")"); + for (final String globlike : globlikes) { + regex.add(globlikeToPattern(globlike)); + } + final StringBuilder fullRegex = new StringBuilder("^("); + fullRegex.append(String.join("|", regex)); + fullRegex.append(")"); - LOGGER.trace(">{}< -> >{}<", globlikes, fullRegex); + LOGGER.trace(">{}< -> >{}<", globlikes, fullRegex); - return Pattern.compile(fullRegex.toString()); - } + return Pattern.compile(fullRegex.toString()); + } - private static String globlikeToPattern(final String globlike) { - // a character that cannot be in the globPattern - final String dotPlaceholder = "\ue003"; // fourth character in the 
private use area + private static String globlikeToPattern(final String globlike) { + // a character that cannot be in the globPattern + final String dotPlaceholder = "\ue003"; // fourth character in the private use area - final String valueRegex = globlike// - .replace("-", Pattern.quote("-"))// - .replace(".", dotPlaceholder)// - .replace("*", ".*")// - .replace(dotPlaceholder, ".*\\.")// - .replaceAll("([A-Z])", "[a-z]*$1"); - return valueRegex; - } + final String valueRegex = globlike// + .replace("-", Pattern.quote("-"))// + .replace(".", dotPlaceholder)// + .replace("*", ".*")// + .replace(dotPlaceholder, ".*\\.")// + .replaceAll("([A-Z])", "[a-z]*$1"); + return valueRegex; + } - public static > T filterValues(final Collection availableValues, - final String valuePattern, final Supplier generator) { - final T result = generator.get(); + public static > T filterValues(final Collection availableValues, + final String valuePattern, final Supplier generator) { + final T result = generator.get(); - return filterValues(result, availableValues, valuePattern); - } + return filterValues(result, availableValues, valuePattern); + } - public static > T filterValues(final T result, - final Collection availableValues, final String valuePattern) { + public static > T filterValues(final T result, + final Collection availableValues, final String valuePattern) { - final Pattern pattern = GloblikePattern.globlikeToRegex(valuePattern); + final Pattern pattern = GloblikePattern.globlikeToRegex(valuePattern); - for (final String value : availableValues) { - final Matcher matcher = pattern.matcher(value); - if (matcher.find()) { - result.add(value); - } - } + for (final String value : availableValues) { + final Matcher matcher = pattern.matcher(value); + if (matcher.find()) { + result.add(value); + } + } - return result; - } + return result; + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/lang/IdentityExpressionVisitor.java 
b/data-store/src/main/java/org/lucares/pdb/datastore/lang/IdentityExpressionVisitor.java index da7a647..2fa38d2 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/lang/IdentityExpressionVisitor.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/lang/IdentityExpressionVisitor.java @@ -14,66 +14,66 @@ import org.lucares.pdb.datastore.lang.Expression.Property; * as base class for visitors that modify expressions. */ public abstract class IdentityExpressionVisitor extends ExpressionVisitor { - @Override - public Expression visit(final And expression) { + @Override + public Expression visit(final And expression) { - final Expression left = expression.getLeft().visit(this); - final Expression right = expression.getRight().visit(this); + final Expression left = expression.getLeft().visit(this); + final Expression right = expression.getRight().visit(this); - return new And(left, right); - } + return new And(left, right); + } - @Override - public Expression visit(final Or expression) { - final Expression left = expression.getLeft().visit(this); - final Expression right = expression.getRight().visit(this); + @Override + public Expression visit(final Or expression) { + final Expression left = expression.getLeft().visit(this); + final Expression right = expression.getRight().visit(this); - return new Or(left, right); - } + return new Or(left, right); + } - @Override - public Expression visit(final Not expression) { - return new Not(expression.getExpression().visit(this)); - } + @Override + public Expression visit(final Not expression) { + return new Not(expression.getExpression().visit(this)); + } - @Override - public Expression visit(final Property expression) { - return expression; - } + @Override + public Expression visit(final Property expression) { + return expression; + } - @Override - public Expression visit(final Expression.Terminal expression) { - return expression; - } + @Override + public Expression visit(final Expression.Terminal expression) 
{ + return expression; + } - @Override - public Expression visit(final Expression.MatchAll expression) { - return expression; - } + @Override + public Expression visit(final Expression.MatchAll expression) { + return expression; + } - @Override - public Expression visit(final Expression.InExpression expression) { - return expression; - } + @Override + public Expression visit(final Expression.InExpression expression) { + return expression; + } - @Override - public Expression visit(final Parentheses parentheses) { - return new Parentheses(parentheses.getExpression().visit(this)); - } + @Override + public Expression visit(final Parentheses parentheses) { + return new Parentheses(parentheses.getExpression().visit(this)); + } - @Override - public Expression visit(final AndCaretExpression expression) { - return expression; - } + @Override + public Expression visit(final AndCaretExpression expression) { + return expression; + } - @Override - public Expression visit(final AndNotCaretExpression expression) { - return expression; - } + @Override + public Expression visit(final AndNotCaretExpression expression) { + return expression; + } - @Override - public Expression visit(final CaretAndExpression expression) { - return expression; - } + @Override + public Expression visit(final CaretAndExpression expression) { + return expression; + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/lang/NewProposerParser.java b/data-store/src/main/java/org/lucares/pdb/datastore/lang/NewProposerParser.java index 6b105a6..3604558 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/lang/NewProposerParser.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/lang/NewProposerParser.java @@ -21,203 +21,203 @@ import org.slf4j.LoggerFactory; public class NewProposerParser implements QueryConstants { - private static final Logger LOGGER = LoggerFactory.getLogger(NewProposerParser.class); + private static final Logger LOGGER = 
LoggerFactory.getLogger(NewProposerParser.class); - private final static Logger METRICS_LOGGER_PROPOSE = LoggerFactory.getLogger("org.lucares.metrics.propose"); + private final static Logger METRICS_LOGGER_PROPOSE = LoggerFactory.getLogger("org.lucares.metrics.propose"); - /* - * Regex matching a java identifier without a caret marker. We define it as a - * blacklist, because this is easer. The regex is only used after the - * query has already been validated with the proper grammar. - */ - private static final String REGEX_IDENTIFIER = "[^\\s,!\\(\\)=" + CARET_MARKER + "]*"; + /* + * Regex matching a java identifier without a caret marker. We define it as a + * blacklist, because this is easer. The regex is only used after the + * query has already been validated with the proper grammar. + */ + private static final String REGEX_IDENTIFIER = "[^\\s,!\\(\\)=" + CARET_MARKER + "]*"; - private final QueryCompletionIndex queryCompletionIndex; + private final QueryCompletionIndex queryCompletionIndex; - public NewProposerParser(final QueryCompletionIndex queryCompletionIndex) { - this.queryCompletionIndex = queryCompletionIndex; - } + public NewProposerParser(final QueryCompletionIndex queryCompletionIndex) { + this.queryCompletionIndex = queryCompletionIndex; + } - public List propose(final QueryWithCaretMarker query) { - final long start = System.nanoTime(); - List proposals; - if (StringUtils.isBlank(query.getQuery())) { - proposals = proposeForAllKeys(query.getDateRange()); - } else { + public List propose(final QueryWithCaretMarker query) { + final long start = System.nanoTime(); + List proposals; + if (StringUtils.isBlank(query.getQuery())) { + proposals = proposeForAllKeys(query.getDateRange()); + } else { - final List foundProposals = proposalsForValues(query); - if (foundProposals.isEmpty()) { - proposals = proposalsForNonValues(query); - } else { - proposals = foundProposals; - } - } - final List nonEmptyProposals = CollectionUtils.filter(proposals, p -> 
p.hasResults()); + final List foundProposals = proposalsForValues(query); + if (foundProposals.isEmpty()) { + proposals = proposalsForNonValues(query); + } else { + proposals = foundProposals; + } + } + final List nonEmptyProposals = CollectionUtils.filter(proposals, p -> p.hasResults()); - METRICS_LOGGER_PROPOSE.debug("compute proposals took {}ms for query '{}' ", - (System.nanoTime() - start) / 1_000_000.0, query); + METRICS_LOGGER_PROPOSE.debug("compute proposals took {}ms for query '{}' ", + (System.nanoTime() - start) / 1_000_000.0, query); - return nonEmptyProposals; - } + return nonEmptyProposals; + } - private List proposalsForNonValues(final QueryWithCaretMarker query) { - final List proposals = new ArrayList<>(); + private List proposalsForNonValues(final QueryWithCaretMarker query) { + final List proposals = new ArrayList<>(); - /* - * This method is called when the query could not be parsed. It is likely that - * the next word is either a field or an operator. But is is also possible that - * the next word is a field-value, because the syntax error might be at another - * location in the query (not at the caret position). - */ + /* + * This method is called when the query could not be parsed. It is likely that + * the next word is either a field or an operator. But is is also possible that + * the next word is a field-value, because the syntax error might be at another + * location in the query (not at the caret position). 
+ */ - final List tokens = QueryLanguage.getTokens(query.getQueryWithCaretMarker()); - final int indexTokenWithCaret = CollectionUtils.indexOf(tokens, t -> t.contains(CARET_MARKER)); + final List tokens = QueryLanguage.getTokens(query.getQueryWithCaretMarker()); + final int indexTokenWithCaret = CollectionUtils.indexOf(tokens, t -> t.contains(CARET_MARKER)); - if (indexTokenWithCaret > 0) { - final String previousToken = tokens.get(indexTokenWithCaret - 1); - switch (previousToken) { - case "(": - case "and": - case "or": - case "!": - proposals.addAll(proposeForAllKeys(query)); - break; + if (indexTokenWithCaret > 0) { + final String previousToken = tokens.get(indexTokenWithCaret - 1); + switch (previousToken) { + case "(": + case "and": + case "or": + case "!": + proposals.addAll(proposeForAllKeys(query)); + break; - case ")": - default: - // proposals.addAll(proposal); - break; - } - } else if (indexTokenWithCaret == 0) { - proposals.addAll(proposeForAllKeys(query)); - } + case ")": + default: + // proposals.addAll(proposal); + break; + } + } else if (indexTokenWithCaret == 0) { + proposals.addAll(proposeForAllKeys(query)); + } - return proposals; - } + return proposals; + } - private Collection proposeForAllKeys(final QueryWithCaretMarker query) { - final List proposals = new ArrayList<>(); - final String wordPrefix = wordPrefix(query.getQueryWithCaretMarker()); + private Collection proposeForAllKeys(final QueryWithCaretMarker query) { + final List proposals = new ArrayList<>(); + final String wordPrefix = wordPrefix(query.getQueryWithCaretMarker()); - if (wordPrefix != null) { - final SortedSet allFields = queryCompletionIndex.findAllFields(query.getDateRange()); - for (final String field : allFields) { + if (wordPrefix != null) { + final SortedSet allFields = queryCompletionIndex.findAllFields(query.getDateRange()); + for (final String field : allFields) { - if (!field.startsWith(wordPrefix)) { - continue; - } + if (!field.startsWith(wordPrefix)) { + 
continue; + } - final String queryWithCaretMarker = query.getQueryWithCaretMarker(); - final String proposedQuery = queryWithCaretMarker - .replaceAll(REGEX_IDENTIFIER + CARET_MARKER + REGEX_IDENTIFIER, field + "=* "); - final String newQueryWithCaretMarker = queryWithCaretMarker - .replaceAll(REGEX_IDENTIFIER + CARET_MARKER + REGEX_IDENTIFIER, field + "=" + CARET_MARKER); - final String newQuery = newQueryWithCaretMarker.replace(CARET_MARKER, ""); - final int newCaretPosition = newQueryWithCaretMarker.indexOf(CARET_MARKER); - final Proposal proposal = new Proposal(field, proposedQuery, true, newQuery, newCaretPosition); - proposals.add(proposal); - } - } + final String queryWithCaretMarker = query.getQueryWithCaretMarker(); + final String proposedQuery = queryWithCaretMarker + .replaceAll(REGEX_IDENTIFIER + CARET_MARKER + REGEX_IDENTIFIER, field + "=* "); + final String newQueryWithCaretMarker = queryWithCaretMarker + .replaceAll(REGEX_IDENTIFIER + CARET_MARKER + REGEX_IDENTIFIER, field + "=" + CARET_MARKER); + final String newQuery = newQueryWithCaretMarker.replace(CARET_MARKER, ""); + final int newCaretPosition = newQueryWithCaretMarker.indexOf(CARET_MARKER); + final Proposal proposal = new Proposal(field, proposedQuery, true, newQuery, newCaretPosition); + proposals.add(proposal); + } + } - return proposals; - } + return proposals; + } - private String wordPrefix(final String queryWithCaretMarker) { + private String wordPrefix(final String queryWithCaretMarker) { - final Pattern pattern = Pattern.compile("(" + REGEX_IDENTIFIER + CARET_MARKER + ")"); - final Matcher matcher = pattern.matcher(queryWithCaretMarker); - if (matcher.find()) { - final String group = matcher.group(); - return group.replace(CARET_MARKER, ""); - } + final Pattern pattern = Pattern.compile("(" + REGEX_IDENTIFIER + CARET_MARKER + ")"); + final Matcher matcher = pattern.matcher(queryWithCaretMarker); + if (matcher.find()) { + final String group = matcher.group(); + return 
group.replace(CARET_MARKER, ""); + } - return null; - } + return null; + } - private List proposeForAllKeys(final DateTimeRange dateRange) { - final List proposals = new ArrayList<>(); + private List proposeForAllKeys(final DateTimeRange dateRange) { + final List proposals = new ArrayList<>(); - final SortedSet allFields = queryCompletionIndex.findAllFields(dateRange); - for (final String field : allFields) { - final String proposedQuery = field + "=*"; - final String newQuery = field + "="; - final int newCaretPosition = newQuery.length(); - final Proposal proposal = new Proposal(field, proposedQuery, true, newQuery, newCaretPosition); - proposals.add(proposal); - } + final SortedSet allFields = queryCompletionIndex.findAllFields(dateRange); + for (final String field : allFields) { + final String proposedQuery = field + "=*"; + final String newQuery = field + "="; + final int newCaretPosition = newQuery.length(); + final Proposal proposal = new Proposal(field, proposedQuery, true, newQuery, newCaretPosition); + proposals.add(proposal); + } - return proposals; - } + return proposals; + } - List proposalsForValues(final QueryWithCaretMarker query) { - try { - // Add caret marker, so that we know where the caret is. - // This also makes sure that a query like "name=|" ('|' is the caret) can be - // parsed. - // Without the caret marker the query would be "name=", which is not a valid - // expression. - final String queryWithCaretMarker = query.getQueryWithCaretMarker(); + List proposalsForValues(final QueryWithCaretMarker query) { + try { + // Add caret marker, so that we know where the caret is. + // This also makes sure that a query like "name=|" ('|' is the caret) can be + // parsed. + // Without the caret marker the query would be "name=", which is not a valid + // expression. 
+ final String queryWithCaretMarker = query.getQueryWithCaretMarker(); - // parse the query - final Expression expression = QueryLanguageParser.parse(queryWithCaretMarker); + // parse the query + final Expression expression = QueryLanguageParser.parse(queryWithCaretMarker); - // normalize it, so that we can use the queryCompletionIndex to search for - // candidate values - final QueryCompletionExpressionOptimizer optimizer = new QueryCompletionExpressionOptimizer(); - final Expression normalizedExpression = optimizer.normalizeExpression(expression); + // normalize it, so that we can use the queryCompletionIndex to search for + // candidate values + final QueryCompletionExpressionOptimizer optimizer = new QueryCompletionExpressionOptimizer(); + final Expression normalizedExpression = optimizer.normalizeExpression(expression); - // find all candidate values - final SortedSet candidateValues = normalizedExpression - .visit(new FindValuesForQueryCompletion(query.getDateRange(), queryCompletionIndex)); + // find all candidate values + final SortedSet candidateValues = normalizedExpression + .visit(new FindValuesForQueryCompletion(query.getDateRange(), queryCompletionIndex)); - final SortedSet sortedAndPreparedCandidateValues = resultFilter(query.getResultMode(), - candidateValues, queryWithCaretMarker); + final SortedSet sortedAndPreparedCandidateValues = resultFilter(query.getResultMode(), + candidateValues, queryWithCaretMarker); - // translate the candidate values to proposals - final List proposals = generateProposals(queryWithCaretMarker, sortedAndPreparedCandidateValues); + // translate the candidate values to proposals + final List proposals = generateProposals(queryWithCaretMarker, sortedAndPreparedCandidateValues); - return proposals; - } catch (final SyntaxException e) { - LOGGER.debug("Query ({}) is not valid. 
This is expected to happen " - + "unless we are looking for proposals of values.", query, e); - return Collections.emptyList(); - } - } + return proposals; + } catch (final SyntaxException e) { + LOGGER.debug("Query ({}) is not valid. This is expected to happen " + + "unless we are looking for proposals of values.", query, e); + return Collections.emptyList(); + } + } - private SortedSet resultFilter(final ResultMode resultMode, final SortedSet candidateValues, - final String queryWithCaretMarker) { - switch (resultMode) { - case CUT_AT_DOT: - return cutAtDots(candidateValues, queryWithCaretMarker); - case FULL_VALUES: - return candidateValues; - default: - throw new IllegalArgumentException("Unexpected value: " + resultMode); - } - } + private SortedSet resultFilter(final ResultMode resultMode, final SortedSet candidateValues, + final String queryWithCaretMarker) { + switch (resultMode) { + case CUT_AT_DOT: + return cutAtDots(candidateValues, queryWithCaretMarker); + case FULL_VALUES: + return candidateValues; + default: + throw new IllegalArgumentException("Unexpected value: " + resultMode); + } + } - private SortedSet cutAtDots(final SortedSet candidateValues, final String queryWithCaretMarker) { - final CandidateGrouper grouper = new CandidateGrouper(); - return grouper.group(candidateValues, queryWithCaretMarker); - } + private SortedSet cutAtDots(final SortedSet candidateValues, final String queryWithCaretMarker) { + final CandidateGrouper grouper = new CandidateGrouper(); + return grouper.group(candidateValues, queryWithCaretMarker); + } - private List generateProposals(final String queryWithCaretMarker, - final SortedSet candidateValues) { - final List proposals = new ArrayList<>(); + private List generateProposals(final String queryWithCaretMarker, + final SortedSet candidateValues) { + final List proposals = new ArrayList<>(); - for (final String proposedTag : candidateValues) { + for (final String proposedTag : candidateValues) { - final String 
proposedQueryWithCaretMarker = queryWithCaretMarker - .replaceAll(REGEX_IDENTIFIER + CARET_MARKER + REGEX_IDENTIFIER, proposedTag + CARET_MARKER); + final String proposedQueryWithCaretMarker = queryWithCaretMarker + .replaceAll(REGEX_IDENTIFIER + CARET_MARKER + REGEX_IDENTIFIER, proposedTag + CARET_MARKER); - final String proposedQuery = proposedQueryWithCaretMarker.replace(CARET_MARKER, ""); - final int newCaretPosition = proposedQueryWithCaretMarker.indexOf(CARET_MARKER); + final String proposedQuery = proposedQueryWithCaretMarker.replace(CARET_MARKER, ""); + final int newCaretPosition = proposedQueryWithCaretMarker.indexOf(CARET_MARKER); - final Proposal proposal = new Proposal(proposedTag, proposedQuery, true, proposedQuery, newCaretPosition); - proposals.add(proposal); - } + final Proposal proposal = new Proposal(proposedTag, proposedQuery, true, proposedQuery, newCaretPosition); + proposals.add(proposal); + } - return proposals; - } + return proposals; + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/lang/QueryCompletionExpressionOptimizer.java b/data-store/src/main/java/org/lucares/pdb/datastore/lang/QueryCompletionExpressionOptimizer.java index 1effe15..4c92fb5 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/lang/QueryCompletionExpressionOptimizer.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/lang/QueryCompletionExpressionOptimizer.java @@ -45,228 +45,228 @@ import org.slf4j.LoggerFactory; */ public class QueryCompletionExpressionOptimizer { - private static final Logger LOGGER = LoggerFactory.getLogger(QueryCompletionExpressionOptimizer.class); + private static final Logger LOGGER = LoggerFactory.getLogger(QueryCompletionExpressionOptimizer.class); - private static final class ReplaceINExpressionsWithPropertyExpressionsVisitor extends IdentityExpressionVisitor { + private static final class ReplaceINExpressionsWithPropertyExpressionsVisitor extends IdentityExpressionVisitor { - @Override - public 
Expression visit(final InExpression expression) { - if (expression.containsCaret() || expression.getValues().size() == 1) { - final String property = expression.getProperty(); - final List values = expression.getValues(); + @Override + public Expression visit(final InExpression expression) { + if (expression.containsCaret() || expression.getValues().size() == 1) { + final String property = expression.getProperty(); + final List values = expression.getValues(); - final List propertyExpressions = new ArrayList<>(); + final List propertyExpressions = new ArrayList<>(); - for (final String value : values) { - propertyExpressions.add(new Property(property, new Terminal(value))); - } + for (final String value : values) { + propertyExpressions.add(new Property(property, new Terminal(value))); + } - return Expression.Or.create(propertyExpressions); - } else { - return super.visit(expression); - } - }; - } + return Expression.Or.create(propertyExpressions); + } else { + return super.visit(expression); + } + }; + } - private static final class RemoveOrEdExpressions extends IdentityExpressionVisitor { - @Override - public Expression visit(final Or expression) { - final Expression left = expression.getLeft(); - final Expression right = expression.getRight(); + private static final class RemoveOrEdExpressions extends IdentityExpressionVisitor { + @Override + public Expression visit(final Or expression) { + final Expression left = expression.getLeft(); + final Expression right = expression.getRight(); - if (left.containsCaret() && !right.containsCaret()) { - return left; - } - if (!left.containsCaret() && right.containsCaret()) { - return right; - } - return super.visit(expression); - }; - } + if (left.containsCaret() && !right.containsCaret()) { + return left; + } + if (!left.containsCaret() && right.containsCaret()) { + return right; + } + return super.visit(expression); + }; + } - private static final class DistributiveNormalization extends IdentityExpressionVisitor { + 
private static final class DistributiveNormalization extends IdentityExpressionVisitor { - @Override - public Expression visit(final And expression) { - final Expression left = expression.getLeft(); - final Expression right = expression.getRight(); + @Override + public Expression visit(final And expression) { + final Expression left = expression.getLeft(); + final Expression right = expression.getRight(); - if (left instanceof Or) { - // (a or b) and c - // becomes - // a and c or b and c - final Expression ac = new And(((Or) left).getLeft(), right); - final Expression bc = new And(((Or) left).getRight(), right); - return new Or(ac, bc); - } + if (left instanceof Or) { + // (a or b) and c + // becomes + // a and c or b and c + final Expression ac = new And(((Or) left).getLeft(), right); + final Expression bc = new And(((Or) left).getRight(), right); + return new Or(ac, bc); + } - if (right instanceof Or) { - // a and (b or c) - // becomes - // a and b or a and c - final Expression ab = new And(left, ((Or) right).getLeft()); - final Expression ac = new And(left, ((Or) right).getRight()); - return new Or(ab, ac); - } - return super.visit(expression); - }; - } + if (right instanceof Or) { + // a and (b or c) + // becomes + // a and b or a and c + final Expression ab = new And(left, ((Or) right).getLeft()); + final Expression ac = new And(left, ((Or) right).getRight()); + return new Or(ab, ac); + } + return super.visit(expression); + }; + } - private static final class RotateAndExpressions extends IdentityExpressionVisitor { - @Override - public Expression visit(final And expression) { + private static final class RotateAndExpressions extends IdentityExpressionVisitor { + @Override + public Expression visit(final And expression) { - final Expression left = expression.getLeft(); - final Expression right = expression.getRight(); + final Expression left = expression.getLeft(); + final Expression right = expression.getRight(); - // (| and a) and b => | and (a and b) - // - 
// The expression with the caret is moved up - if (left.containsCaret() && left instanceof And) { - final Expression leftLeft = ((And) left).getLeft(); - final Expression leftRight = ((And) left).getRight(); + // (| and a) and b => | and (a and b) + // + // The expression with the caret is moved up + if (left.containsCaret() && left instanceof And) { + final Expression leftLeft = ((And) left).getLeft(); + final Expression leftRight = ((And) left).getRight(); - if (leftLeft.containsCaret()) { - return new And(leftLeft, new And(leftRight, right)); - } else { - return new And(new And(leftLeft, right), leftRight); - } - } else if (right.containsCaret() && right instanceof And) { - final Expression rightLeft = ((And) right).getLeft(); - final Expression rightRight = ((And) right).getRight(); + if (leftLeft.containsCaret()) { + return new And(leftLeft, new And(leftRight, right)); + } else { + return new And(new And(leftLeft, right), leftRight); + } + } else if (right.containsCaret() && right instanceof And) { + final Expression rightLeft = ((And) right).getLeft(); + final Expression rightRight = ((And) right).getRight(); - if (rightLeft.containsCaret()) { - return new And(rightLeft, new And(rightRight, left)); - } else { - return new And(new And(rightLeft, left), rightRight); - } - } + if (rightLeft.containsCaret()) { + return new And(rightLeft, new And(rightRight, left)); + } else { + return new And(new And(rightLeft, left), rightRight); + } + } - return super.visit(expression); - } - } + return super.visit(expression); + } + } - private static final class DoubleNegationExpressions extends IdentityExpressionVisitor { - @Override - public Expression visit(final Not expression) { - if (expression instanceof Not) { - if (expression.getExpression() instanceof Not) { - return ((Not) expression.getExpression()).getExpression(); - } - } - return super.visit(expression); - } - } + private static final class DoubleNegationExpressions extends IdentityExpressionVisitor { + 
@Override + public Expression visit(final Not expression) { + if (expression instanceof Not) { + if (expression.getExpression() instanceof Not) { + return ((Not) expression.getExpression()).getExpression(); + } + } + return super.visit(expression); + } + } - private static final class DeMorgan extends IdentityExpressionVisitor { - @Override - public Expression visit(final Not expression) { + private static final class DeMorgan extends IdentityExpressionVisitor { + @Override + public Expression visit(final Not expression) { - if (expression.getExpression() instanceof And) { - final And andExpression = (And) expression.getExpression(); - final Expression left = andExpression.getLeft(); - final Expression right = andExpression.getRight(); + if (expression.getExpression() instanceof And) { + final And andExpression = (And) expression.getExpression(); + final Expression left = andExpression.getLeft(); + final Expression right = andExpression.getRight(); - final Expression notLeft = new Not(left); - final Expression notRight = new Not(right); + final Expression notLeft = new Not(left); + final Expression notRight = new Not(right); - return new Or(notLeft, notRight); - } + return new Or(notLeft, notRight); + } - return super.visit(expression); - } - } + return super.visit(expression); + } + } - private static final class ToAndCaretExpressions extends IdentityExpressionVisitor { - @Override - public Expression visit(final And expression) { + private static final class ToAndCaretExpressions extends IdentityExpressionVisitor { + @Override + public Expression visit(final And expression) { - final Expression left = expression.getLeft(); - final Expression right = expression.getRight(); + final Expression left = expression.getLeft(); + final Expression right = expression.getRight(); - if (left.containsCaret() && left instanceof Property) { - return new AndCaretExpression((Property) left, right); - } - if (right.containsCaret() && right instanceof Property) { - return new 
AndCaretExpression((Property) right, left); - } + if (left.containsCaret() && left instanceof Property) { + return new AndCaretExpression((Property) left, right); + } + if (right.containsCaret() && right instanceof Property) { + return new AndCaretExpression((Property) right, left); + } - if (left.containsCaret()// - && left instanceof Not// - && ((Not) left).getExpression() instanceof Property) { - return new AndNotCaretExpression((Property) ((Not) left).getExpression(), right); - } - if (right.containsCaret()// - && right instanceof Not// - && ((Not) right).getExpression() instanceof Property) { - return new AndNotCaretExpression((Property) ((Not) right).getExpression(), left); - } + if (left.containsCaret()// + && left instanceof Not// + && ((Not) left).getExpression() instanceof Property) { + return new AndNotCaretExpression((Property) ((Not) left).getExpression(), right); + } + if (right.containsCaret()// + && right instanceof Not// + && ((Not) right).getExpression() instanceof Property) { + return new AndNotCaretExpression((Property) ((Not) right).getExpression(), left); + } - return super.visit(expression); - } - } + return super.visit(expression); + } + } - public Expression normalizeExpression(final Expression expression) { + public Expression normalizeExpression(final Expression expression) { - Expression normalizingExpression = expression; - Expression previousExpression = normalizingExpression; - do { - previousExpression = normalizingExpression; - // replace all IN-expression, because they are just syntactic sugar for - // OR-expressions, but only for those that include the caret - normalizingExpression = normalizingExpression - .visit(new ReplaceINExpressionsWithPropertyExpressionsVisitor()); + Expression normalizingExpression = expression; + Expression previousExpression = normalizingExpression; + do { + previousExpression = normalizingExpression; + // replace all IN-expression, because they are just syntactic sugar for + // OR-expressions, but only 
for those that include the caret + normalizingExpression = normalizingExpression + .visit(new ReplaceINExpressionsWithPropertyExpressionsVisitor()); - // Remove expressions that are OR'ed with the one that contains the caret. - // Everything that is OR'ed with the 'caret'-expression cannot change the - // possible values. - normalizingExpression = visitRepeatedly(normalizingExpression, new RemoveOrEdExpressions()); + // Remove expressions that are OR'ed with the one that contains the caret. + // Everything that is OR'ed with the 'caret'-expression cannot change the + // possible values. + normalizingExpression = visitRepeatedly(normalizingExpression, new RemoveOrEdExpressions()); - // In the end we want to have expressions like "firstname=Jane and lastname=|". - // To reach that goal we use the distributive law to modify expressions like - // "(firstname=Jane or firstname=John) and lastname=|" to "(firstname=Jane and - // lastname=|) or (firstname=John and lastname=|)" - normalizingExpression = visitRepeatedly(normalizingExpression, new DistributiveNormalization()); + // In the end we want to have expressions like "firstname=Jane and lastname=|". 
+ // To reach that goal we use the distributive law to modify expressions like + // "(firstname=Jane or firstname=John) and lastname=|" to "(firstname=Jane and + // lastname=|) or (firstname=John and lastname=|)" + normalizingExpression = visitRepeatedly(normalizingExpression, new DistributiveNormalization()); - // (fn=John and (fn=John and ln=|) - // normalized to - // (fn=John and ln=|) and (fn=Jane and ln=|) - // or normalized to - // (fn=John and fn=Jane) and ln=| - normalizingExpression = visitRepeatedly(normalizingExpression, new RotateAndExpressions()); + // (fn=John and (fn=John and ln=|) + // normalized to + // (fn=John and ln=|) and (fn=Jane and ln=|) + // or normalized to + // (fn=John and fn=Jane) and ln=| + normalizingExpression = visitRepeatedly(normalizingExpression, new RotateAndExpressions()); - // normalize a NAND-expression into an OR with DeMorgan, the OR-Expression might - // later be removed - // not ( a and b) => (not a) or (not b) - normalizingExpression = visitRepeatedly(normalizingExpression, new DeMorgan()); + // normalize a NAND-expression into an OR with DeMorgan, the OR-Expression might + // later be removed + // not ( a and b) => (not a) or (not b) + normalizingExpression = visitRepeatedly(normalizingExpression, new DeMorgan()); - // remove double negation - // not not a => a - normalizingExpression = visitRepeatedly(normalizingExpression, new DoubleNegationExpressions()); - } while (!normalizingExpression.equals(previousExpression)); + // remove double negation + // not not a => a + normalizingExpression = visitRepeatedly(normalizingExpression, new DoubleNegationExpressions()); + } while (!normalizingExpression.equals(previousExpression)); - // Replaces all (a and |) expressions with a special expression that represents - // it. - // This special expression will then be used during evaluation. 
- return visitRepeatedly(normalizingExpression, new ToAndCaretExpressions()); - } + // Replaces all (a and |) expressions with a special expression that represents + // it. + // This special expression will then be used during evaluation. + return visitRepeatedly(normalizingExpression, new ToAndCaretExpressions()); + } - private static Expression visitRepeatedly(final Expression expression, - final ExpressionVisitor visitor) { - Expression previousExpression; - Expression result = expression; + private static Expression visitRepeatedly(final Expression expression, + final ExpressionVisitor visitor) { + Expression previousExpression; + Expression result = expression; - do { - previousExpression = result; - result = previousExpression.visit(visitor); - if (!previousExpression.equals(result)) { - LOGGER.debug(" translate: {}", visitor.getClass().getSimpleName()); - LOGGER.debug(" in: {}", previousExpression); - LOGGER.debug(" out: {}", result); - } - } while (!previousExpression.equals(result)); + do { + previousExpression = result; + result = previousExpression.visit(visitor); + if (!previousExpression.equals(result)) { + LOGGER.debug(" translate: {}", visitor.getClass().getSimpleName()); + LOGGER.debug(" in: {}", previousExpression); + LOGGER.debug(" out: {}", result); + } + } while (!previousExpression.equals(result)); - return result; - } + return result; + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/lang/QueryLanguage.java b/data-store/src/main/java/org/lucares/pdb/datastore/lang/QueryLanguage.java index c849d1c..27a866f 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/lang/QueryLanguage.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/lang/QueryLanguage.java @@ -28,125 +28,125 @@ import org.lucares.utils.CollectionUtils; public class QueryLanguage { - public Expression parse(final String input) { - // define the input - final CharStream in = CharStreams.fromString(input); + public Expression parse(final String 
input) { + // define the input + final CharStream in = CharStreams.fromString(input); - // create lexer and parser - final PdbLangLexer lexer = new PdbLangLexer(in); - lexer.addErrorListener(new ErrorListener()); + // create lexer and parser + final PdbLangLexer lexer = new PdbLangLexer(in); + lexer.addErrorListener(new ErrorListener()); - final CommonTokenStream tokens = new CommonTokenStream(lexer); - final PdbLangParser parser = new PdbLangParser(tokens); - parser.addErrorListener(new ErrorListener()); + final CommonTokenStream tokens = new CommonTokenStream(lexer); + final PdbLangParser parser = new PdbLangParser(tokens); + parser.addErrorListener(new ErrorListener()); - final Stack stack = new Stack<>(); + final Stack stack = new Stack<>(); - // define a listener that is called for every terminals and - // non-terminals - final ParseTreeListener listener = new PdbLangBaseListener() { + // define a listener that is called for every terminals and + // non-terminals + final ParseTreeListener listener = new PdbLangBaseListener() { - @Override - public void exitIdentifierExpression(final IdentifierExpressionContext ctx) { - if (ctx.getText().length() > 255) { - throw new SyntaxException(ctx, "token too long"); - } + @Override + public void exitIdentifierExpression(final IdentifierExpressionContext ctx) { + if (ctx.getText().length() > 255) { + throw new SyntaxException(ctx, "token too long"); + } - stack.push(new Terminal(ctx.getText())); - } + stack.push(new Terminal(ctx.getText())); + } - @Override - public void exitPropertyTerminalExpression(final PropertyTerminalExpressionContext ctx) { - if (ctx.getText().length() > 255) { - throw new SyntaxException(ctx, "token too long"); - } + @Override + public void exitPropertyTerminalExpression(final PropertyTerminalExpressionContext ctx) { + if (ctx.getText().length() > 255) { + throw new SyntaxException(ctx, "token too long"); + } - stack.push(new Terminal(ctx.getText())); - } + stack.push(new Terminal(ctx.getText())); 
+ } - @Override - public void exitNotExpression(final NotExpressionContext ctx) { + @Override + public void exitNotExpression(final NotExpressionContext ctx) { - final Expression expression = stack.pop(); + final Expression expression = stack.pop(); - final Expression notExpression = new Not(expression); - stack.push(notExpression); - } + final Expression notExpression = new Not(expression); + stack.push(notExpression); + } - @Override - public void exitBinaryAndExpression(final BinaryAndExpressionContext ctx) { - final Expression right = stack.pop(); - final TemporaryExpression operation = new AndTemporary(); - final Expression left = stack.pop(); + @Override + public void exitBinaryAndExpression(final BinaryAndExpressionContext ctx) { + final Expression right = stack.pop(); + final TemporaryExpression operation = new AndTemporary(); + final Expression left = stack.pop(); - stack.push(operation.toExpression(left, right)); - } + stack.push(operation.toExpression(left, right)); + } - @Override - public void exitBinaryOrExpression(final BinaryOrExpressionContext ctx) { - final Expression right = stack.pop(); - final TemporaryExpression operation = new OrTemporary(); - final Expression left = stack.pop(); + @Override + public void exitBinaryOrExpression(final BinaryOrExpressionContext ctx) { + final Expression right = stack.pop(); + final TemporaryExpression operation = new OrTemporary(); + final Expression left = stack.pop(); - stack.push(operation.toExpression(left, right)); - } + stack.push(operation.toExpression(left, right)); + } - @Override - public void exitListOfPropValues(final ListOfPropValuesContext ctx) { - final Expression topStackElement = stack.pop(); + @Override + public void exitListOfPropValues(final ListOfPropValuesContext ctx) { + final Expression topStackElement = stack.pop(); - if (topStackElement instanceof ListOfPropertyValues) { - // there are at least two property values in the query - // e.g. 
in the expression "bird in (eagle, pigeon)" - final ListOfPropertyValues existingList = (ListOfPropertyValues) topStackElement; - final Terminal nextPropertyValue = (Terminal) stack.pop(); + if (topStackElement instanceof ListOfPropertyValues) { + // there are at least two property values in the query + // e.g. in the expression "bird in (eagle, pigeon)" + final ListOfPropertyValues existingList = (ListOfPropertyValues) topStackElement; + final Terminal nextPropertyValue = (Terminal) stack.pop(); - final ListOfPropertyValues newListOfPropertyValues = new ListOfPropertyValues(nextPropertyValue, - existingList); - stack.push(newListOfPropertyValues); - } else { - // this is the first or the only value in this list of property values - // e.g. in the expression "bird in (eagle)" - final Terminal propertyValue = (Terminal) topStackElement; + final ListOfPropertyValues newListOfPropertyValues = new ListOfPropertyValues(nextPropertyValue, + existingList); + stack.push(newListOfPropertyValues); + } else { + // this is the first or the only value in this list of property values + // e.g. 
in the expression "bird in (eagle)" + final Terminal propertyValue = (Terminal) topStackElement; - final ListOfPropertyValues newListOfPropertyValues = new ListOfPropertyValues(propertyValue); - stack.push(newListOfPropertyValues); - } - } + final ListOfPropertyValues newListOfPropertyValues = new ListOfPropertyValues(propertyValue); + stack.push(newListOfPropertyValues); + } + } - @Override - public void exitEnclosedListOfPropValues(final EnclosedListOfPropValuesContext ctx) { + @Override + public void exitEnclosedListOfPropValues(final EnclosedListOfPropValuesContext ctx) { - final ListOfPropertyValues propertyValues = (ListOfPropertyValues) stack.pop(); - final Terminal propertyName = (Terminal) stack.pop(); + final ListOfPropertyValues propertyValues = (ListOfPropertyValues) stack.pop(); + final Terminal propertyName = (Terminal) stack.pop(); - final InExpression inExpression = new InExpression(propertyName.getValue(), propertyValues.getValues()); - stack.push(inExpression); - } - }; + final InExpression inExpression = new InExpression(propertyName.getValue(), propertyValues.getValues()); + stack.push(inExpression); + } + }; - // Specify our entry point - final ParseTree parseTree = parser.start(); + // Specify our entry point + final ParseTree parseTree = parser.start(); - // Walk it and attach our listener - final ParseTreeWalker walker = new ParseTreeWalker(); - walker.walk(listener, parseTree); + // Walk it and attach our listener + final ParseTreeWalker walker = new ParseTreeWalker(); + walker.walk(listener, parseTree); - if (stack.size() != 1) { - throw new RuntimeException("stack should have exactly one element " + stack); - } + if (stack.size() != 1) { + throw new RuntimeException("stack should have exactly one element " + stack); + } - return stack.pop(); - } + return stack.pop(); + } - public static List getTokens(final String input) { - final CharStream in = CharStreams.fromString(input); + public static List getTokens(final String input) { + final 
CharStream in = CharStreams.fromString(input); - final PdbLangLexer lexer = new PdbLangLexer(in); + final PdbLangLexer lexer = new PdbLangLexer(in); - final CommonTokenStream tokens = new CommonTokenStream(lexer); - tokens.fill(); - final List tokenList = tokens.getTokens(); - return CollectionUtils.map(tokenList, Token::getText); - } + final CommonTokenStream tokens = new CommonTokenStream(lexer); + tokens.fill(); + final List tokenList = tokens.getTokens(); + return CollectionUtils.map(tokenList, Token::getText); + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/lang/QueryLanguageParser.java b/data-store/src/main/java/org/lucares/pdb/datastore/lang/QueryLanguageParser.java index cc17728..0d341e8 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/lang/QueryLanguageParser.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/lang/QueryLanguageParser.java @@ -3,15 +3,15 @@ package org.lucares.pdb.datastore.lang; import org.apache.commons.lang3.StringUtils; public class QueryLanguageParser { - public static Expression parse(final String query) { + public static Expression parse(final String query) { - final Expression result; - if (StringUtils.isEmpty(query)) { - result = Expression.matchAll(); - } else { - final QueryLanguage lang = new QueryLanguage(); - result = lang.parse(query); - } - return result; - } + final Expression result; + if (StringUtils.isEmpty(query)) { + result = Expression.matchAll(); + } else { + final QueryLanguage lang = new QueryLanguage(); + result = lang.parse(query); + } + return result; + } } diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/lang/SyntaxException.java b/data-store/src/main/java/org/lucares/pdb/datastore/lang/SyntaxException.java index f74f01a..4824597 100644 --- a/data-store/src/main/java/org/lucares/pdb/datastore/lang/SyntaxException.java +++ b/data-store/src/main/java/org/lucares/pdb/datastore/lang/SyntaxException.java @@ -4,61 +4,61 @@ import 
org.antlr.v4.runtime.ParserRuleContext; public class SyntaxException extends RuntimeException { - private static final long serialVersionUID = 1L; - private int lineStart; - private int startIndex; - private int lineStop; - private int stopIndex; + private static final long serialVersionUID = 1L; + private int lineStart; + private int startIndex; + private int lineStop; + private int stopIndex; - public SyntaxException(final ParserRuleContext context, final String message) { - this(message, context.getStart().getLine(), context.getStart().getStartIndex(), context.getStop().getLine(), - context.getStop().getStopIndex()); - } + public SyntaxException(final ParserRuleContext context, final String message) { + this(message, context.getStart().getLine(), context.getStart().getStartIndex(), context.getStop().getLine(), + context.getStop().getStopIndex()); + } - public SyntaxException(final String message, final int lineStart, final int startIndex, final int lineStop, - final int stopIndex) { - super(message + ": " + generateMessage(lineStart, startIndex, lineStop, stopIndex)); - this.lineStart = lineStart; - this.startIndex = startIndex; - this.lineStop = lineStop; - this.stopIndex = stopIndex; - } + public SyntaxException(final String message, final int lineStart, final int startIndex, final int lineStop, + final int stopIndex) { + super(message + ": " + generateMessage(lineStart, startIndex, lineStop, stopIndex)); + this.lineStart = lineStart; + this.startIndex = startIndex; + this.lineStop = lineStop; + this.stopIndex = stopIndex; + } - private static String generateMessage(final int lineStart, final int startIndex, final int lineStop, - final int stopIndex) { + private static String generateMessage(final int lineStart, final int startIndex, final int lineStop, + final int stopIndex) { - return String.format("line=%d, start=%d, to line=%d stop=%d", lineStart, startIndex, lineStop, stopIndex); - } + return String.format("line=%d, start=%d, to line=%d stop=%d", 
lineStart, startIndex, lineStop, stopIndex); + } - public int getLineStart() { - return lineStart; - } + public int getLineStart() { + return lineStart; + } - public void setLineStart(final int lineStart) { - this.lineStart = lineStart; - } + public void setLineStart(final int lineStart) { + this.lineStart = lineStart; + } - public int getStartIndex() { - return startIndex; - } + public int getStartIndex() { + return startIndex; + } - public void setStartIndex(final int startIndex) { - this.startIndex = startIndex; - } + public void setStartIndex(final int startIndex) { + this.startIndex = startIndex; + } - public int getLineStop() { - return lineStop; - } + public int getLineStop() { + return lineStop; + } - public void setLineStop(final int lineStop) { - this.lineStop = lineStop; - } + public void setLineStop(final int lineStop) { + this.lineStop = lineStop; + } - public int getStopIndex() { - return stopIndex; - } + public int getStopIndex() { + return stopIndex; + } - public void setStopIndex(final int stopIndex) { - this.stopIndex = stopIndex; - } + public void setStopIndex(final int stopIndex) { + this.stopIndex = stopIndex; + } } diff --git a/data-store/src/test/java/org/lucares/pdb/datastore/internal/DataStoreTest.java b/data-store/src/test/java/org/lucares/pdb/datastore/internal/DataStoreTest.java index d333e63..17c9180 100644 --- a/data-store/src/test/java/org/lucares/pdb/datastore/internal/DataStoreTest.java +++ b/data-store/src/test/java/org/lucares/pdb/datastore/internal/DataStoreTest.java @@ -42,293 +42,293 @@ import org.testng.annotations.Test; @Test public class DataStoreTest { - private Path dataDirectory; - private DataStore dataStore; - private Map tagsToBlockStorageRootBlockNumber; - - @BeforeMethod - public void beforeMethod() throws IOException { - dataDirectory = Files.createTempDirectory("pdb"); - } - - @AfterMethod - public void afterMethod() throws IOException { - FileUtils.delete(dataDirectory); - dataStore = null; - 
tagsToBlockStorageRootBlockNumber = null; - Tags.STRING_COMPRESSOR = null; - } - - public void testQuery() throws Exception { - - dataStore = new DataStore(dataDirectory); - final DateTimeRange dateRange = DateTimeRange.relativeHours(1); - final ParititionId partitionId = DateIndexExtension.toPartitionIds(dateRange).get(0); - - final Tags eagleTim = Tags.createAndAddToDictionary("bird", "eagle", "name", "Tim"); - final Tags pigeonJennifer = Tags.createAndAddToDictionary("bird", "pigeon", "name", "Jennifer"); - final Tags flamingoJennifer = Tags.createAndAddToDictionary("bird", "flamingo", "name", "Jennifer"); - final Tags labradorJenny = Tags.createAndAddToDictionary("dog", "labrador", "name", "Jenny"); - final Tags labradorTim = Tags.createAndAddToDictionary("dog", "labrador", "name", "Tim"); - - tagsToBlockStorageRootBlockNumber = new HashMap<>(); - tagsToBlockStorageRootBlockNumber.put(eagleTim, dataStore.createNewFile(partitionId, eagleTim)); - tagsToBlockStorageRootBlockNumber.put(pigeonJennifer, dataStore.createNewFile(partitionId, pigeonJennifer)); - tagsToBlockStorageRootBlockNumber.put(flamingoJennifer, dataStore.createNewFile(partitionId, flamingoJennifer)); - tagsToBlockStorageRootBlockNumber.put(labradorJenny, dataStore.createNewFile(partitionId, labradorJenny)); - tagsToBlockStorageRootBlockNumber.put(labradorTim, dataStore.createNewFile(partitionId, labradorTim)); - - assertSearch(dateRange, "bird=eagle", eagleTim); - assertSearch(dateRange, "dog=labrador", labradorJenny, labradorTim); - assertSearch(dateRange, "name=Tim", eagleTim, labradorTim); - assertSearch(dateRange, "dog=labrador and name=Tim", labradorTim); - assertSearch(dateRange, "dog=labrador and !name=Tim", labradorJenny); - assertSearch(dateRange, "name=Jennifer or name=Jenny", pigeonJennifer, flamingoJennifer, labradorJenny); + private Path dataDirectory; + private DataStore dataStore; + private Map tagsToBlockStorageRootBlockNumber; + + @BeforeMethod + public void beforeMethod() throws 
IOException { + dataDirectory = Files.createTempDirectory("pdb"); + } + + @AfterMethod + public void afterMethod() throws IOException { + FileUtils.delete(dataDirectory); + dataStore = null; + tagsToBlockStorageRootBlockNumber = null; + Tags.STRING_COMPRESSOR = null; + } + + public void testQuery() throws Exception { + + dataStore = new DataStore(dataDirectory); + final DateTimeRange dateRange = DateTimeRange.relativeHours(1); + final ParititionId partitionId = DateIndexExtension.toPartitionIds(dateRange).get(0); + + final Tags eagleTim = Tags.createAndAddToDictionary("bird", "eagle", "name", "Tim"); + final Tags pigeonJennifer = Tags.createAndAddToDictionary("bird", "pigeon", "name", "Jennifer"); + final Tags flamingoJennifer = Tags.createAndAddToDictionary("bird", "flamingo", "name", "Jennifer"); + final Tags labradorJenny = Tags.createAndAddToDictionary("dog", "labrador", "name", "Jenny"); + final Tags labradorTim = Tags.createAndAddToDictionary("dog", "labrador", "name", "Tim"); + + tagsToBlockStorageRootBlockNumber = new HashMap<>(); + tagsToBlockStorageRootBlockNumber.put(eagleTim, dataStore.createNewFile(partitionId, eagleTim)); + tagsToBlockStorageRootBlockNumber.put(pigeonJennifer, dataStore.createNewFile(partitionId, pigeonJennifer)); + tagsToBlockStorageRootBlockNumber.put(flamingoJennifer, dataStore.createNewFile(partitionId, flamingoJennifer)); + tagsToBlockStorageRootBlockNumber.put(labradorJenny, dataStore.createNewFile(partitionId, labradorJenny)); + tagsToBlockStorageRootBlockNumber.put(labradorTim, dataStore.createNewFile(partitionId, labradorTim)); + + assertSearch(dateRange, "bird=eagle", eagleTim); + assertSearch(dateRange, "dog=labrador", labradorJenny, labradorTim); + assertSearch(dateRange, "name=Tim", eagleTim, labradorTim); + assertSearch(dateRange, "dog=labrador and name=Tim", labradorTim); + assertSearch(dateRange, "dog=labrador and !name=Tim", labradorJenny); + assertSearch(dateRange, "name=Jennifer or name=Jenny", pigeonJennifer, 
flamingoJennifer, labradorJenny); - // aÍŸnÍŸd binds stronger than oÍŸr - assertSearch(dateRange, "name=Tim and dog=labrador or bird=pigeon", pigeonJennifer, labradorTim); - assertSearch(dateRange, "bird=pigeon or name=Tim and dog=labrador", pigeonJennifer, labradorTim); + // aÍŸnÍŸd binds stronger than oÍŸr + assertSearch(dateRange, "name=Tim and dog=labrador or bird=pigeon", pigeonJennifer, labradorTim); + assertSearch(dateRange, "bird=pigeon or name=Tim and dog=labrador", pigeonJennifer, labradorTim); - // parenthesis override priority of aÍŸnÍŸd - assertSearch(dateRange, "name=Tim and (dog=labrador or bird=pigeon)", labradorTim); - assertSearch(dateRange, "(dog=labrador or bird=pigeon) and name=Tim", labradorTim); + // parenthesis override priority of aÍŸnÍŸd + assertSearch(dateRange, "name=Tim and (dog=labrador or bird=pigeon)", labradorTim); + assertSearch(dateRange, "(dog=labrador or bird=pigeon) and name=Tim", labradorTim); - // wildcards - assertSearch(dateRange, "bird=*", eagleTim, pigeonJennifer, flamingoJennifer); - assertSearch(dateRange, "name=Jen*", pigeonJennifer, flamingoJennifer, labradorJenny); - assertSearch(dateRange, "dog=*dor", labradorJenny, labradorTim); - assertSearch(dateRange, "dog=lab*dor", labradorJenny, labradorTim); - assertSearch(dateRange, "dog=*lab*dor*", labradorJenny, labradorTim); + // wildcards + assertSearch(dateRange, "bird=*", eagleTim, pigeonJennifer, flamingoJennifer); + assertSearch(dateRange, "name=Jen*", pigeonJennifer, flamingoJennifer, labradorJenny); + assertSearch(dateRange, "dog=*dor", labradorJenny, labradorTim); + assertSearch(dateRange, "dog=lab*dor", labradorJenny, labradorTim); + assertSearch(dateRange, "dog=*lab*dor*", labradorJenny, labradorTim); - // 'in' queries - assertSearch(dateRange, "bird=(eagle, pigeon, flamingo)", eagleTim, pigeonJennifer, flamingoJennifer); - assertSearch(dateRange, "dog = (labrador) and name =Tim,Jennifer", labradorTim); - assertSearch(dateRange, "name =Jenn*", pigeonJennifer, 
flamingoJennifer, labradorJenny); - assertSearch(dateRange, "name = (*) and dog=labrador", labradorJenny, labradorTim); - assertSearch(dateRange, "name =XYZ, * and dog=labrador", labradorJenny, labradorTim); + // 'in' queries + assertSearch(dateRange, "bird=(eagle, pigeon, flamingo)", eagleTim, pigeonJennifer, flamingoJennifer); + assertSearch(dateRange, "dog = (labrador) and name =Tim,Jennifer", labradorTim); + assertSearch(dateRange, "name =Jenn*", pigeonJennifer, flamingoJennifer, labradorJenny); + assertSearch(dateRange, "name = (*) and dog=labrador", labradorJenny, labradorTim); + assertSearch(dateRange, "name =XYZ, * and dog=labrador", labradorJenny, labradorTim); - } + } - public void testGetByTags() throws IOException { + public void testGetByTags() throws IOException { - dataStore = new DataStore(dataDirectory); - tagsToBlockStorageRootBlockNumber = new LinkedHashMap<>(); - final Tags pigeonJennifer = Tags.createAndAddToDictionary("bird", "pigeon", "name", "Jennifer"); - final Tags flamingoJennifer = Tags.createAndAddToDictionary("bird", "flamingo", "name", "Jennifer"); + dataStore = new DataStore(dataDirectory); + tagsToBlockStorageRootBlockNumber = new LinkedHashMap<>(); + final Tags pigeonJennifer = Tags.createAndAddToDictionary("bird", "pigeon", "name", "Jennifer"); + final Tags flamingoJennifer = Tags.createAndAddToDictionary("bird", "flamingo", "name", "Jennifer"); - final ParititionId partitionId = new ParititionId("partitionA"); - tagsToBlockStorageRootBlockNumber.put(pigeonJennifer, dataStore.createNewFile(partitionId, pigeonJennifer)); - tagsToBlockStorageRootBlockNumber.put(flamingoJennifer, dataStore.createNewFile(partitionId, flamingoJennifer)); + final ParititionId partitionId = new ParititionId("partitionA"); + tagsToBlockStorageRootBlockNumber.put(pigeonJennifer, dataStore.createNewFile(partitionId, pigeonJennifer)); + tagsToBlockStorageRootBlockNumber.put(flamingoJennifer, dataStore.createNewFile(partitionId, flamingoJennifer)); - final 
Optional docsFlamingoJennifer = dataStore.getByTags(partitionId, flamingoJennifer); - Assert.assertTrue(docsFlamingoJennifer.isPresent(), "doc for docsFlamingoJennifer"); - } + final Optional docsFlamingoJennifer = dataStore.getByTags(partitionId, flamingoJennifer); + Assert.assertTrue(docsFlamingoJennifer.isPresent(), "doc for docsFlamingoJennifer"); + } - public void testBlockAlignment() throws IOException { + public void testBlockAlignment() throws IOException { - dataStore = new DataStore(dataDirectory); - final Tags eagleTim = Tags.createAndAddToDictionary("bird", "eagle", "name", "Tim"); - final long eagleTimBlockOffset = dataStore.createNewFile(new ParititionId("partitionA"), eagleTim); - Assert.assertEquals(eagleTimBlockOffset % BSFile.BLOCK_SIZE, 0); - } + dataStore = new DataStore(dataDirectory); + final Tags eagleTim = Tags.createAndAddToDictionary("bird", "eagle", "name", "Tim"); + final long eagleTimBlockOffset = dataStore.createNewFile(new ParititionId("partitionA"), eagleTim); + Assert.assertEquals(eagleTimBlockOffset % BSFile.BLOCK_SIZE, 0); + } - @DataProvider(name = "providerProposals") - public Iterator providerProposals() { + @DataProvider(name = "providerProposals") + public Iterator providerProposals() { - final List result = new ArrayList<>(); + final List result = new ArrayList<>(); - result.add(new Object[] { "type=bird and subtype=eagle and name=|", "name", Arrays.asList("Tim") }); + result.add(new Object[] { "type=bird and subtype=eagle and name=|", "name", Arrays.asList("Tim") }); - // returns Tim, because it is the only dog's name starting with 'Ti' - result.add(new Object[] { "!name=Ti| and type=dog", "name", Arrays.asList("Tim") }); + // returns Tim, because it is the only dog's name starting with 'Ti' + result.add(new Object[] { "!name=Ti| and type=dog", "name", Arrays.asList("Tim") }); - // all cats - result.add(new Object[] { "type=cat and !name=|", "name", - Arrays.asList("Jane", "John", "Paul", "Sam", "Timothy") }); + // all cats 
+ result.add(new Object[] { "type=cat and !name=|", "name", + Arrays.asList("Jane", "John", "Paul", "Sam", "Timothy") }); - // finds nothing, because there are not dogs names neither Jenny, nor Ti* - result.add(new Object[] { "!name=Ti| and type=dog and !name=Jenny", "name", Arrays.asList() }); + // finds nothing, because there are not dogs names neither Jenny, nor Ti* + result.add(new Object[] { "!name=Ti| and type=dog and !name=Jenny", "name", Arrays.asList() }); - result.add(new Object[] { "(type=bird and age=three or type=dog and age=three) and name=|", "name", - Arrays.asList("Jenny", "Tim") }); + result.add(new Object[] { "(type=bird and age=three or type=dog and age=three) and name=|", "name", + Arrays.asList("Jenny", "Tim") }); - // all but Jennifer - result.add(new Object[] { "!(type=bird) and name=|", "name", - Arrays.asList("Jane", "Jenny", "John", "Paul", "Sam", "Tim", "Timothy") }); + // all but Jennifer + result.add(new Object[] { "!(type=bird) and name=|", "name", + Arrays.asList("Jane", "Jenny", "John", "Paul", "Sam", "Tim", "Timothy") }); - result.add(new Object[] { "type=bird and !subtype=eagle and name=|", "name", Arrays.asList("Jennifer") }); + result.add(new Object[] { "type=bird and !subtype=eagle and name=|", "name", Arrays.asList("Jennifer") }); - // DeMorgan - // TODO should only match "Jenny", because Jenny is the only non-bird name - // starting with 'Jen' - result.add(new Object[] { "!(type=bird and name=Jen|)", "name", Arrays.asList("Jennifer", "Jenny") }); + // DeMorgan + // TODO should only match "Jenny", because Jenny is the only non-bird name + // starting with 'Jen' + result.add(new Object[] { "!(type=bird and name=Jen|)", "name", Arrays.asList("Jennifer", "Jenny") }); - result.add(new Object[] { "!(type=dog and name=|) and !type=cat", "name", - Arrays.asList("Jennifer", "Jenny", "Tim") }); + result.add(new Object[] { "!(type=dog and name=|) and !type=cat", "name", + Arrays.asList("Jennifer", "Jenny", "Tim") }); - // not existing 
field - result.add(new Object[] { "name=| and XYZ=Tim", "name", Arrays.asList() }); + // not existing field + result.add(new Object[] { "name=| and XYZ=Tim", "name", Arrays.asList() }); - // not existing value - result.add(new Object[] { "name=| and type=XYZ", "name", Arrays.asList() }); + // not existing value + result.add(new Object[] { "name=| and type=XYZ", "name", Arrays.asList() }); - return result.iterator(); - } + return result.iterator(); + } - @Test(dataProvider = "providerProposals") - public void testProposals(final String queryWithCaret, final String field, - final List expectedProposedValues) throws Exception { + @Test(dataProvider = "providerProposals") + public void testProposals(final String queryWithCaret, final String field, + final List expectedProposedValues) throws Exception { - dataStore = new DataStore(dataDirectory); - final ParititionId partitionId = DateIndexExtension.now(); - final DateTimeRange dateRange = DateTimeRange.relativeHours(1); + dataStore = new DataStore(dataDirectory); + final ParititionId partitionId = DateIndexExtension.now(); + final DateTimeRange dateRange = DateTimeRange.relativeHours(1); - final List tags = Arrays.asList( - Tags.createAndAddToDictionary("type", "bird", "subtype", "eagle", "age", "three", "name", "Tim"), - Tags.createAndAddToDictionary("type", "bird", "subtype", "pigeon", "age", "two", "name", "Jennifer"), - Tags.createAndAddToDictionary("type", "bird", "subtype", "flamingo", "age", "one", "name", "Jennifer"), + final List tags = Arrays.asList( + Tags.createAndAddToDictionary("type", "bird", "subtype", "eagle", "age", "three", "name", "Tim"), + Tags.createAndAddToDictionary("type", "bird", "subtype", "pigeon", "age", "two", "name", "Jennifer"), + Tags.createAndAddToDictionary("type", "bird", "subtype", "flamingo", "age", "one", "name", "Jennifer"), - Tags.createAndAddToDictionary("type", "dog", "subtype", "labrador", "age", "three", "name", "Jenny"), - Tags.createAndAddToDictionary("type", "dog", 
"subtype", "labrador", "age", "three", "name", "Tim"), + Tags.createAndAddToDictionary("type", "dog", "subtype", "labrador", "age", "three", "name", "Jenny"), + Tags.createAndAddToDictionary("type", "dog", "subtype", "labrador", "age", "three", "name", "Tim"), - Tags.createAndAddToDictionary("type", "cat", "subtype", "tiger", "age", "one", "name", "Timothy"), - Tags.createAndAddToDictionary("type", "cat", "subtype", "tiger", "age", "two", "name", "Paul"), - Tags.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "three", "name", "Jane"), - Tags.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "four", "name", "Sam"), - Tags.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "four", "name", "John")); + Tags.createAndAddToDictionary("type", "cat", "subtype", "tiger", "age", "one", "name", "Timothy"), + Tags.createAndAddToDictionary("type", "cat", "subtype", "tiger", "age", "two", "name", "Paul"), + Tags.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "three", "name", "Jane"), + Tags.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "four", "name", "Sam"), + Tags.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "four", "name", "John")); - tags.forEach(t -> dataStore.createNewFile(partitionId, t)); + tags.forEach(t -> dataStore.createNewFile(partitionId, t)); - assertProposals(dateRange, queryWithCaret, field, expectedProposedValues); - } + assertProposals(dateRange, queryWithCaret, field, expectedProposedValues); + } - public void testIdenticalDatesGoIntoSameFile() throws Exception { + public void testIdenticalDatesGoIntoSameFile() throws Exception { - try (final DataStore dataStore = new DataStore(dataDirectory)) { + try (final DataStore dataStore = new DataStore(dataDirectory)) { - final long timestamp = DateUtils.getDate(2016, 1, 1, 13, 1, 1).toInstant().toEpochMilli(); + final long timestamp = DateUtils.getDate(2016, 1, 1, 13, 1, 1).toInstant().toEpochMilli(); - final 
Tags tags = Tags.createAndAddToDictionary("myKey", "myValue"); + final Tags tags = Tags.createAndAddToDictionary("myKey", "myValue"); - dataStore.write(timestamp, tags, 1); - dataStore.write(timestamp, tags, 2); + dataStore.write(timestamp, tags, 1); + dataStore.write(timestamp, tags, 2); - Assert.assertEquals(dataStore.sizeWriterCache(), 1, "size of the writer cache"); - } - } + Assert.assertEquals(dataStore.sizeWriterCache(), 1, "size of the writer cache"); + } + } - public static void main(final String[] args) throws IOException, InterruptedException { - final Path dir = Files.createTempDirectory("pdb"); - try (final DataStore dataStore = new DataStore(dir)) { + public static void main(final String[] args) throws IOException, InterruptedException { + final Path dir = Files.createTempDirectory("pdb"); + try (final DataStore dataStore = new DataStore(dir)) { - final List tags = Arrays.asList( - Tags.createAndAddToDictionary("type", "bird", "subtype", "eagle", "age", "three", "name", "Tim"), - Tags.createAndAddToDictionary("type", "bird", "subtype", "pigeon", "age", "two", "name", - "Jennifer"), - Tags.createAndAddToDictionary("type", "bird", "subtype", "flamingo", "age", "one", "name", - "Jennifer"), + final List tags = Arrays.asList( + Tags.createAndAddToDictionary("type", "bird", "subtype", "eagle", "age", "three", "name", "Tim"), + Tags.createAndAddToDictionary("type", "bird", "subtype", "pigeon", "age", "two", "name", + "Jennifer"), + Tags.createAndAddToDictionary("type", "bird", "subtype", "flamingo", "age", "one", "name", + "Jennifer"), - Tags.createAndAddToDictionary("type", "dog", "subtype", "labrador", "age", "three", "name", - "Jenny"), - Tags.createAndAddToDictionary("type", "dog", "subtype", "labrador", "age", "three", "name", "Tim"), + Tags.createAndAddToDictionary("type", "dog", "subtype", "labrador", "age", "three", "name", + "Jenny"), + Tags.createAndAddToDictionary("type", "dog", "subtype", "labrador", "age", "three", "name", "Tim"), - 
Tags.createAndAddToDictionary("type", "cat", "subtype", "tiger", "age", "one", "name", "Timothy"), - Tags.createAndAddToDictionary("type", "cat", "subtype", "tiger", "age", "two", "name", "Paul"), - Tags.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "three", "name", "Jane"), - Tags.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "four", "name", "Sam"), - Tags.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "four", "name", "John")); + Tags.createAndAddToDictionary("type", "cat", "subtype", "tiger", "age", "one", "name", "Timothy"), + Tags.createAndAddToDictionary("type", "cat", "subtype", "tiger", "age", "two", "name", "Paul"), + Tags.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "three", "name", "Jane"), + Tags.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "four", "name", "Sam"), + Tags.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "four", "name", "John")); - final DateTimeRange dateRange = DateTimeRange.relativeMillis(0); - final ParititionId partitionId = DateIndexExtension.toPartitionIds(dateRange).get(0); - tags.forEach(t -> dataStore.createNewFile(partitionId, t)); + final DateTimeRange dateRange = DateTimeRange.relativeMillis(0); + final ParititionId partitionId = DateIndexExtension.toPartitionIds(dateRange).get(0); + tags.forEach(t -> dataStore.createNewFile(partitionId, t)); - final JFrame frame = new JFrame(); - final JTextField input = new JTextField(); - final JTextArea output = new JTextArea(); - final JTextArea info = new JTextArea(); + final JFrame frame = new JFrame(); + final JTextField input = new JTextField(); + final JTextArea output = new JTextArea(); + final JTextArea info = new JTextArea(); - frame.add(input, BorderLayout.NORTH); - frame.add(output, BorderLayout.CENTER); - frame.add(info, BorderLayout.SOUTH); + frame.add(input, BorderLayout.NORTH); + frame.add(output, BorderLayout.CENTER); + frame.add(info, BorderLayout.SOUTH); - 
input.setText("type=bird and !subtype=eagle and name="); + input.setText("type=bird and !subtype=eagle and name="); - input.addKeyListener(new KeyAdapter() { + input.addKeyListener(new KeyAdapter() { - @Override - public void keyReleased(final KeyEvent e) { + @Override + public void keyReleased(final KeyEvent e) { - final String query = input.getText(); - final int caretIndex = input.getCaretPosition(); - final QueryWithCaretMarker q = new QueryWithCaretMarker(query, dateRange, caretIndex, - ResultMode.CUT_AT_DOT); + final String query = input.getText(); + final int caretIndex = input.getCaretPosition(); + final QueryWithCaretMarker q = new QueryWithCaretMarker(query, dateRange, caretIndex, + ResultMode.CUT_AT_DOT); - final List proposals = dataStore.propose(q); + final List proposals = dataStore.propose(q); - final StringBuilder out = new StringBuilder(); + final StringBuilder out = new StringBuilder(); - for (final Proposal proposal : proposals) { - out.append(proposal.getProposedTag()); - out.append(" "); - out.append(proposal.getProposedQuery()); - out.append("\n"); - } + for (final Proposal proposal : proposals) { + out.append(proposal.getProposedTag()); + out.append(" "); + out.append(proposal.getProposedQuery()); + out.append("\n"); + } - final String queryWithCaretMarker = new StringBuilder(query).insert(caretIndex, "|").toString(); + final String queryWithCaretMarker = new StringBuilder(query).insert(caretIndex, "|").toString(); - out.append("\n"); - out.append("\n"); - out.append("input: " + queryWithCaretMarker); + out.append("\n"); + out.append("\n"); + out.append("input: " + queryWithCaretMarker); - output.setText(out.toString()); + output.setText(out.toString()); - } - }); - final List docs = dataStore.search(Query.createQuery("", DateTimeRange.relative(1, ChronoUnit.DAYS))); - final StringBuilder out = new StringBuilder(); - out.append("info\n"); - for (final Doc doc : docs) { - out.append(doc.getTags()); - out.append("\n"); - } - 
info.setText(out.toString()); + } + }); + final List docs = dataStore.search(Query.createQuery("", DateTimeRange.relative(1, ChronoUnit.DAYS))); + final StringBuilder out = new StringBuilder(); + out.append("info\n"); + for (final Doc doc : docs) { + out.append(doc.getTags()); + out.append("\n"); + } + info.setText(out.toString()); - frame.setSize(800, 600); - frame.setVisible(true); - TimeUnit.HOURS.sleep(1000); - } - } + frame.setSize(800, 600); + frame.setVisible(true); + TimeUnit.HOURS.sleep(1000); + } + } - private void assertProposals(final DateTimeRange dateRange, final String queryWithCaret, final String field, - final List expectedProposedValues) { - final String query = queryWithCaret.replace("|", ""); - final int caretIndex = queryWithCaret.indexOf("|"); - final List proposals = dataStore - .propose(new QueryWithCaretMarker(query, dateRange, caretIndex, ResultMode.CUT_AT_DOT)); - System.out.println( - "proposed values: " + proposals.stream().map(Proposal::getProposedTag).collect(Collectors.toList())); - - proposals.forEach(p -> assertQueryFindsResults(dateRange, p.getNewQuery())); - - final List proposedValues = CollectionUtils.map(proposals, Proposal::getProposedTag); - Collections.sort(proposedValues); - Collections.sort(expectedProposedValues); - Assert.assertEquals(proposedValues.toString(), expectedProposedValues.toString(), "proposed values:"); - } - - private void assertQueryFindsResults(final DateTimeRange dateRange, final String query) { - final List result = dataStore.search(new Query(query, dateRange)); - Assert.assertFalse(result.isEmpty(), "The query '" + query + "' must return a result, but didn't."); - } - - private void assertSearch(final DateTimeRange dateRange, final String queryString, final Tags... 
tags) { - final Query query = new Query(queryString, dateRange); - final List actualDocs = dataStore.search(query); - final List actual = CollectionUtils.map(actualDocs, Doc::getRootBlockNumber); - - final List expectedPaths = CollectionUtils.map(tags, tagsToBlockStorageRootBlockNumber::get); - - Assert.assertEquals(actual, expectedPaths, "Query: " + queryString + " Found: " + actual); - } + private void assertProposals(final DateTimeRange dateRange, final String queryWithCaret, final String field, + final List expectedProposedValues) { + final String query = queryWithCaret.replace("|", ""); + final int caretIndex = queryWithCaret.indexOf("|"); + final List proposals = dataStore + .propose(new QueryWithCaretMarker(query, dateRange, caretIndex, ResultMode.CUT_AT_DOT)); + System.out.println( + "proposed values: " + proposals.stream().map(Proposal::getProposedTag).collect(Collectors.toList())); + + proposals.forEach(p -> assertQueryFindsResults(dateRange, p.getNewQuery())); + + final List proposedValues = CollectionUtils.map(proposals, Proposal::getProposedTag); + Collections.sort(proposedValues); + Collections.sort(expectedProposedValues); + Assert.assertEquals(proposedValues.toString(), expectedProposedValues.toString(), "proposed values:"); + } + + private void assertQueryFindsResults(final DateTimeRange dateRange, final String query) { + final List result = dataStore.search(new Query(query, dateRange)); + Assert.assertFalse(result.isEmpty(), "The query '" + query + "' must return a result, but didn't."); + } + + private void assertSearch(final DateTimeRange dateRange, final String queryString, final Tags... 
tags) { + final Query query = new Query(queryString, dateRange); + final List actualDocs = dataStore.search(query); + final List actual = CollectionUtils.map(actualDocs, Doc::getRootBlockNumber); + + final List expectedPaths = CollectionUtils.map(tags, tagsToBlockStorageRootBlockNumber::get); + + Assert.assertEquals(actual, expectedPaths, "Query: " + queryString + " Found: " + actual); + } } diff --git a/data-store/src/test/java/org/lucares/pdb/datastore/internal/DateIndexExtensionTest.java b/data-store/src/test/java/org/lucares/pdb/datastore/internal/DateIndexExtensionTest.java index 25dc704..14bc0ed 100644 --- a/data-store/src/test/java/org/lucares/pdb/datastore/internal/DateIndexExtensionTest.java +++ b/data-store/src/test/java/org/lucares/pdb/datastore/internal/DateIndexExtensionTest.java @@ -16,129 +16,129 @@ import org.testng.annotations.Test; @Test public class DateIndexExtensionTest { - @DataProvider - public Object[][] provider() { + @DataProvider + public Object[][] provider() { - final List result = new ArrayList<>(); + final List result = new ArrayList<>(); - { - final OffsetDateTime start = OffsetDateTime.of(2018, 1, 31, 0, 0, 0, 0, ZoneOffset.UTC); - final OffsetDateTime end = OffsetDateTime.of(2018, 1, 31, 0, 0, 0, 0, ZoneOffset.UTC); - final Set expected = Set.of("201801"); - result.add(new Object[] { start, end, expected }); - } - { - final OffsetDateTime start = OffsetDateTime.of(2017, 11, 1, 0, 0, 0, 0, ZoneOffset.UTC); - final OffsetDateTime end = OffsetDateTime.of(2018, 02, 1, 0, 0, 0, 0, ZoneOffset.UTC); - final Set expected = Set.of("201711", "201712", "201801", "201802"); - result.add(new Object[] { start, end, expected }); - } - { - // check that adding one month to Jan 31 does not skip the February - final OffsetDateTime start = OffsetDateTime.of(2018, 1, 31, 0, 0, 0, 0, ZoneOffset.UTC); - final OffsetDateTime end = OffsetDateTime.of(2018, 3, 31, 0, 0, 0, 0, ZoneOffset.UTC); - final Set expected = Set.of("201801", "201802", "201803"); - 
result.add(new Object[] { start, end, expected }); - } + { + final OffsetDateTime start = OffsetDateTime.of(2018, 1, 31, 0, 0, 0, 0, ZoneOffset.UTC); + final OffsetDateTime end = OffsetDateTime.of(2018, 1, 31, 0, 0, 0, 0, ZoneOffset.UTC); + final Set expected = Set.of("201801"); + result.add(new Object[] { start, end, expected }); + } + { + final OffsetDateTime start = OffsetDateTime.of(2017, 11, 1, 0, 0, 0, 0, ZoneOffset.UTC); + final OffsetDateTime end = OffsetDateTime.of(2018, 02, 1, 0, 0, 0, 0, ZoneOffset.UTC); + final Set expected = Set.of("201711", "201712", "201801", "201802"); + result.add(new Object[] { start, end, expected }); + } + { + // check that adding one month to Jan 31 does not skip the February + final OffsetDateTime start = OffsetDateTime.of(2018, 1, 31, 0, 0, 0, 0, ZoneOffset.UTC); + final OffsetDateTime end = OffsetDateTime.of(2018, 3, 31, 0, 0, 0, 0, ZoneOffset.UTC); + final Set expected = Set.of("201801", "201802", "201803"); + result.add(new Object[] { start, end, expected }); + } - return result.toArray(new Object[0][]); - } + return result.toArray(new Object[0][]); + } - @Test(dataProvider = "provider") - public void test(final OffsetDateTime start, final OffsetDateTime end, final Set expected) { + @Test(dataProvider = "provider") + public void test(final OffsetDateTime start, final OffsetDateTime end, final Set expected) { - final DateTimeRange dateRange = new DateTimeRange(start, end); + final DateTimeRange dateRange = new DateTimeRange(start, end); - final Set actual = DateIndexExtension.toDateIndexPrefix(dateRange); + final Set actual = DateIndexExtension.toDateIndexPrefix(dateRange); - Assert.assertEquals(actual, expected); - } + Assert.assertEquals(actual, expected); + } - public void testDateToDateIndexPrefix() { + public void testDateToDateIndexPrefix() { - final long mid_201711 = OffsetDateTime.of(2017, 11, 23, 2, 2, 2, 0, ZoneOffset.UTC).toInstant().toEpochMilli(); - final long mid_201712 = OffsetDateTime.of(2017, 12, 7, 1, 1, 
1, 0, ZoneOffset.UTC).toInstant().toEpochMilli(); - final long min_201801 = OffsetDateTime.of(2018, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant().toEpochMilli(); - final long max_201801 = OffsetDateTime.of(2018, 1, 31, 23, 59, 59, 999_999_999, ZoneOffset.UTC).toInstant() - .toEpochMilli(); + final long mid_201711 = OffsetDateTime.of(2017, 11, 23, 2, 2, 2, 0, ZoneOffset.UTC).toInstant().toEpochMilli(); + final long mid_201712 = OffsetDateTime.of(2017, 12, 7, 1, 1, 1, 0, ZoneOffset.UTC).toInstant().toEpochMilli(); + final long min_201801 = OffsetDateTime.of(2018, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant().toEpochMilli(); + final long max_201801 = OffsetDateTime.of(2018, 1, 31, 23, 59, 59, 999_999_999, ZoneOffset.UTC).toInstant() + .toEpochMilli(); - Assert.assertEquals(DateIndexExtension.toDateIndexPrefix(mid_201712), "201712"); - Assert.assertEquals(DateIndexExtension.toDateIndexPrefix(min_201801), "201801"); - Assert.assertEquals(DateIndexExtension.toDateIndexPrefix(max_201801), "201801"); - Assert.assertEquals(DateIndexExtension.toDateIndexPrefix(mid_201711), "201711"); - } + Assert.assertEquals(DateIndexExtension.toDateIndexPrefix(mid_201712), "201712"); + Assert.assertEquals(DateIndexExtension.toDateIndexPrefix(min_201801), "201801"); + Assert.assertEquals(DateIndexExtension.toDateIndexPrefix(max_201801), "201801"); + Assert.assertEquals(DateIndexExtension.toDateIndexPrefix(mid_201711), "201711"); + } - public void testDateRanges() { - final OffsetDateTime mid_201712 = OffsetDateTime.of(2017, 12, 7, 1, 1, 1, 0, ZoneOffset.UTC) - .withOffsetSameInstant(ZoneOffset.ofHours(-2)); - final OffsetDateTime min_201801 = OffsetDateTime.of(2018, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC) - .withOffsetSameInstant(ZoneOffset.ofHours(-8)); - final OffsetDateTime min_201802 = OffsetDateTime.of(2018, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC) - .withOffsetSameInstant(ZoneOffset.ofHours(12)); + public void testDateRanges() { + final OffsetDateTime mid_201712 = OffsetDateTime.of(2017, 12, 7, 1, 
1, 1, 0, ZoneOffset.UTC) + .withOffsetSameInstant(ZoneOffset.ofHours(-2)); + final OffsetDateTime min_201801 = OffsetDateTime.of(2018, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC) + .withOffsetSameInstant(ZoneOffset.ofHours(-8)); + final OffsetDateTime min_201802 = OffsetDateTime.of(2018, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC) + .withOffsetSameInstant(ZoneOffset.ofHours(12)); - final DateTimeRange range_201712_201802 = new DateTimeRange(mid_201712, min_201802); - final DateTimeRange range_201712_201801 = new DateTimeRange(mid_201712, min_201801); - final DateTimeRange range_201712_201712 = new DateTimeRange(mid_201712, mid_201712); + final DateTimeRange range_201712_201802 = new DateTimeRange(mid_201712, min_201802); + final DateTimeRange range_201712_201801 = new DateTimeRange(mid_201712, min_201801); + final DateTimeRange range_201712_201712 = new DateTimeRange(mid_201712, mid_201712); - final List dateIndexPrefixesWithEmptyCache = DateIndexExtension - .toPartitionIds(range_201712_201802); - Assert.assertEquals(dateIndexPrefixesWithEmptyCache, - Arrays.asList(new ParititionId("201712"), new ParititionId("201801"), new ParititionId("201802"))); + final List dateIndexPrefixesWithEmptyCache = DateIndexExtension + .toPartitionIds(range_201712_201802); + Assert.assertEquals(dateIndexPrefixesWithEmptyCache, + Arrays.asList(new ParititionId("201712"), new ParititionId("201801"), new ParititionId("201802"))); - final List dateIndexPrefixesWithFilledCache = DateIndexExtension - .toPartitionIds(range_201712_201801); - Assert.assertEquals(dateIndexPrefixesWithFilledCache, - Arrays.asList(new ParititionId("201712"), new ParititionId("201801"))); + final List dateIndexPrefixesWithFilledCache = DateIndexExtension + .toPartitionIds(range_201712_201801); + Assert.assertEquals(dateIndexPrefixesWithFilledCache, + Arrays.asList(new ParititionId("201712"), new ParititionId("201801"))); - final List dateIndexPrefixesOneMonth = DateIndexExtension.toPartitionIds(range_201712_201712); - 
Assert.assertEquals(dateIndexPrefixesOneMonth, Arrays.asList(new ParititionId("201712"))); - } + final List dateIndexPrefixesOneMonth = DateIndexExtension.toPartitionIds(range_201712_201712); + Assert.assertEquals(dateIndexPrefixesOneMonth, Arrays.asList(new ParititionId("201712"))); + } - public void testDateRangeToEpochMilli() { - final OffsetDateTime mid_201712 = OffsetDateTime.of(2017, 12, 7, 1, 1, 1, 0, ZoneOffset.ofHours(3)); - final OffsetDateTime min_201802 = OffsetDateTime.of(2018, 2, 15, 0, 0, 0, 0, ZoneOffset.ofHours(7)); + public void testDateRangeToEpochMilli() { + final OffsetDateTime mid_201712 = OffsetDateTime.of(2017, 12, 7, 1, 1, 1, 0, ZoneOffset.ofHours(3)); + final OffsetDateTime min_201802 = OffsetDateTime.of(2018, 2, 15, 0, 0, 0, 0, ZoneOffset.ofHours(7)); - final long exp_201712 = OffsetDateTime.of(2017, 12, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant().toEpochMilli(); - final long exp_201801 = OffsetDateTime.of(2018, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant().toEpochMilli(); - final long exp_201802 = OffsetDateTime.of(2018, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant().toEpochMilli(); + final long exp_201712 = OffsetDateTime.of(2017, 12, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant().toEpochMilli(); + final long exp_201801 = OffsetDateTime.of(2018, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant().toEpochMilli(); + final long exp_201802 = OffsetDateTime.of(2018, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant().toEpochMilli(); - final List dateIndexEpochMillis = DateIndexExtension - .toDateIndexEpochMillis(new DateTimeRange(mid_201712, min_201802)); - Assert.assertEquals(dateIndexEpochMillis, Arrays.asList(exp_201712, exp_201801, exp_201802)); - } + final List dateIndexEpochMillis = DateIndexExtension + .toDateIndexEpochMillis(new DateTimeRange(mid_201712, min_201802)); + Assert.assertEquals(dateIndexEpochMillis, Arrays.asList(exp_201712, exp_201801, exp_201802)); + } - public void testPerformance() { + public void testPerformance() { - final long min = 
OffsetDateTime.of(2010, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant().toEpochMilli(); - final long mid = OffsetDateTime.of(2020, 6, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant().toEpochMilli(); - final long max = OffsetDateTime.of(2030, 12, 31, 0, 0, 0, 0, ZoneOffset.UTC).toInstant().toEpochMilli(); + final long min = OffsetDateTime.of(2010, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant().toEpochMilli(); + final long mid = OffsetDateTime.of(2020, 6, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant().toEpochMilli(); + final long max = OffsetDateTime.of(2030, 12, 31, 0, 0, 0, 0, ZoneOffset.UTC).toInstant().toEpochMilli(); - final int iterations = 1_000_000; - final int factor = 1; - final int warmup = 20 * factor; - final int rounds = warmup + 20; + final int iterations = 1_000_000; + final int factor = 1; + final int warmup = 20 * factor; + final int rounds = warmup + 20; - // fill the cache - DateIndexExtension.DATE_PREFIX_CACHE.clear(); - for (long i = min; i < max; i += 3600 * 24 * 28) { - DateIndexExtension.toPartitionId(i); - } + // fill the cache + DateIndexExtension.DATE_PREFIX_CACHE.clear(); + for (long i = min; i < max; i += 3600 * 24 * 28) { + DateIndexExtension.toPartitionId(i); + } - final List measurements = new ArrayList<>(); + final List measurements = new ArrayList<>(); - for (int r = 0; r < rounds; r++) { + for (int r = 0; r < rounds; r++) { - final long start = System.nanoTime(); - for (int i = 0; i < iterations; i++) { - DateIndexExtension.toPartitionId(mid); - } - final double duration = (System.nanoTime() - start) / 1_000_000.0; - System.out.println("duration: " + duration + "ms"); - measurements.add(duration); - } + final long start = System.nanoTime(); + for (int i = 0; i < iterations; i++) { + DateIndexExtension.toPartitionId(mid); + } + final double duration = (System.nanoTime() - start) / 1_000_000.0; + System.out.println("duration: " + duration + "ms"); + measurements.add(duration); + } - final DoubleSummaryStatistics stats = 
measurements.subList(warmup, rounds).stream().mapToDouble(d -> factor * d) - .summaryStatistics(); - System.out.println(stats); - } + final DoubleSummaryStatistics stats = measurements.subList(warmup, rounds).stream().mapToDouble(d -> factor * d) + .summaryStatistics(); + System.out.println(stats); + } } diff --git a/data-store/src/test/java/org/lucares/pdb/datastore/internal/ProposerTest.java b/data-store/src/test/java/org/lucares/pdb/datastore/internal/ProposerTest.java index 7c30d9d..8d3f9d7 100644 --- a/data-store/src/test/java/org/lucares/pdb/datastore/internal/ProposerTest.java +++ b/data-store/src/test/java/org/lucares/pdb/datastore/internal/ProposerTest.java @@ -22,275 +22,276 @@ import org.testng.annotations.Test; @Test public class ProposerTest { - private Path dataDirectory; - private DataStore dataStore; - private DateTimeRange dateRange; + private Path dataDirectory; + private DataStore dataStore; + private DateTimeRange dateRange; - @BeforeClass - public void beforeClass() throws Exception { - dataDirectory = Files.createTempDirectory("pdb"); - initDatabase(); - } + @BeforeClass + public void beforeClass() throws Exception { + dataDirectory = Files.createTempDirectory("pdb"); + initDatabase(); + } - @AfterClass - public void afterClass() throws IOException { - FileUtils.delete(dataDirectory); - dataStore.close(); - dataStore = null; - Tags.STRING_COMPRESSOR = null; - } + @AfterClass + public void afterClass() throws IOException { + FileUtils.delete(dataDirectory); + dataStore.close(); + dataStore = null; + Tags.STRING_COMPRESSOR = null; + } - private void initDatabase() throws Exception { - dataStore = new DataStore(dataDirectory); - dateRange = DateTimeRange.now(); - final ParititionId now = DateIndexExtension.toPartitionIds(dateRange).get(0); + private void initDatabase() throws Exception { + dataStore = new DataStore(dataDirectory); + dateRange = DateTimeRange.now(); + final ParititionId now = DateIndexExtension.toPartitionIds(dateRange).get(0); - 
final Tags eagleTim = Tags.createAndAddToDictionary("bird", "eagle", "name", "Tim"); - final Tags eagleTimothy = Tags.createAndAddToDictionary("bird", "eagle", "name", "Timothy"); - final Tags pigeonJennifer = Tags.createAndAddToDictionary("bird", "pigeon", "name", "Jennifer"); - final Tags flamingoJennifer = Tags.createAndAddToDictionary("bird", "flamingo", "name", "Jennifer"); - final Tags labradorJenny = Tags.createAndAddToDictionary("dog", "labrador", "name", "Jenny"); - final Tags labradorTim = Tags.createAndAddToDictionary("dog", "labrador", "name", "Tim"); + final Tags eagleTim = Tags.createAndAddToDictionary("bird", "eagle", "name", "Tim"); + final Tags eagleTimothy = Tags.createAndAddToDictionary("bird", "eagle", "name", "Timothy"); + final Tags pigeonJennifer = Tags.createAndAddToDictionary("bird", "pigeon", "name", "Jennifer"); + final Tags flamingoJennifer = Tags.createAndAddToDictionary("bird", "flamingo", "name", "Jennifer"); + final Tags labradorJenny = Tags.createAndAddToDictionary("dog", "labrador", "name", "Jenny"); + final Tags labradorTim = Tags.createAndAddToDictionary("dog", "labrador", "name", "Tim"); - final Tags methodA = Tags.createAndAddToDictionary("method", "FooController.doImportantStuff", "source", "web"); - final Tags methodB = Tags.createAndAddToDictionary("method", "FooService.doImportantStuff", "source", - "service"); - final Tags methodC = Tags.createAndAddToDictionary("method", "BarController.doBoringStuff", "source", "web"); - final Tags methodD = Tags.createAndAddToDictionary("method", "FooBarService.doOtherStuff", "source", "service"); + final Tags methodA = Tags.createAndAddToDictionary("method", "FooController.doImportantStuff", "source", "web"); + final Tags methodB = Tags.createAndAddToDictionary("method", "FooService.doImportantStuff", "source", + "service"); + final Tags methodC = Tags.createAndAddToDictionary("method", "BarController.doBoringStuff", "source", "web"); + final Tags methodD = 
Tags.createAndAddToDictionary("method", "FooBarService.doOtherStuff", "source", "service"); - dataStore.createNewFile(now, eagleTim); - dataStore.createNewFile(now, eagleTimothy); - dataStore.createNewFile(now, pigeonJennifer); - dataStore.createNewFile(now, flamingoJennifer); - dataStore.createNewFile(now, labradorJenny); - dataStore.createNewFile(now, labradorTim); + dataStore.createNewFile(now, eagleTim); + dataStore.createNewFile(now, eagleTimothy); + dataStore.createNewFile(now, pigeonJennifer); + dataStore.createNewFile(now, flamingoJennifer); + dataStore.createNewFile(now, labradorJenny); + dataStore.createNewFile(now, labradorTim); - dataStore.createNewFile(now, methodA); - dataStore.createNewFile(now, methodB); - dataStore.createNewFile(now, methodC); - dataStore.createNewFile(now, methodD); - } + dataStore.createNewFile(now, methodA); + dataStore.createNewFile(now, methodB); + dataStore.createNewFile(now, methodC); + dataStore.createNewFile(now, methodD); + } - public void testEmptyQuery() throws Exception { + public void testEmptyQuery() throws Exception { - assertProposals("|", ResultMode.FULL_VALUES, // - new Proposal("name", "name=*", true, "name=", 5), // - new Proposal("bird", "bird=*", true, "bird=", 5), // - new Proposal("dog", "dog=*", true, "dog=", 4), // - new Proposal("method", "method=*", true, "method=", 7), // - new Proposal("source", "source=*", true, "source=", 7)// - ); + assertProposals("|", ResultMode.FULL_VALUES, // + new Proposal("name", "name=*", true, "name=", 5), // + new Proposal("bird", "bird=*", true, "bird=", 5), // + new Proposal("dog", "dog=*", true, "dog=", 4), // + new Proposal("method", "method=*", true, "method=", 7), // + new Proposal("source", "source=*", true, "source=", 7)// + ); - assertProposals(" |", ResultMode.FULL_VALUES, // - new Proposal("name", "name=*", true, "name=", 5), // - new Proposal("bird", "bird=*", true, "bird=", 5), // - new Proposal("dog", "dog=*", true, "dog=", 4), // - new Proposal("method", 
"method=*", true, "method=", 7), // - new Proposal("source", "source=*", true, "source=", 7)// - ); - } + assertProposals(" |", ResultMode.FULL_VALUES, // + new Proposal("name", "name=*", true, "name=", 5), // + new Proposal("bird", "bird=*", true, "bird=", 5), // + new Proposal("dog", "dog=*", true, "dog=", 4), // + new Proposal("method", "method=*", true, "method=", 7), // + new Proposal("source", "source=*", true, "source=", 7)// + ); + } - public void testPrefixOfKey() throws Exception { - assertProposals("bi|", ResultMode.FULL_VALUES, // - new Proposal("bird", "bird=* ", true, "bird=", 5) // - ); - assertProposals("bird|", ResultMode.FULL_VALUES, // - new Proposal("bird", "bird=* ", true, "bird=", 5) // - ); - assertProposals("bird=eagle and n|", ResultMode.FULL_VALUES, // - new Proposal("name", "bird=eagle and name=* ", true, "bird=eagle and name=", 20) // - ); - - assertProposals("|bird", ResultMode.FULL_VALUES, // - new Proposal("bird", "bird=* ", true, "bird=", 5), // - new Proposal("dog", "dog=* ", true, "dog=", 4), // - new Proposal("method", "method=* ", true, "method=", 7), // - new Proposal("name", "name=* ", true, "name=", 5), // - new Proposal("source", "source=* ", true, "source=", 7) // - ); - } + public void testPrefixOfKey() throws Exception { + assertProposals("bi|", ResultMode.FULL_VALUES, // + new Proposal("bird", "bird=* ", true, "bird=", 5) // + ); + assertProposals("bird|", ResultMode.FULL_VALUES, // + new Proposal("bird", "bird=* ", true, "bird=", 5) // + ); + assertProposals("bird=eagle and n|", ResultMode.FULL_VALUES, // + new Proposal("name", "bird=eagle and name=* ", true, "bird=eagle and name=", 20) // + ); - public void testPrefixOfValue() throws Exception { - assertProposals("name =Tim|", ResultMode.FULL_VALUES, // - new Proposal("Tim", "name =Tim", true, "name =Tim", 9), - new Proposal("Timothy", "name =Timothy", true, "name =Timothy", 13)); + assertProposals("|bird", ResultMode.FULL_VALUES, // + new Proposal("bird", "bird=* ", 
true, "bird=", 5), // + new Proposal("dog", "dog=* ", true, "dog=", 4), // + new Proposal("method", "method=* ", true, "method=", 7), // + new Proposal("name", "name=* ", true, "name=", 5), // + new Proposal("source", "source=* ", true, "source=", 7) // + ); + } - assertProposals("name =Je|", ResultMode.FULL_VALUES, // - new Proposal("Jennifer", "name =Jennifer", true, "name =Jennifer", 14), // - new Proposal("Jenny", "name =Jenny", true, "name =Jenny", 11) // - ); - assertProposals("name =Tim,Je|", ResultMode.FULL_VALUES, // - new Proposal("Jennifer", "name =Tim,Jennifer", true, "name =Tim,Jennifer", 18), // - new Proposal("Jenny", "name =Tim,Jenny", true, "name =Tim,Jenny", 15) // - ); - - // TODO this case is currently handled completely wrong - it is handled similar to an empty query + public void testPrefixOfValue() throws Exception { + assertProposals("name =Tim|", ResultMode.FULL_VALUES, // + new Proposal("Tim", "name =Tim", true, "name =Tim", 9), + new Proposal("Timothy", "name =Timothy", true, "name =Timothy", 13)); + + assertProposals("name =Je|", ResultMode.FULL_VALUES, // + new Proposal("Jennifer", "name =Jennifer", true, "name =Jennifer", 14), // + new Proposal("Jenny", "name =Jenny", true, "name =Jenny", 11) // + ); + assertProposals("name =Tim,Je|", ResultMode.FULL_VALUES, // + new Proposal("Jennifer", "name =Tim,Jennifer", true, "name =Tim,Jennifer", 18), // + new Proposal("Jenny", "name =Tim,Jenny", true, "name =Tim,Jenny", 15) // + ); + + // TODO this case is currently handled completely wrong - it is handled similar + // to an empty query // assertProposals("|bird=eagle and name=Tim", ResultMode.FULL_VALUES, // // new Proposal("Jennifer", "name =Tim,Jennifer", true, "name =Tim,Jennifer", 18), // // new Proposal("Jenny", "name =Tim,Jenny", true, "name =Tim,Jenny", 15) // // ); - - /* - */ - } - @Test(enabled = true) - public void testInExpressions() throws Exception { - assertProposals("name = (Timothy,|)", ResultMode.FULL_VALUES, // - new 
Proposal("Jennifer", "name = (Timothy,Jennifer)", true, "name = (Timothy,Jennifer)", 24), // - new Proposal("Jenny", "name = (Timothy,Jenny)", true, "name = (Timothy,Jenny)", 21), // - new Proposal("Tim", "name = (Timothy,Tim)", true, "name = (Timothy,Tim)", 19), // - new Proposal("Timothy", "name = (Timothy,Timothy)", true, "name = (Timothy,Timothy)", 23)// - ); + /* + */ + } - assertProposals("name = (Timothy, J|)", ResultMode.FULL_VALUES, // - new Proposal("Jennifer", "name = (Timothy, Jennifer)", true, "name = (Timothy, Jennifer)", 25), // - new Proposal("Jenny", "name = (Timothy, Jenny)", true, "name = (Timothy, Jenny)", 22)); + @Test(enabled = true) + public void testInExpressions() throws Exception { + assertProposals("name = (Timothy,|)", ResultMode.FULL_VALUES, // + new Proposal("Jennifer", "name = (Timothy,Jennifer)", true, "name = (Timothy,Jennifer)", 24), // + new Proposal("Jenny", "name = (Timothy,Jenny)", true, "name = (Timothy,Jenny)", 21), // + new Proposal("Tim", "name = (Timothy,Tim)", true, "name = (Timothy,Tim)", 19), // + new Proposal("Timothy", "name = (Timothy,Timothy)", true, "name = (Timothy,Timothy)", 23)// + ); - assertProposals("name = (Tim|)", ResultMode.FULL_VALUES, // - new Proposal("Tim", "name = (Tim)", true, "name = (Tim)", 11), - new Proposal("Timothy", "name = (Timothy)", true, "name = (Timothy)", 15)); + assertProposals("name = (Timothy, J|)", ResultMode.FULL_VALUES, // + new Proposal("Jennifer", "name = (Timothy, Jennifer)", true, "name = (Timothy, Jennifer)", 25), // + new Proposal("Jenny", "name = (Timothy, Jenny)", true, "name = (Timothy, Jenny)", 22)); - /* - */ - } + assertProposals("name = (Tim|)", ResultMode.FULL_VALUES, // + new Proposal("Tim", "name = (Tim)", true, "name = (Tim)", 11), + new Proposal("Timothy", "name = (Timothy)", true, "name = (Timothy)", 15)); - public void testProposalOnEmptyValuePrefix() throws Exception { - assertProposals("name=|", ResultMode.FULL_VALUES, // - new Proposal("Jennifer", 
"name=Jennifer", true, "name=Jennifer", 13), // - new Proposal("Jenny", "name=Jenny", true, "name=Jenny", 10), // - new Proposal("Tim", "name=Tim", true, "name=Tim", 8), // - new Proposal("Timothy", "name=Timothy", true, "name=Timothy", 12) // - ); + /* + */ + } - assertProposals("method=|", ResultMode.CUT_AT_DOT, // - new Proposal("FooController.", "method=FooController.", true, "method=FooController.", 21), // - new Proposal("FooService.", "method=FooService.", true, "method=FooService.", 18), // - new Proposal("BarController.", "method=BarController.", true, "method=BarController.", 21), // - new Proposal("FooBarService.", "method=FooBarService.", true, "method=FooBarService.", 21) // - ); - assertProposals("method=|", ResultMode.FULL_VALUES, // - new Proposal("FooController.doImportantStuff", "method=FooController.doImportantStuff", true, - "method=FooController.doImportantStuff", 37), // - new Proposal("FooService.doImportantStuff", "method=FooService.doImportantStuff", true, - "method=FooService.doImportantStuff", 34), // - new Proposal("FooBarService.doOtherStuff", "method=FooBarService.doOtherStuff", true, - "method=FooBarService.doOtherStuff", 33), // - new Proposal("BarController.doBoringStuff", "method=BarController.doBoringStuff", true, - "method=BarController.doBoringStuff", 34) // - ); - } + public void testProposalOnEmptyValuePrefix() throws Exception { + assertProposals("name=|", ResultMode.FULL_VALUES, // + new Proposal("Jennifer", "name=Jennifer", true, "name=Jennifer", 13), // + new Proposal("Jenny", "name=Jenny", true, "name=Jenny", 10), // + new Proposal("Tim", "name=Tim", true, "name=Tim", 8), // + new Proposal("Timothy", "name=Timothy", true, "name=Timothy", 12) // + ); - public void testProposalOnValueSmartExpression() throws Exception { - assertProposals("method=Foo.|", ResultMode.CUT_AT_DOT, // - new Proposal("FooController.doImportantStuff", "method=FooController.doImportantStuff", true, - "method=FooController.doImportantStuff", 37), // 
- new Proposal("FooService.doImportantStuff", "method=FooService.doImportantStuff", true, - "method=FooService.doImportantStuff", 34), // - new Proposal("FooBarService.doOtherStuff", "method=FooBarService.doOtherStuff", true, - "method=FooBarService.doOtherStuff", 33) // - ); + assertProposals("method=|", ResultMode.CUT_AT_DOT, // + new Proposal("FooController.", "method=FooController.", true, "method=FooController.", 21), // + new Proposal("FooService.", "method=FooService.", true, "method=FooService.", 18), // + new Proposal("BarController.", "method=BarController.", true, "method=BarController.", 21), // + new Proposal("FooBarService.", "method=FooBarService.", true, "method=FooBarService.", 21) // + ); + assertProposals("method=|", ResultMode.FULL_VALUES, // + new Proposal("FooController.doImportantStuff", "method=FooController.doImportantStuff", true, + "method=FooController.doImportantStuff", 37), // + new Proposal("FooService.doImportantStuff", "method=FooService.doImportantStuff", true, + "method=FooService.doImportantStuff", 34), // + new Proposal("FooBarService.doOtherStuff", "method=FooBarService.doOtherStuff", true, + "method=FooBarService.doOtherStuff", 33), // + new Proposal("BarController.doBoringStuff", "method=BarController.doBoringStuff", true, + "method=BarController.doBoringStuff", 34) // + ); + } - assertProposals("method=Foo.*Stuf|", ResultMode.CUT_AT_DOT, // - new Proposal("FooController.doImportantStuff", "method=FooController.doImportantStuff", true, - "method=FooController.doImportantStuff", 37), // - new Proposal("FooService.doImportantStuff", "method=FooService.doImportantStuff", true, - "method=FooService.doImportantStuff", 34), // - new Proposal("FooBarService.doOtherStuff", "method=FooBarService.doOtherStuff", true, - "method=FooBarService.doOtherStuff", 33) // - ); + public void testProposalOnValueSmartExpression() throws Exception { + assertProposals("method=Foo.|", ResultMode.CUT_AT_DOT, // + new 
Proposal("FooController.doImportantStuff", "method=FooController.doImportantStuff", true, + "method=FooController.doImportantStuff", 37), // + new Proposal("FooService.doImportantStuff", "method=FooService.doImportantStuff", true, + "method=FooService.doImportantStuff", 34), // + new Proposal("FooBarService.doOtherStuff", "method=FooBarService.doOtherStuff", true, + "method=FooBarService.doOtherStuff", 33) // + ); - // returns nothing, because GloblikePattern.globlikeToRegex() returns the - // following regex: ^[a-z]*Foo.*\.[a-z]*Stuf - // Maybe I will change that some day and allow upper case characters before - // "Stuff". - assertProposals("method=Foo.Stuf|", ResultMode.CUT_AT_DOT); + assertProposals("method=Foo.*Stuf|", ResultMode.CUT_AT_DOT, // + new Proposal("FooController.doImportantStuff", "method=FooController.doImportantStuff", true, + "method=FooController.doImportantStuff", 37), // + new Proposal("FooService.doImportantStuff", "method=FooService.doImportantStuff", true, + "method=FooService.doImportantStuff", 34), // + new Proposal("FooBarService.doOtherStuff", "method=FooBarService.doOtherStuff", true, + "method=FooBarService.doOtherStuff", 33) // + ); - assertProposals("method=Foo.Im", ResultMode.CUT_AT_DOT, 13, // - new Proposal("FooController.doImportantStuff", "method=FooController.doImportantStuff", true, - "method=FooController.doImportantStuff", 37), // - new Proposal("FooService.doImportantStuff", "method=FooService.doImportantStuff", true, - "method=FooService.doImportantStuff", 34) // - ); - } + // returns nothing, because GloblikePattern.globlikeToRegex() returns the + // following regex: ^[a-z]*Foo.*\.[a-z]*Stuf + // Maybe I will change that some day and allow upper case characters before + // "Stuff". 
+ assertProposals("method=Foo.Stuf|", ResultMode.CUT_AT_DOT); - public void testProposalOnEmptyKeyPrefix() throws Exception { - assertProposals("name=* and |", ResultMode.FULL_VALUES, // - proposal("name", "name=* and name=* ", "name=* and name=|"), // - proposal("bird", "name=* and bird=* ", "name=* and bird=|"), // - proposal("dog", "name=* and dog=* ", "name=* and dog=|"), // - // TODO it is wrong to return those two, because there are no values with name - // and type|address, but I'll leave this for now, because this is a different - // issue - proposal("method", "name=* and method=* ", "name=* and method=|"), // - proposal("source", "name=* and source=* ", "name=* and source=|")// - ); - } + assertProposals("method=Foo.Im", ResultMode.CUT_AT_DOT, 13, // + new Proposal("FooController.doImportantStuff", "method=FooController.doImportantStuff", true, + "method=FooController.doImportantStuff", 37), // + new Proposal("FooService.doImportantStuff", "method=FooService.doImportantStuff", true, + "method=FooService.doImportantStuff", 34) // + ); + } - public void testProposalWithWildcards() throws Exception { + public void testProposalOnEmptyKeyPrefix() throws Exception { + assertProposals("name=* and |", ResultMode.FULL_VALUES, // + proposal("name", "name=* and name=* ", "name=* and name=|"), // + proposal("bird", "name=* and bird=* ", "name=* and bird=|"), // + proposal("dog", "name=* and dog=* ", "name=* and dog=|"), // + // TODO it is wrong to return those two, because there are no values with name + // and type|address, but I'll leave this for now, because this is a different + // issue + proposal("method", "name=* and method=* ", "name=* and method=|"), // + proposal("source", "name=* and source=* ", "name=* and source=|")// + ); + } - assertProposals("name=*im|", ResultMode.FULL_VALUES, // - proposal("Tim", "name=Tim", "name=Tim|"), // - proposal("Timothy", "name=Timothy", "name=Timothy|")// - ); + public void testProposalWithWildcards() throws Exception { - 
assertProposals("(method=FooService.doIS,FooController.*) and method=|", ResultMode.FULL_VALUES, // - proposal("FooService.doImportantStuff", - "(method=FooService.doIS,FooController.*) and method=FooService.doImportantStuff", - "(method=FooService.doIS,FooController.*) and method=FooService.doImportantStuff|"), // - proposal("FooController.doImportantStuff", - "(method=FooService.doIS,FooController.*) and method=FooController.doImportantStuff", - "(method=FooService.doIS,FooController.*) and method=FooController.doImportantStuff|")// - ); - } + assertProposals("name=*im|", ResultMode.FULL_VALUES, // + proposal("Tim", "name=Tim", "name=Tim|"), // + proposal("Timothy", "name=Timothy", "name=Timothy|")// + ); - public void testProposalWithAndExpression() throws Exception { - assertProposals("name=*im| and bird=eagle", ResultMode.FULL_VALUES, // - proposal("Tim", "name=Tim and bird=eagle", "name=Tim| and bird=eagle"), // - proposal("Timothy", "name=Timothy and bird=eagle", "name=Timothy| and bird=eagle")// - ); + assertProposals("(method=FooService.doIS,FooController.*) and method=|", ResultMode.FULL_VALUES, // + proposal("FooService.doImportantStuff", + "(method=FooService.doIS,FooController.*) and method=FooService.doImportantStuff", + "(method=FooService.doIS,FooController.*) and method=FooService.doImportantStuff|"), // + proposal("FooController.doImportantStuff", + "(method=FooService.doIS,FooController.*) and method=FooController.doImportantStuff", + "(method=FooService.doIS,FooController.*) and method=FooController.doImportantStuff|")// + ); + } - assertProposals("name=*im| and bird=eagle,pigeon", ResultMode.FULL_VALUES, // - proposal("Tim", "name=Tim and bird=eagle,pigeon", "name=Tim| and bird=eagle,pigeon"), // - proposal("Timothy", "name=Timothy and bird=eagle,pigeon", "name=Timothy| and bird=eagle,pigeon")// - ); - } + public void testProposalWithAndExpression() throws Exception { + assertProposals("name=*im| and bird=eagle", ResultMode.FULL_VALUES, // + 
proposal("Tim", "name=Tim and bird=eagle", "name=Tim| and bird=eagle"), // + proposal("Timothy", "name=Timothy and bird=eagle", "name=Timothy| and bird=eagle")// + ); - public void testProposalWithAndNotExpression() throws Exception { - assertProposals("name=Tim and ! dog=labrador and bird=|", ResultMode.FULL_VALUES, // - proposal("eagle", "name=Tim and ! dog=labrador and bird=eagle", - "name=Tim and ! dog=labrador and bird=eagle|") // - ); - assertProposals("name=Tim and not dog=labrador and bird=|", ResultMode.FULL_VALUES, // - proposal("eagle", "name=Tim and not dog=labrador and bird=eagle", - "name=Tim and not dog=labrador and bird=eagle|") // - ); - } + assertProposals("name=*im| and bird=eagle,pigeon", ResultMode.FULL_VALUES, // + proposal("Tim", "name=Tim and bird=eagle,pigeon", "name=Tim| and bird=eagle,pigeon"), // + proposal("Timothy", "name=Timothy and bird=eagle,pigeon", "name=Timothy| and bird=eagle,pigeon")// + ); + } - private Proposal proposal(final String proposedTag, final String proposedQuery, final String newQuery) { - final String newQueryWithoutCaretMarker = newQuery.replace("|", ""); - final int newCaretPosition = newQuery.indexOf('|'); - return new Proposal(proposedTag, proposedQuery, true, newQueryWithoutCaretMarker, newCaretPosition); - } + public void testProposalWithAndNotExpression() throws Exception { + assertProposals("name=Tim and ! dog=labrador and bird=|", ResultMode.FULL_VALUES, // + proposal("eagle", "name=Tim and ! dog=labrador and bird=eagle", + "name=Tim and ! dog=labrador and bird=eagle|") // + ); + assertProposals("name=Tim and not dog=labrador and bird=|", ResultMode.FULL_VALUES, // + proposal("eagle", "name=Tim and not dog=labrador and bird=eagle", + "name=Tim and not dog=labrador and bird=eagle|") // + ); + } - private void assertProposals(final String query, final ResultMode resultMode, final Proposal... 
expected) - throws InterruptedException { - final int caretIndex = query.indexOf("|"); - final String q = query.replace("|", ""); - assertProposals(q, resultMode, caretIndex, expected); - } + private Proposal proposal(final String proposedTag, final String proposedQuery, final String newQuery) { + final String newQueryWithoutCaretMarker = newQuery.replace("|", ""); + final int newCaretPosition = newQuery.indexOf('|'); + return new Proposal(proposedTag, proposedQuery, true, newQueryWithoutCaretMarker, newCaretPosition); + } - private void assertProposals(final String query, final ResultMode resultMode, final int caretIndex, - final Proposal... expected) throws InterruptedException { + private void assertProposals(final String query, final ResultMode resultMode, final Proposal... expected) + throws InterruptedException { + final int caretIndex = query.indexOf("|"); + final String q = query.replace("|", ""); + assertProposals(q, resultMode, caretIndex, expected); + } - final List actual = dataStore - .propose(new QueryWithCaretMarker(query, dateRange, caretIndex, resultMode)); - final List expectedList = Arrays.asList(expected); - Collections.sort(expectedList); + private void assertProposals(final String query, final ResultMode resultMode, final int caretIndex, + final Proposal... 
expected) throws InterruptedException { - System.out.println("\n\n--- " + query + " ---"); - System.out.println("actual : " + String.join("\n", CollectionUtils.map(actual, Proposal::toString))); - System.out.println("expected: " + String.join("\n", CollectionUtils.map(expectedList, Proposal::toString))); - Assert.assertEquals(actual, expectedList); - } + final List actual = dataStore + .propose(new QueryWithCaretMarker(query, dateRange, caretIndex, resultMode)); + final List expectedList = Arrays.asList(expected); + Collections.sort(expectedList); + + System.out.println("\n\n--- " + query + " ---"); + System.out.println("actual : " + String.join("\n", CollectionUtils.map(actual, Proposal::toString))); + System.out.println("expected: " + String.join("\n", CollectionUtils.map(expectedList, Proposal::toString))); + Assert.assertEquals(actual, expectedList); + } } diff --git a/data-store/src/test/java/org/lucares/pdb/datastore/internal/QueryCompletionIndexTest.java b/data-store/src/test/java/org/lucares/pdb/datastore/internal/QueryCompletionIndexTest.java index 8ddbfce..e5ae417 100644 --- a/data-store/src/test/java/org/lucares/pdb/datastore/internal/QueryCompletionIndexTest.java +++ b/data-store/src/test/java/org/lucares/pdb/datastore/internal/QueryCompletionIndexTest.java @@ -21,53 +21,53 @@ import org.testng.annotations.Test; @Test public class QueryCompletionIndexTest { - private Path dataDirectory; + private Path dataDirectory; - @BeforeMethod - public void beforeMethod() throws IOException { - dataDirectory = Files.createTempDirectory("pdb"); - } + @BeforeMethod + public void beforeMethod() throws IOException { + dataDirectory = Files.createTempDirectory("pdb"); + } - @AfterMethod - public void afterMethod() throws IOException { - FileUtils.delete(dataDirectory); - } + @AfterMethod + public void afterMethod() throws IOException { + FileUtils.delete(dataDirectory); + } - public void test() throws Exception { - Tags.STRING_COMPRESSOR = new StringCompressor(new 
UniqueStringIntegerPairs()); + public void test() throws Exception { + Tags.STRING_COMPRESSOR = new StringCompressor(new UniqueStringIntegerPairs()); - final List tags = Arrays.asList(// - Tags.createAndAddToDictionary("firstname", "John", "lastname", "Doe", "country", "Atlantis"), // A - Tags.createAndAddToDictionary("firstname", "Jane", "lastname", "Doe", "country", "ElDorado"), // B - Tags.createAndAddToDictionary("firstname", "John", "lastname", "Miller", "country", "Atlantis")// C - ); + final List tags = Arrays.asList(// + Tags.createAndAddToDictionary("firstname", "John", "lastname", "Doe", "country", "Atlantis"), // A + Tags.createAndAddToDictionary("firstname", "Jane", "lastname", "Doe", "country", "ElDorado"), // B + Tags.createAndAddToDictionary("firstname", "John", "lastname", "Miller", "country", "Atlantis")// C + ); - final DateTimeRange dateRange = DateTimeRange.relativeMillis(1); - final ParititionId partitionId = DateIndexExtension.toPartitionIds(dateRange).get(0); + final DateTimeRange dateRange = DateTimeRange.relativeMillis(1); + final ParititionId partitionId = DateIndexExtension.toPartitionIds(dateRange).get(0); - try (QueryCompletionIndex index = new QueryCompletionIndex(dataDirectory)) { - for (final Tags t : tags) { - index.addTags(partitionId, t); - } + try (QueryCompletionIndex index = new QueryCompletionIndex(dataDirectory)) { + for (final Tags t : tags) { + index.addTags(partitionId, t); + } - // all firstnames where lastname=Doe are returned sorted alphabetically. - // tags A and B match - final SortedSet firstnamesWithLastnameDoe = index.find(dateRange, new Tag("lastname", "Doe"), - "firstname"); - Assert.assertEquals(firstnamesWithLastnameDoe, Arrays.asList("Jane", "John")); + // all firstnames where lastname=Doe are returned sorted alphabetically. 
+ // tags A and B match + final SortedSet firstnamesWithLastnameDoe = index.find(dateRange, new Tag("lastname", "Doe"), + "firstname"); + Assert.assertEquals(firstnamesWithLastnameDoe, Arrays.asList("Jane", "John")); - // no duplicates are returned: - // tags A and C match firstname=John, but both have country=Atlantis - final SortedSet countryWithFirstnameJohn = index.find(dateRange, new Tag("firstname", "John"), - "country"); - Assert.assertEquals(countryWithFirstnameJohn, Arrays.asList("Atlantis")); + // no duplicates are returned: + // tags A and C match firstname=John, but both have country=Atlantis + final SortedSet countryWithFirstnameJohn = index.find(dateRange, new Tag("firstname", "John"), + "country"); + Assert.assertEquals(countryWithFirstnameJohn, Arrays.asList("Atlantis")); - // findAllValuesForField sorts alphabetically - final SortedSet firstnames = index.findAllValuesForField(dateRange, "firstname"); - Assert.assertEquals(firstnames, Arrays.asList("Jane", "John"), "found: " + firstnames); + // findAllValuesForField sorts alphabetically + final SortedSet firstnames = index.findAllValuesForField(dateRange, "firstname"); + Assert.assertEquals(firstnames, Arrays.asList("Jane", "John"), "found: " + firstnames); - final SortedSet countries = index.findAllValuesForField(dateRange, "country"); - Assert.assertEquals(countries, Arrays.asList("Atlantis", "ElDorado")); - } - } + final SortedSet countries = index.findAllValuesForField(dateRange, "country"); + Assert.assertEquals(countries, Arrays.asList("Atlantis", "ElDorado")); + } + } } diff --git a/data-store/src/test/java/org/lucares/pdb/datastore/lang/CandidateGrouperTest.java b/data-store/src/test/java/org/lucares/pdb/datastore/lang/CandidateGrouperTest.java index 9f7b8cc..61b168b 100644 --- a/data-store/src/test/java/org/lucares/pdb/datastore/lang/CandidateGrouperTest.java +++ b/data-store/src/test/java/org/lucares/pdb/datastore/lang/CandidateGrouperTest.java @@ -12,54 +12,54 @@ import 
org.testng.annotations.Test; @Test public class CandidateGrouperTest { - @DataProvider - public Object[][] providerGroup() { - final List result = new ArrayList<>(); + @DataProvider + public Object[][] providerGroup() { + final List result = new ArrayList<>(); - result.add(new Object[] { // - Set.of("aa.xx.AA.XX", "aa.yy.BB", "aa.xx.BB", "aa.xx.AA.YY"), // - "name = |", // - Set.of("aa.") }); - result.add(new Object[] { // - Set.of("aa.xx.AA.XX", "aa.yy.BB", "aa.xx.BB", "aa.xx.AA.YY"), // - "name = a|", // - Set.of("aa.") }); - result.add(new Object[] { // - Set.of("aa.xx.AA.XX", "aa.yy.BB", "aa.xx.BB", "aa.xx.AA.YY"), // - "name = aa|", // - Set.of("aa.") }); - result.add(new Object[] { // - Set.of("aa.xx.AA.XX", "aa.yy.BB", "aa.xx.BB", "aa.xx.AA.YY"), // - "name = aa.|", // - Set.of("aa.xx.", "aa.yy.") }); - result.add(new Object[] { // - Set.of("aa.xx.AA.XX", "aa.xx.BB", "aa.xx.AA.YY"), // - "name = aa.x|", // - Set.of("aa.xx.") }); - result.add(new Object[] { // - Set.of("aa.xx.AA.XX", "aa.xx.BB", "aa.xx.AA.YY"), // - "name = aa.xx.|", // - Set.of("aa.xx.AA.", "aa.xx.BB") }); - result.add(new Object[] { // - Set.of("aa.xx.AA.XX", "aa.xx.AA.YY"), // - "name = aa.xx.AA.|", // - Set.of("aa.xx.AA.XX", "aa.xx.AA.YY") }); - result.add(new Object[] { // - Set.of("XX.YY.ZZ", "XX.YY"), // - "name = XX.Y|", // - Set.of("XX.YY.", "XX.YY") }); + result.add(new Object[] { // + Set.of("aa.xx.AA.XX", "aa.yy.BB", "aa.xx.BB", "aa.xx.AA.YY"), // + "name = |", // + Set.of("aa.") }); + result.add(new Object[] { // + Set.of("aa.xx.AA.XX", "aa.yy.BB", "aa.xx.BB", "aa.xx.AA.YY"), // + "name = a|", // + Set.of("aa.") }); + result.add(new Object[] { // + Set.of("aa.xx.AA.XX", "aa.yy.BB", "aa.xx.BB", "aa.xx.AA.YY"), // + "name = aa|", // + Set.of("aa.") }); + result.add(new Object[] { // + Set.of("aa.xx.AA.XX", "aa.yy.BB", "aa.xx.BB", "aa.xx.AA.YY"), // + "name = aa.|", // + Set.of("aa.xx.", "aa.yy.") }); + result.add(new Object[] { // + Set.of("aa.xx.AA.XX", "aa.xx.BB", "aa.xx.AA.YY"), 
// + "name = aa.x|", // + Set.of("aa.xx.") }); + result.add(new Object[] { // + Set.of("aa.xx.AA.XX", "aa.xx.BB", "aa.xx.AA.YY"), // + "name = aa.xx.|", // + Set.of("aa.xx.AA.", "aa.xx.BB") }); + result.add(new Object[] { // + Set.of("aa.xx.AA.XX", "aa.xx.AA.YY"), // + "name = aa.xx.AA.|", // + Set.of("aa.xx.AA.XX", "aa.xx.AA.YY") }); + result.add(new Object[] { // + Set.of("XX.YY.ZZ", "XX.YY"), // + "name = XX.Y|", // + Set.of("XX.YY.", "XX.YY") }); - return result.toArray(new Object[0][]); - } + return result.toArray(new Object[0][]); + } - @Test(dataProvider = "providerGroup") - public void testGroup(final Set values, final String queryWithCaretMarker, final Set expected) { - final CandidateGrouper grouper = new CandidateGrouper(); + @Test(dataProvider = "providerGroup") + public void testGroup(final Set values, final String queryWithCaretMarker, final Set expected) { + final CandidateGrouper grouper = new CandidateGrouper(); - final String query = queryWithCaretMarker.replace("|", NewProposerParser.CARET_MARKER); + final String query = queryWithCaretMarker.replace("|", NewProposerParser.CARET_MARKER); - final SortedSet actual = grouper.group(values, query); + final SortedSet actual = grouper.group(values, query); - Assert.assertEquals(actual, expected); - } + Assert.assertEquals(actual, expected); + } } diff --git a/file-utils/src/main/java/org/lucares/utils/file/FileUtils.java b/file-utils/src/main/java/org/lucares/utils/file/FileUtils.java index 3cb17b9..a73ca6e 100644 --- a/file-utils/src/main/java/org/lucares/utils/file/FileUtils.java +++ b/file-utils/src/main/java/org/lucares/utils/file/FileUtils.java @@ -15,55 +15,55 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class FileUtils { - private static final Logger LOGGER = LoggerFactory.getLogger(FileUtils.class); + private static final Logger LOGGER = LoggerFactory.getLogger(FileUtils.class); - private static final class RecursiveDeleter extends SimpleFileVisitor { + private static final 
class RecursiveDeleter extends SimpleFileVisitor { - @Override - public FileVisitResult visitFile(final Path file, final BasicFileAttributes attrs) throws IOException { + @Override + public FileVisitResult visitFile(final Path file, final BasicFileAttributes attrs) throws IOException { - Files.delete(file); - LOGGER.trace("deleted: {}", file); + Files.delete(file); + LOGGER.trace("deleted: {}", file); - return FileVisitResult.CONTINUE; - } + return FileVisitResult.CONTINUE; + } - @Override - public FileVisitResult postVisitDirectory(final Path dir, final IOException exc) throws IOException { + @Override + public FileVisitResult postVisitDirectory(final Path dir, final IOException exc) throws IOException { - Files.delete(dir); - LOGGER.trace("deleted: {}", dir); + Files.delete(dir); + LOGGER.trace("deleted: {}", dir); - return FileVisitResult.CONTINUE; - } - } + return FileVisitResult.CONTINUE; + } + } - public static void delete(final Path path) { + public static void delete(final Path path) { - final int maxAttempts = 10; - int attempt = 1; + final int maxAttempts = 10; + int attempt = 1; - while (attempt <= maxAttempts) { - try { - LOGGER.debug("deleting '{}' attempt {} of {}", path.toFile().getAbsolutePath(), attempt, maxAttempts); - Files.walkFileTree(path, new RecursiveDeleter()); - break; - } catch (final IOException e) { - final String msg = "failed to delete '" + path.toFile().getAbsolutePath() + "' on attempt " + attempt - + " of " + maxAttempts; - LOGGER.warn(msg, e); - } - attempt++; - } - } + while (attempt <= maxAttempts) { + try { + LOGGER.debug("deleting '{}' attempt {} of {}", path.toFile().getAbsolutePath(), attempt, maxAttempts); + Files.walkFileTree(path, new RecursiveDeleter()); + break; + } catch (final IOException e) { + final String msg = "failed to delete '" + path.toFile().getAbsolutePath() + "' on attempt " + attempt + + " of " + maxAttempts; + LOGGER.warn(msg, e); + } + attempt++; + } + } - public static List listRecursively(final Path 
start) throws IOException { + public static List listRecursively(final Path start) throws IOException { - final int maxDepth = Integer.MAX_VALUE; - final BiPredicate matcher = (path, attr) -> Files.isRegularFile(path); + final int maxDepth = Integer.MAX_VALUE; + final BiPredicate matcher = (path, attr) -> Files.isRegularFile(path); - try (final Stream files = Files.find(start, maxDepth, matcher)) { - return files.collect(Collectors.toList()); - } - } + try (final Stream files = Files.find(start, maxDepth, matcher)) { + return files.collect(Collectors.toList()); + } + } } diff --git a/pdb-api/src/main/java/org/lucares/pdb/api/DateTimeRange.java b/pdb-api/src/main/java/org/lucares/pdb/api/DateTimeRange.java index f68a4ad..40c3b12 100644 --- a/pdb-api/src/main/java/org/lucares/pdb/api/DateTimeRange.java +++ b/pdb-api/src/main/java/org/lucares/pdb/api/DateTimeRange.java @@ -8,106 +8,106 @@ import java.time.temporal.TemporalUnit; public class DateTimeRange { - private static final DateTimeRange MAX = new DateTimeRange( - OffsetDateTime.of(1900, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC), - OffsetDateTime.of(2100, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)); + private static final DateTimeRange MAX = new DateTimeRange( + OffsetDateTime.of(1900, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC), + OffsetDateTime.of(2100, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)); - private final OffsetDateTime start; - private final OffsetDateTime end; + private final OffsetDateTime start; + private final OffsetDateTime end; - public DateTimeRange(final OffsetDateTime start, final OffsetDateTime end) { - this.start = start; - this.end = end; - } + public DateTimeRange(final OffsetDateTime start, final OffsetDateTime end) { + this.start = start; + this.end = end; + } - public static DateTimeRange max() { - return MAX; - } + public static DateTimeRange max() { + return MAX; + } - public static DateTimeRange now() { - return relativeMillis(0); - } + public static DateTimeRange now() { + return relativeMillis(0); + } - public static 
DateTimeRange relative(final long amount, final TemporalUnit unit) { - final OffsetDateTime now = OffsetDateTime.now(); - return new DateTimeRange(now.minus(amount, unit), now); - } + public static DateTimeRange relative(final long amount, final TemporalUnit unit) { + final OffsetDateTime now = OffsetDateTime.now(); + return new DateTimeRange(now.minus(amount, unit), now); + } - public static DateTimeRange relativeMillis(final long amount) { - return relative(amount, ChronoUnit.MILLIS); - } + public static DateTimeRange relativeMillis(final long amount) { + return relative(amount, ChronoUnit.MILLIS); + } - public static DateTimeRange relativeSeconds(final long amount) { - return relative(amount, ChronoUnit.SECONDS); - } + public static DateTimeRange relativeSeconds(final long amount) { + return relative(amount, ChronoUnit.SECONDS); + } - public static DateTimeRange relativeMinutes(final long amount) { - return relative(amount, ChronoUnit.MINUTES); - } + public static DateTimeRange relativeMinutes(final long amount) { + return relative(amount, ChronoUnit.MINUTES); + } - public static DateTimeRange relativeHours(final long amount) { - return relative(amount, ChronoUnit.HOURS); - } + public static DateTimeRange relativeHours(final long amount) { + return relative(amount, ChronoUnit.HOURS); + } - public static DateTimeRange relativeDays(final long amount) { - return relative(amount, ChronoUnit.DAYS); - } + public static DateTimeRange relativeDays(final long amount) { + return relative(amount, ChronoUnit.DAYS); + } - public static DateTimeRange relativeMonths(final long amount) { - return relative(amount, ChronoUnit.MONTHS); - } + public static DateTimeRange relativeMonths(final long amount) { + return relative(amount, ChronoUnit.MONTHS); + } - public static DateTimeRange relativeYears(final long amount) { - return relative(amount, ChronoUnit.YEARS); - } + public static DateTimeRange relativeYears(final long amount) { + return relative(amount, ChronoUnit.YEARS); + } - 
public OffsetDateTime getStart() { - return start; - } + public OffsetDateTime getStart() { + return start; + } - public long getStartEpochMilli() { - return start.toInstant().toEpochMilli(); - } + public long getStartEpochMilli() { + return start.toInstant().toEpochMilli(); + } - public OffsetDateTime getEnd() { - return end; - } + public OffsetDateTime getEnd() { + return end; + } - public long getEndEpochMilli() { - return end.toInstant().toEpochMilli(); - } + public long getEndEpochMilli() { + return end.toInstant().toEpochMilli(); + } - @Override - public String toString() { - return start + "-" + end; - } + @Override + public String toString() { + return start + "-" + end; + } - public static DateTimeRange ofDay(final OffsetDateTime day) { - final OffsetDateTime from = day.truncatedTo(ChronoUnit.DAYS); - final OffsetDateTime to = from.plusDays(1).minusNanos(1); + public static DateTimeRange ofDay(final OffsetDateTime day) { + final OffsetDateTime from = day.truncatedTo(ChronoUnit.DAYS); + final OffsetDateTime to = from.plusDays(1).minusNanos(1); - return new DateTimeRange(from, to); - } + return new DateTimeRange(from, to); + } - public Duration duration() { - return Duration.between(start, end); - } + public Duration duration() { + return Duration.between(start, end); + } - public boolean inRange(final long epochMilli) { - final long fromEpochMilli = start.toInstant().toEpochMilli(); - final long toEpochMilli = end.toInstant().toEpochMilli(); + public boolean inRange(final long epochMilli) { + final long fromEpochMilli = start.toInstant().toEpochMilli(); + final long toEpochMilli = end.toInstant().toEpochMilli(); - return fromEpochMilli <= epochMilli && epochMilli <= toEpochMilli; - } + return fromEpochMilli <= epochMilli && epochMilli <= toEpochMilli; + } - public boolean inRange(final OffsetDateTime date) { - return start.compareTo(date) <= 0 && end.compareTo(date) >= 0; - } + public boolean inRange(final OffsetDateTime date) { + return 
start.compareTo(date) <= 0 && end.compareTo(date) >= 0; + } - public boolean intersect(final DateTimeRange timeRange) { - return inRange(timeRange.start) // - || inRange(timeRange.end) // - || timeRange.inRange(start)// - || timeRange.inRange(end); - } + public boolean intersect(final DateTimeRange timeRange) { + return inRange(timeRange.start) // + || inRange(timeRange.end) // + || timeRange.inRange(start)// + || timeRange.inRange(end); + } } diff --git a/pdb-api/src/main/java/org/lucares/pdb/api/Entries.java b/pdb-api/src/main/java/org/lucares/pdb/api/Entries.java index 77c3421..6c8c8f6 100644 --- a/pdb-api/src/main/java/org/lucares/pdb/api/Entries.java +++ b/pdb-api/src/main/java/org/lucares/pdb/api/Entries.java @@ -7,36 +7,36 @@ import java.util.Iterator; import java.util.List; public class Entries implements Iterable { - /** - * A special {@link Entries} instance that can be used as poison object for - * {@link BlockingQueueIterator}. - */ - public static final Entries POISON = new Entries(0); + /** + * A special {@link Entries} instance that can be used as poison object for + * {@link BlockingQueueIterator}. + */ + public static final Entries POISON = new Entries(0); - private final List entries; + private final List entries; - public Entries(final int initialSize) { - entries = new ArrayList<>(initialSize); - } + public Entries(final int initialSize) { + entries = new ArrayList<>(initialSize); + } - public Entries(final Entry... entries) { - this.entries = new ArrayList<>(Arrays.asList(entries)); - } + public Entries(final Entry... 
entries) { + this.entries = new ArrayList<>(Arrays.asList(entries)); + } - public Entries(final Collection entries) { - this.entries = new ArrayList<>(entries); - } + public Entries(final Collection entries) { + this.entries = new ArrayList<>(entries); + } - public void add(final Entry entry) { - entries.add(entry); - } + public void add(final Entry entry) { + entries.add(entry); + } - @Override - public Iterator iterator() { - return entries.iterator(); - } + @Override + public Iterator iterator() { + return entries.iterator(); + } - public int size() { - return entries.size(); - } + public int size() { + return entries.size(); + } } diff --git a/pdb-api/src/main/java/org/lucares/pdb/api/Entry.java b/pdb-api/src/main/java/org/lucares/pdb/api/Entry.java index f8f0c20..62e2045 100644 --- a/pdb-api/src/main/java/org/lucares/pdb/api/Entry.java +++ b/pdb-api/src/main/java/org/lucares/pdb/api/Entry.java @@ -7,65 +7,65 @@ import java.time.format.DateTimeFormatter; public class Entry { - private final long value; + private final long value; - private final Tags tags; + private final Tags tags; - private final long epochMilli; + private final long epochMilli; - public Entry(final long epochMilli, final long value, final Tags tags) { - this.epochMilli = epochMilli; - this.tags = tags; - this.value = value; - } + public Entry(final long epochMilli, final long value, final Tags tags) { + this.epochMilli = epochMilli; + this.tags = tags; + this.value = value; + } - public long getValue() { - return value; - } + public long getValue() { + return value; + } - public long getEpochMilli() { - return epochMilli; - } + public long getEpochMilli() { + return epochMilli; + } - public Tags getTags() { - return tags; - } + public Tags getTags() { + return tags; + } - @Override - public String toString() { + @Override + public String toString() { - final OffsetDateTime date = Instant.ofEpochMilli(epochMilli).atOffset(ZoneOffset.UTC); - return 
date.format(DateTimeFormatter.ISO_ZONED_DATE_TIME) + " = " + value + " (" + tags.asString() + ")"; - } + final OffsetDateTime date = Instant.ofEpochMilli(epochMilli).atOffset(ZoneOffset.UTC); + return date.format(DateTimeFormatter.ISO_ZONED_DATE_TIME) + " = " + value + " (" + tags.asString() + ")"; + } - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + (int) (epochMilli ^ (epochMilli >>> 32)); - result = prime * result + ((tags == null) ? 0 : tags.hashCode()); - result = prime * result + (int) (value ^ (value >>> 32)); - return result; - } + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + (int) (epochMilli ^ (epochMilli >>> 32)); + result = prime * result + ((tags == null) ? 0 : tags.hashCode()); + result = prime * result + (int) (value ^ (value >>> 32)); + return result; + } - @Override - public boolean equals(final Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - final Entry other = (Entry) obj; - if (epochMilli != other.epochMilli) - return false; - if (tags == null) { - if (other.tags != null) - return false; - } else if (!tags.equals(other.tags)) - return false; - if (value != other.value) - return false; - return true; - } + @Override + public boolean equals(final Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + final Entry other = (Entry) obj; + if (epochMilli != other.epochMilli) + return false; + if (tags == null) { + if (other.tags != null) + return false; + } else if (!tags.equals(other.tags)) + return false; + if (value != other.value) + return false; + return true; + } } diff --git a/pdb-api/src/main/java/org/lucares/pdb/api/GroupResult.java b/pdb-api/src/main/java/org/lucares/pdb/api/GroupResult.java index 4e7c4cd..bf187b8 100644 --- 
a/pdb-api/src/main/java/org/lucares/pdb/api/GroupResult.java +++ b/pdb-api/src/main/java/org/lucares/pdb/api/GroupResult.java @@ -6,31 +6,31 @@ import org.lucares.collections.LongList; public class GroupResult { - private final Tags groupedBy; + private final Tags groupedBy; - private final Stream timeValueStream; + private final Stream timeValueStream; - public GroupResult(final Stream entries, final Tags groupedBy) { - this.timeValueStream = entries; - this.groupedBy = groupedBy; - } + public GroupResult(final Stream entries, final Tags groupedBy) { + this.timeValueStream = entries; + this.groupedBy = groupedBy; + } - public Tags getGroupedBy() { - return groupedBy; - } + public Tags getGroupedBy() { + return groupedBy; + } - /** - * @return {@link Stream} - */ - public Stream asStream() { - return timeValueStream; - } + /** + * @return {@link Stream} + */ + public Stream asStream() { + return timeValueStream; + } - public LongList flatMap() { - final LongList result = new LongList(); + public LongList flatMap() { + final LongList result = new LongList(); - timeValueStream.forEachOrdered(result::addAll); + timeValueStream.forEachOrdered(result::addAll); - return result; - } + return result; + } } diff --git a/pdb-api/src/main/java/org/lucares/pdb/api/Query.java b/pdb-api/src/main/java/org/lucares/pdb/api/Query.java index 56d5c01..82ffbbc 100644 --- a/pdb-api/src/main/java/org/lucares/pdb/api/Query.java +++ b/pdb-api/src/main/java/org/lucares/pdb/api/Query.java @@ -4,74 +4,74 @@ import java.util.ArrayList; import java.util.List; public class Query { - private final String query; + private final String query; - private final DateTimeRange dateRange; + private final DateTimeRange dateRange; - public Query(final String query, final DateTimeRange dateRange) { - super(); - this.query = query; - this.dateRange = dateRange; - } + public Query(final String query, final DateTimeRange dateRange) { + super(); + this.query = query; + this.dateRange = dateRange; + } - public 
Query relativeMillis(final String query, final long amount) { - return new Query(query, DateTimeRange.relativeMillis(amount)); - } + public Query relativeMillis(final String query, final long amount) { + return new Query(query, DateTimeRange.relativeMillis(amount)); + } - public Query relativeSeconds(final String query, final long amount) { - return new Query(query, DateTimeRange.relativeSeconds(amount)); - } + public Query relativeSeconds(final String query, final long amount) { + return new Query(query, DateTimeRange.relativeSeconds(amount)); + } - public Query relativeMinutes(final String query, final long amount) { - return new Query(query, DateTimeRange.relativeMinutes(amount)); - } + public Query relativeMinutes(final String query, final long amount) { + return new Query(query, DateTimeRange.relativeMinutes(amount)); + } - public Query relativeHours(final String query, final long amount) { - return new Query(query, DateTimeRange.relativeHours(amount)); - } + public Query relativeHours(final String query, final long amount) { + return new Query(query, DateTimeRange.relativeHours(amount)); + } - public Query relativeDays(final String query, final long amount) { - return new Query(query, DateTimeRange.relativeDays(amount)); - } + public Query relativeDays(final String query, final long amount) { + return new Query(query, DateTimeRange.relativeDays(amount)); + } - public Query relativeMonths(final String query, final long amount) { - return new Query(query, DateTimeRange.relativeMonths(amount)); - } + public Query relativeMonths(final String query, final long amount) { + return new Query(query, DateTimeRange.relativeMonths(amount)); + } - public static Query createQuery(final String query, final DateTimeRange dateRange) { - return new Query(query, dateRange); - } + public static Query createQuery(final String query, final DateTimeRange dateRange) { + return new Query(query, dateRange); + } - public static Query createQuery(final Tags tags, final DateTimeRange 
dateRange) { + public static Query createQuery(final Tags tags, final DateTimeRange dateRange) { - final List terms = new ArrayList<>(); + final List terms = new ArrayList<>(); - for (final String key : tags.getKeys()) { - final String value = tags.getValue(key); + for (final String key : tags.getKeys()) { + final String value = tags.getValue(key); - final StringBuilder term = new StringBuilder(); - term.append(key); - term.append("="); - term.append(value); - term.append(" "); + final StringBuilder term = new StringBuilder(); + term.append(key); + term.append("="); + term.append(value); + term.append(" "); - terms.add(term.toString()); - } + terms.add(term.toString()); + } - return new Query(String.join(" and ", terms), dateRange); - } + return new Query(String.join(" and ", terms), dateRange); + } - public String getQuery() { - return query; - } + public String getQuery() { + return query; + } - public DateTimeRange getDateRange() { - return dateRange; - } + public DateTimeRange getDateRange() { + return dateRange; + } - @Override - public String toString() { - return "'" + query + "' [" + dateRange + "]"; - } + @Override + public String toString() { + return "'" + query + "' [" + dateRange + "]"; + } } diff --git a/pdb-api/src/main/java/org/lucares/pdb/api/QueryConstants.java b/pdb-api/src/main/java/org/lucares/pdb/api/QueryConstants.java index ccdcac5..3de17d5 100644 --- a/pdb-api/src/main/java/org/lucares/pdb/api/QueryConstants.java +++ b/pdb-api/src/main/java/org/lucares/pdb/api/QueryConstants.java @@ -1,5 +1,5 @@ package org.lucares.pdb.api; public interface QueryConstants { - String CARET_MARKER = "\ue001"; // character in the private use area + String CARET_MARKER = "\ue001"; // character in the private use area } diff --git a/pdb-api/src/main/java/org/lucares/pdb/api/QueryWithCaretMarker.java b/pdb-api/src/main/java/org/lucares/pdb/api/QueryWithCaretMarker.java index 31dcfae..d1f70f3 100644 --- 
a/pdb-api/src/main/java/org/lucares/pdb/api/QueryWithCaretMarker.java +++ b/pdb-api/src/main/java/org/lucares/pdb/api/QueryWithCaretMarker.java @@ -2,28 +2,28 @@ package org.lucares.pdb.api; public class QueryWithCaretMarker extends Query implements QueryConstants { - public enum ResultMode { - CUT_AT_DOT, FULL_VALUES - } + public enum ResultMode { + CUT_AT_DOT, FULL_VALUES + } - private final int caretIndex; - private final ResultMode resultMode; + private final int caretIndex; + private final ResultMode resultMode; - public QueryWithCaretMarker(final String query, final DateTimeRange dateRange, final int caretIndex, - final ResultMode resultMode) { - super(query, dateRange); - this.caretIndex = caretIndex; - this.resultMode = resultMode; - } + public QueryWithCaretMarker(final String query, final DateTimeRange dateRange, final int caretIndex, + final ResultMode resultMode) { + super(query, dateRange); + this.caretIndex = caretIndex; + this.resultMode = resultMode; + } - public String getQueryWithCaretMarker() { - final StringBuilder queryBuilder = new StringBuilder(getQuery()); - final StringBuilder queryWithCaretMarker = queryBuilder.insert(caretIndex, CARET_MARKER); - return queryWithCaretMarker.toString(); - } + public String getQueryWithCaretMarker() { + final StringBuilder queryBuilder = new StringBuilder(getQuery()); + final StringBuilder queryWithCaretMarker = queryBuilder.insert(caretIndex, CARET_MARKER); + return queryWithCaretMarker.toString(); + } - public ResultMode getResultMode() { - return resultMode; - } + public ResultMode getResultMode() { + return resultMode; + } } diff --git a/pdb-api/src/main/java/org/lucares/pdb/api/Result.java b/pdb-api/src/main/java/org/lucares/pdb/api/Result.java index 9c90054..a774839 100644 --- a/pdb-api/src/main/java/org/lucares/pdb/api/Result.java +++ b/pdb-api/src/main/java/org/lucares/pdb/api/Result.java @@ -7,24 +7,24 @@ import java.util.List; public class Result { - private final List groupResults; + private final 
List groupResults; - public Result(final GroupResult... groupResults) { - this(Arrays.asList(groupResults)); - } + public Result(final GroupResult... groupResults) { + this(Arrays.asList(groupResults)); + } - public Result(final Collection groupResults) { - this.groupResults = new ArrayList<>(groupResults); - } + public Result(final Collection groupResults) { + this.groupResults = new ArrayList<>(groupResults); + } - public GroupResult singleGroup() { - if (groupResults.size() != 1) { - throw new IllegalStateException("the result does not contain exactly one group"); - } - return groupResults.get(0); - } + public GroupResult singleGroup() { + if (groupResults.size() != 1) { + throw new IllegalStateException("the result does not contain exactly one group"); + } + return groupResults.get(0); + } - public List getGroups() { - return new ArrayList<>(groupResults); - } + public List getGroups() { + return new ArrayList<>(groupResults); + } } diff --git a/pdb-api/src/main/java/org/lucares/pdb/api/RuntimeIOException.java b/pdb-api/src/main/java/org/lucares/pdb/api/RuntimeIOException.java index 4165193..6b62756 100644 --- a/pdb-api/src/main/java/org/lucares/pdb/api/RuntimeIOException.java +++ b/pdb-api/src/main/java/org/lucares/pdb/api/RuntimeIOException.java @@ -2,9 +2,9 @@ package org.lucares.pdb.api; public class RuntimeIOException extends RuntimeException { - private static final long serialVersionUID = 1L; + private static final long serialVersionUID = 1L; - public RuntimeIOException(final Throwable cause) { - super(cause); - } + public RuntimeIOException(final Throwable cause) { + super(cause); + } } diff --git a/pdb-api/src/main/java/org/lucares/pdb/api/StringCompressor.java b/pdb-api/src/main/java/org/lucares/pdb/api/StringCompressor.java index fb7d177..5fbabe4 100644 --- a/pdb-api/src/main/java/org/lucares/pdb/api/StringCompressor.java +++ b/pdb-api/src/main/java/org/lucares/pdb/api/StringCompressor.java @@ -7,34 +7,34 @@ import java.nio.file.Path; */ public class 
StringCompressor { - private final UniqueStringIntegerPairs usip; + private final UniqueStringIntegerPairs usip; - public StringCompressor(final UniqueStringIntegerPairs usip) throws RuntimeIOException { - this.usip = usip; - } + public StringCompressor(final UniqueStringIntegerPairs usip) throws RuntimeIOException { + this.usip = usip; + } - public static StringCompressor create(final Path path) { - final UniqueStringIntegerPairs mapsi = new UniqueStringIntegerPairs(path); - return new StringCompressor(mapsi); - } + public static StringCompressor create(final Path path) { + final UniqueStringIntegerPairs mapsi = new UniqueStringIntegerPairs(path); + return new StringCompressor(mapsi); + } - public int put(final String string) { + public int put(final String string) { - return usip.computeIfAbsent(string, s -> usip.getHighestInteger() + 1); - } + return usip.computeIfAbsent(string, s -> usip.getHighestInteger() + 1); + } - public int put(final byte[] bytes, final int start, final int endExclusive) { - return usip.computeIfAbsent(bytes, start, endExclusive); - } + public int put(final byte[] bytes, final int start, final int endExclusive) { + return usip.computeIfAbsent(bytes, start, endExclusive); + } - public String get(final int integer) { + public String get(final int integer) { - return usip.getKey(integer); - } + return usip.getKey(integer); + } - public int getIfPresent(final String string) { - final Integer integer = usip.get(string); - return integer != null ? integer : -1; - } + public int getIfPresent(final String string) { + final Integer integer = usip.get(string); + return integer != null ? integer : -1; + } } diff --git a/pdb-api/src/main/java/org/lucares/pdb/api/Tag.java b/pdb-api/src/main/java/org/lucares/pdb/api/Tag.java index 48bbcc8..d17adab 100644 --- a/pdb-api/src/main/java/org/lucares/pdb/api/Tag.java +++ b/pdb-api/src/main/java/org/lucares/pdb/api/Tag.java @@ -6,89 +6,89 @@ package org.lucares.pdb.api; * 'Sam' is the value. 
*/ public class Tag implements Comparable { - private final int field; + private final int field; - private final int value; + private final int value; - /** - * Create a new tag with field and value specified as int. See - * {@link Tags#STRING_COMPRESSOR} for the mapping between Strings and ints. - * - * @param field the field as int - * @param value the value as int - */ - public Tag(final int field, final int value) { - this.field = field; - this.value = value; - } + /** + * Create a new tag with field and value specified as int. See + * {@link Tags#STRING_COMPRESSOR} for the mapping between Strings and ints. + * + * @param field the field as int + * @param value the value as int + */ + public Tag(final int field, final int value) { + this.field = field; + this.value = value; + } - /** - * Create a new {@link Tag} for the given field and value. - * - * @param field the field - * @param value the value - */ - public Tag(final String field, final String value) { - this.field = field != null ? Tags.STRING_COMPRESSOR.getIfPresent(field) : -1; - this.value = value != null ? Tags.STRING_COMPRESSOR.getIfPresent(value) : -1; - } + /** + * Create a new {@link Tag} for the given field and value. + * + * @param field the field + * @param value the value + */ + public Tag(final String field, final String value) { + this.field = field != null ? Tags.STRING_COMPRESSOR.getIfPresent(field) : -1; + this.value = value != null ? 
Tags.STRING_COMPRESSOR.getIfPresent(value) : -1; + } - @Override - public int compareTo(final Tag o) { + @Override + public int compareTo(final Tag o) { - if (field != o.field) { - return field - o.field; - } else if (value != o.value) { - return value - o.value; - } + if (field != o.field) { + return field - o.field; + } else if (value != o.value) { + return value - o.value; + } - return 0; - } + return 0; + } - public int getKey() { - return field; - } + public int getKey() { + return field; + } - public String getKeyAsString() { - return Tags.STRING_COMPRESSOR.get(field); - } + public String getKeyAsString() { + return Tags.STRING_COMPRESSOR.get(field); + } - public int getValue() { - return value; - } + public int getValue() { + return value; + } - public String getValueAsString() { - return Tags.STRING_COMPRESSOR.get(value); - } + public String getValueAsString() { + return Tags.STRING_COMPRESSOR.get(value); + } - @Override - public String toString() { - return Tags.STRING_COMPRESSOR.get(field) + "=" + Tags.STRING_COMPRESSOR.get(value); - } + @Override + public String toString() { + return Tags.STRING_COMPRESSOR.get(field) + "=" + Tags.STRING_COMPRESSOR.get(value); + } - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + field; - result = prime * result + value; - return result; - } + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + field; + result = prime * result + value; + return result; + } - @Override - public boolean equals(final Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - final Tag other = (Tag) obj; - if (field != other.field) - return false; - if (value != other.value) - return false; - return true; - } + @Override + public boolean equals(final Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != 
obj.getClass()) + return false; + final Tag other = (Tag) obj; + if (field != other.field) + return false; + if (value != other.value) + return false; + return true; + } } diff --git a/pdb-api/src/main/java/org/lucares/pdb/api/TagByKeyAndValueComparator.java b/pdb-api/src/main/java/org/lucares/pdb/api/TagByKeyAndValueComparator.java index b86cf83..9aef8f0 100644 --- a/pdb-api/src/main/java/org/lucares/pdb/api/TagByKeyAndValueComparator.java +++ b/pdb-api/src/main/java/org/lucares/pdb/api/TagByKeyAndValueComparator.java @@ -4,5 +4,5 @@ import java.util.Comparator; public class TagByKeyAndValueComparator { - public static final Comparator INSTANCE = Comparator.comparing(Tag::getKey).thenComparing(Tag::getValue); + public static final Comparator INSTANCE = Comparator.comparing(Tag::getKey).thenComparing(Tag::getValue); } diff --git a/pdb-api/src/main/java/org/lucares/pdb/api/TagByKeyComparator.java b/pdb-api/src/main/java/org/lucares/pdb/api/TagByKeyComparator.java index f493abf..e3bce2a 100644 --- a/pdb-api/src/main/java/org/lucares/pdb/api/TagByKeyComparator.java +++ b/pdb-api/src/main/java/org/lucares/pdb/api/TagByKeyComparator.java @@ -4,5 +4,5 @@ import java.util.Comparator; public class TagByKeyComparator { - public static final Comparator INSTANCE = Comparator.comparing(Tag::getKey); + public static final Comparator INSTANCE = Comparator.comparing(Tag::getKey); } diff --git a/pdb-api/src/main/java/org/lucares/pdb/api/Tags.java b/pdb-api/src/main/java/org/lucares/pdb/api/Tags.java index f3bcb45..53eecef 100644 --- a/pdb-api/src/main/java/org/lucares/pdb/api/Tags.java +++ b/pdb-api/src/main/java/org/lucares/pdb/api/Tags.java @@ -14,264 +14,264 @@ import org.lucares.utils.byteencoder.VariableByteEncoder; public class Tags implements Comparable { - public static StringCompressor STRING_COMPRESSOR = null; - private static final byte[] EMPTY_BYTES = new byte[0]; - public static final Tags EMPTY = new Tags(); + public static StringCompressor STRING_COMPRESSOR = null; 
+ private static final byte[] EMPTY_BYTES = new byte[0]; + public static final Tags EMPTY = new Tags(); - private final List tags; + private final List tags; - public Tags() { - tags = new ArrayList<>(); - } + public Tags() { + tags = new ArrayList<>(); + } - public Tags(final List tags) { - Collections.sort(tags, TagByKeyAndValueComparator.INSTANCE); - this.tags = tags; - } + public Tags(final List tags) { + Collections.sort(tags, TagByKeyAndValueComparator.INSTANCE); + this.tags = tags; + } - public static Tags create(final List tags) { + public static Tags create(final List tags) { - return new Tags(tags); - } + return new Tags(tags); + } - public static Tags create() { - return EMPTY; - } + public static Tags create() { + return EMPTY; + } - public static Tags create(final int key, final int value) { + public static Tags create(final int key, final int value) { - return TagsBuilder.create().add(key, value).build(); - } + return TagsBuilder.create().add(key, value).build(); + } - public static Tags create(final int key1, final int value1, final int key2, final int value2) { + public static Tags create(final int key1, final int value1, final int key2, final int value2) { - final Tags result = TagsBuilder.create().add(key1, value1).add(key2, value2).build(); - return result; - } + final Tags result = TagsBuilder.create().add(key1, value1).add(key2, value2).build(); + return result; + } - public static Tags create(final int key1, final int value1, final int key2, final int value2, final int key3, - final int value3) { - final Tags result = TagsBuilder.create().add(key1, value1).add(key2, value2).add(key3, value3).build(); - return result; - } + public static Tags create(final int key1, final int value1, final int key2, final int value2, final int key3, + final int value3) { + final Tags result = TagsBuilder.create().add(key1, value1).add(key2, value2).add(key3, value3).build(); + return result; + } - public static Tags createAndAddToDictionary(final String key, 
final String value) { + public static Tags createAndAddToDictionary(final String key, final String value) { - return TagsBuilder.create().addAndAddToDictionary(key, value).build(); - } + return TagsBuilder.create().addAndAddToDictionary(key, value).build(); + } - public static Tags createAndAddToDictionary(final String key1, final String value1, final String key2, - final String value2) { + public static Tags createAndAddToDictionary(final String key1, final String value1, final String key2, + final String value2) { - final Tags result = TagsBuilder.create().addAndAddToDictionary(key1, value1).addAndAddToDictionary(key2, value2) - .build(); - return result; - } + final Tags result = TagsBuilder.create().addAndAddToDictionary(key1, value1).addAndAddToDictionary(key2, value2) + .build(); + return result; + } - public static Tags createAndAddToDictionary(final String key1, final String value1, final String key2, - final String value2, final String key3, final String value3) { - final Tags result = TagsBuilder.create().addAndAddToDictionary(key1, value1).addAndAddToDictionary(key2, value2) - .addAndAddToDictionary(key3, value3).build(); - return result; - } + public static Tags createAndAddToDictionary(final String key1, final String value1, final String key2, + final String value2, final String key3, final String value3) { + final Tags result = TagsBuilder.create().addAndAddToDictionary(key1, value1).addAndAddToDictionary(key2, value2) + .addAndAddToDictionary(key3, value3).build(); + return result; + } - public static Tags createAndAddToDictionary(final String key1, final String value1, final String key2, - final String value2, final String key3, final String value3, final String key4, final String value4) { - final Tags result = TagsBuilder.create().addAndAddToDictionary(key1, value1).addAndAddToDictionary(key2, value2) - .addAndAddToDictionary(key3, value3).addAndAddToDictionary(key4, value4).build(); - return result; - } + public static Tags 
createAndAddToDictionary(final String key1, final String value1, final String key2, + final String value2, final String key3, final String value3, final String key4, final String value4) { + final Tags result = TagsBuilder.create().addAndAddToDictionary(key1, value1).addAndAddToDictionary(key2, value2) + .addAndAddToDictionary(key3, value3).addAndAddToDictionary(key4, value4).build(); + return result; + } - public static Tags fromBytes(final byte[] bytes) { - final List result = new ArrayList<>(); + public static Tags fromBytes(final byte[] bytes) { + final List result = new ArrayList<>(); - final LongList keyValuesAsLongs = VariableByteEncoder.decode(bytes); + final LongList keyValuesAsLongs = VariableByteEncoder.decode(bytes); - for (int i = 0; i < keyValuesAsLongs.size(); i += 2) { + for (int i = 0; i < keyValuesAsLongs.size(); i += 2) { - final long keyAsLong = keyValuesAsLongs.get(i); - final long valueAsLong = keyValuesAsLongs.get(i + 1); + final long keyAsLong = keyValuesAsLongs.get(i); + final long valueAsLong = keyValuesAsLongs.get(i + 1); - final int key = (int) keyAsLong; - final int value = (int) valueAsLong; - result.add(new Tag(key, value)); - } + final int key = (int) keyAsLong; + final int value = (int) valueAsLong; + result.add(new Tag(key, value)); + } - return new Tags(result); - } + return new Tags(result); + } - public byte[] toBytes() { - final byte[] result; + public byte[] toBytes() { + final byte[] result; - if (tags.size() > 0) { - final LongList keyValuesAsLongs = new LongList(tags.size() * 2); - for (final Tag tag : tags) { - final long keyAsLong = tag.getKey(); - final long valueAsLong = tag.getValue(); + if (tags.size() > 0) { + final LongList keyValuesAsLongs = new LongList(tags.size() * 2); + for (final Tag tag : tags) { + final long keyAsLong = tag.getKey(); + final long valueAsLong = tag.getValue(); - keyValuesAsLongs.add(keyAsLong); - keyValuesAsLongs.add(valueAsLong); - } + keyValuesAsLongs.add(keyAsLong); + 
keyValuesAsLongs.add(valueAsLong); + } - result = VariableByteEncoder.encode(keyValuesAsLongs); - } else { - result = EMPTY_BYTES; - } - return result; - } + result = VariableByteEncoder.encode(keyValuesAsLongs); + } else { + result = EMPTY_BYTES; + } + return result; + } - @Override - public int compareTo(final Tags o) { + @Override + public int compareTo(final Tags o) { - if (tags.size() != o.tags.size()) { - return tags.size() - o.tags.size(); - } else { - for (int i = 0; i < tags.size(); i++) { - final int compareResult = tags.get(i).compareTo(o.tags.get(i)); - if (compareResult != 0) { - return compareResult; - } - } - } + if (tags.size() != o.tags.size()) { + return tags.size() - o.tags.size(); + } else { + for (int i = 0; i < tags.size(); i++) { + final int compareResult = tags.get(i).compareTo(o.tags.get(i)); + if (compareResult != 0) { + return compareResult; + } + } + } - return 0; - } + return 0; + } - public String getValue(final String key) { - final Tag needle = new Tag(STRING_COMPRESSOR.put(key), 0); + public String getValue(final String key) { + final Tag needle = new Tag(STRING_COMPRESSOR.put(key), 0); - final int index = Collections.binarySearch(tags, needle, TagByKeyComparator.INSTANCE); - if (index >= 0) { - final Tag tag = tags.get(index); - return STRING_COMPRESSOR.get(tag.getValue()); - } - return null; - } + final int index = Collections.binarySearch(tags, needle, TagByKeyComparator.INSTANCE); + if (index >= 0) { + final Tag tag = tags.get(index); + return STRING_COMPRESSOR.get(tag.getValue()); + } + return null; + } - public int getValueAsInt(final String key) { - final Tag needle = new Tag(STRING_COMPRESSOR.put(key), 0); + public int getValueAsInt(final String key) { + final Tag needle = new Tag(STRING_COMPRESSOR.put(key), 0); - final int index = Collections.binarySearch(tags, needle, TagByKeyComparator.INSTANCE); - if (index >= 0) { - final Tag tag = tags.get(index); - return tag.getValue(); - } - return -1; - } + final int index = 
Collections.binarySearch(tags, needle, TagByKeyComparator.INSTANCE); + if (index >= 0) { + final Tag tag = tags.get(index); + return tag.getValue(); + } + return -1; + } - public Set getKeys() { - final TreeSet result = new TreeSet<>(); - for (final Tag tag : tags) { - result.add(STRING_COMPRESSOR.get(tag.getKey())); - } - return result; - } + public Set getKeys() { + final TreeSet result = new TreeSet<>(); + for (final Tag tag : tags) { + result.add(STRING_COMPRESSOR.get(tag.getKey())); + } + return result; + } - public IntList getKeysAsInt() { - final IntList result = new IntList(); - for (final Tag tag : tags) { - result.add(tag.getKey()); - } - return result; - } + public IntList getKeysAsInt() { + final IntList result = new IntList(); + for (final Tag tag : tags) { + result.add(tag.getKey()); + } + return result; + } - public List toTags() { - return Collections.unmodifiableList(tags); - } + public List toTags() { + return Collections.unmodifiableList(tags); + } - public void forEach(final BiConsumer keyValueConsumer) { + public void forEach(final BiConsumer keyValueConsumer) { - for (final Tag tag : tags) { - final String key = STRING_COMPRESSOR.get(tag.getKey()); - final String value = STRING_COMPRESSOR.get(tag.getValue()); - keyValueConsumer.accept(key, value); - } - } + for (final Tag tag : tags) { + final String key = STRING_COMPRESSOR.get(tag.getKey()); + final String value = STRING_COMPRESSOR.get(tag.getValue()); + keyValueConsumer.accept(key, value); + } + } - public Tags mapTags(final Function tagMapFuntion) { - final List mappedTags = new ArrayList<>(tags.size()); - for (final Tag tag : tags) { - mappedTags.add(tagMapFuntion.apply(tag)); - } - return Tags.create(mappedTags); - } + public Tags mapTags(final Function tagMapFuntion) { + final List mappedTags = new ArrayList<>(tags.size()); + for (final Tag tag : tags) { + mappedTags.add(tagMapFuntion.apply(tag)); + } + return Tags.create(mappedTags); + } - @Override - public String toString() { - return 
String.valueOf(tags); - } + @Override + public String toString() { + return String.valueOf(tags); + } - public String toCsv() { - final List tagsAsStrings = new ArrayList<>(); - for (final Tag tag : tags) { - tagsAsStrings.add(tag.getKeyAsString() + "=" + tag.getValueAsString()); - } + public String toCsv() { + final List tagsAsStrings = new ArrayList<>(); + for (final Tag tag : tags) { + tagsAsStrings.add(tag.getKeyAsString() + "=" + tag.getValueAsString()); + } - return String.join(",", tagsAsStrings); - } + return String.join(",", tagsAsStrings); + } - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((tags == null) ? 0 : tags.hashCode()); - return result; - } + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((tags == null) ? 0 : tags.hashCode()); + return result; + } - @Override - public boolean equals(final Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - final Tags other = (Tags) obj; - if (tags == null) { - if (other.tags != null) - return false; - } else if (!tags.equals(other.tags)) - return false; - return true; - } + @Override + public boolean equals(final Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + final Tags other = (Tags) obj; + if (tags == null) { + if (other.tags != null) + return false; + } else if (!tags.equals(other.tags)) + return false; + return true; + } - public Tags subset(final List groupByFields) { + public Tags subset(final List groupByFields) { - final TagsBuilder result = TagsBuilder.create(); + final TagsBuilder result = TagsBuilder.create(); - for (final String field : groupByFields) { - final int value = getValueAsInt(field); + for (final String field : groupByFields) { + final int value = getValueAsInt(field); - if (value >= 0) { - final int 
fieldAsInt = STRING_COMPRESSOR.getIfPresent(field); - result.add(fieldAsInt, value); - } - } + if (value >= 0) { + final int fieldAsInt = STRING_COMPRESSOR.getIfPresent(field); + result.add(fieldAsInt, value); + } + } - return result.build(); - } + return result.build(); + } - public boolean isEmpty() { - return tags.isEmpty(); - } + public boolean isEmpty() { + return tags.isEmpty(); + } - /** - * @return User facing readable representation - */ - public String asString() { + /** + * @return User facing readable representation + */ + public String asString() { - final StringBuilder result = new StringBuilder(); + final StringBuilder result = new StringBuilder(); - for (final Tag tag : tags) { - if (result.length() > 0) { - result.append(", "); - } + for (final Tag tag : tags) { + if (result.length() > 0) { + result.append(", "); + } - result.append(STRING_COMPRESSOR.get(tag.getKey())); - result.append("="); - result.append(STRING_COMPRESSOR.get(tag.getValue())); - } + result.append(STRING_COMPRESSOR.get(tag.getKey())); + result.append("="); + result.append(STRING_COMPRESSOR.get(tag.getValue())); + } - return result.toString(); - } + return result.toString(); + } } diff --git a/pdb-api/src/main/java/org/lucares/pdb/api/TagsBuilder.java b/pdb-api/src/main/java/org/lucares/pdb/api/TagsBuilder.java index 83f7776..50633f4 100644 --- a/pdb-api/src/main/java/org/lucares/pdb/api/TagsBuilder.java +++ b/pdb-api/src/main/java/org/lucares/pdb/api/TagsBuilder.java @@ -5,30 +5,30 @@ import java.util.List; public class TagsBuilder { - final List tags = new ArrayList<>(); + final List tags = new ArrayList<>(); - public static TagsBuilder create() { - return new TagsBuilder(); - } + public static TagsBuilder create() { + return new TagsBuilder(); + } - public TagsBuilder add(final int key, final int value) { - tags.add(new Tag(key, value)); - return this; - } + public TagsBuilder add(final int key, final int value) { + tags.add(new Tag(key, value)); + return this; + } - public 
TagsBuilder add(final String key, final String value) { - final int keyAsInt = Tags.STRING_COMPRESSOR.getIfPresent(key); - final int valueAsInt = Tags.STRING_COMPRESSOR.getIfPresent(value); - return add(keyAsInt, valueAsInt); - } + public TagsBuilder add(final String key, final String value) { + final int keyAsInt = Tags.STRING_COMPRESSOR.getIfPresent(key); + final int valueAsInt = Tags.STRING_COMPRESSOR.getIfPresent(value); + return add(keyAsInt, valueAsInt); + } - public TagsBuilder addAndAddToDictionary(final String key, final String value) { - final int keyAsInt = Tags.STRING_COMPRESSOR.put(key); - final int valueAsInt = Tags.STRING_COMPRESSOR.put(value); - return add(keyAsInt, valueAsInt); - } + public TagsBuilder addAndAddToDictionary(final String key, final String value) { + final int keyAsInt = Tags.STRING_COMPRESSOR.put(key); + final int valueAsInt = Tags.STRING_COMPRESSOR.put(value); + return add(keyAsInt, valueAsInt); + } - public Tags build() { - return Tags.create(tags); - } + public Tags build() { + return Tags.create(tags); + } } diff --git a/pdb-api/src/main/java/org/lucares/pdb/api/UniqueStringIntegerPairs.java b/pdb-api/src/main/java/org/lucares/pdb/api/UniqueStringIntegerPairs.java index 2b5f668..b6572b6 100644 --- a/pdb-api/src/main/java/org/lucares/pdb/api/UniqueStringIntegerPairs.java +++ b/pdb-api/src/main/java/org/lucares/pdb/api/UniqueStringIntegerPairs.java @@ -31,182 +31,182 @@ import java.util.regex.Pattern; * retrievals. 
*/ public class UniqueStringIntegerPairs { - private static final String SEPARATOR = "\t"; + private static final String SEPARATOR = "\t"; - private static final boolean APPEND = true; + private static final boolean APPEND = true; - private static final class ByteArray implements Comparable { - private final byte[] array; - private final int start; - private final int endExclusive; + private static final class ByteArray implements Comparable { + private final byte[] array; + private final int start; + private final int endExclusive; - public ByteArray(final byte[] array, final int start, final int endExclusive) { - super(); - this.array = array; - this.start = start; - this.endExclusive = endExclusive; - } + public ByteArray(final byte[] array, final int start, final int endExclusive) { + super(); + this.array = array; + this.start = start; + this.endExclusive = endExclusive; + } - public ByteArray(final byte[] bytes) { - this.array = bytes; - this.start = 0; - this.endExclusive = bytes.length; - } + public ByteArray(final byte[] bytes) { + this.array = bytes; + this.start = 0; + this.endExclusive = bytes.length; + } - // custom hashcode! - @Override - public int hashCode() { - int result = 1; - final byte[] a = array; - final int end = endExclusive; - for (int i = start; i < end; i++) { - result = 31 * result + a[i]; - } - return result; - } + // custom hashcode! + @Override + public int hashCode() { + int result = 1; + final byte[] a = array; + final int end = endExclusive; + for (int i = start; i < end; i++) { + result = 31 * result + a[i]; + } + return result; + } - // custom equals! - @Override - public boolean equals(final Object obj) { - final ByteArray other = (ByteArray) obj; - if (!Arrays.equals(array, start, endExclusive, other.array, other.start, other.endExclusive)) - return false; - return true; - } + // custom equals! 
+ @Override + public boolean equals(final Object obj) { + final ByteArray other = (ByteArray) obj; + if (!Arrays.equals(array, start, endExclusive, other.array, other.start, other.endExclusive)) + return false; + return true; + } - @Override - public int compareTo(final ByteArray o) { - return Arrays.compare(array, start, endExclusive, o.array, o.start, o.endExclusive); - } + @Override + public int compareTo(final ByteArray o) { + return Arrays.compare(array, start, endExclusive, o.array, o.start, o.endExclusive); + } - } + } - /** - * Maps a string to an integer. E.g. "myLongValue" -> 123 - */ - private final Map stringToInt = new HashMap<>(); + /** + * Maps a string to an integer. E.g. "myLongValue" -> 123 + */ + private final Map stringToInt = new HashMap<>(); - private final Map bytesToInt = new HashMap<>(); + private final Map bytesToInt = new HashMap<>(); - /** - * Maps an integer to a string. E.g. 123 -> "myLongValue" - */ - private final List intToString = new ArrayList<>(); + /** + * Maps an integer to a string. E.g. 
123 -> "myLongValue" + */ + private final List intToString = new ArrayList<>(); - private final Path file; + private final Path file; - public UniqueStringIntegerPairs() { - this(null); - } + public UniqueStringIntegerPairs() { + this(null); + } - public UniqueStringIntegerPairs(final Path file) { - this.file = file; - if (file != null) { - init(file); - } - } + public UniqueStringIntegerPairs(final Path file) { + this.file = file; + if (file != null) { + init(file); + } + } - private void init(final Path file) throws RuntimeIOException { + private void init(final Path file) throws RuntimeIOException { - try { - Files.createDirectories(file.getParent()); - if (!Files.exists(file)) { - Files.createFile(file); - } + try { + Files.createDirectories(file.getParent()); + if (!Files.exists(file)) { + Files.createFile(file); + } - try (final BufferedReader reader = new BufferedReader( - new InputStreamReader(new FileInputStream(file.toFile()), StandardCharsets.UTF_8))) { - String line; - while ((line = reader.readLine()) != null) { + try (final BufferedReader reader = new BufferedReader( + new InputStreamReader(new FileInputStream(file.toFile()), StandardCharsets.UTF_8))) { + String line; + while ((line = reader.readLine()) != null) { - final String[] tokens = line.split(Pattern.quote(SEPARATOR)); + final String[] tokens = line.split(Pattern.quote(SEPARATOR)); - if (tokens.length == 2) { - final String string = tokens[0]; - final int integer = Integer.parseInt(tokens[1]); - intToStringPut(integer, string); - stringToInt.put(string, integer); - bytesToInt.put(new ByteArray(string.getBytes(StandardCharsets.UTF_8)), integer); - } - } - } - } catch (final IOException e) { - throw new RuntimeIOException(e); - } - } + if (tokens.length == 2) { + final String string = tokens[0]; + final int integer = Integer.parseInt(tokens[1]); + intToStringPut(integer, string); + stringToInt.put(string, integer); + bytesToInt.put(new ByteArray(string.getBytes(StandardCharsets.UTF_8)), 
integer); + } + } + } + } catch (final IOException e) { + throw new RuntimeIOException(e); + } + } - private void intToStringPut(final int value, final String string) { - if (intToString.size() <= value) { - // list is not long enough -> grow list - while (intToString.size() <= value) { - intToString.add(null); - } - } - intToString.set(value, string); - } + private void intToStringPut(final int value, final String string) { + if (intToString.size() <= value) { + // list is not long enough -> grow list + while (intToString.size() <= value) { + intToString.add(null); + } + } + intToString.set(value, string); + } - void put(final String string, final int integer) { + void put(final String string, final int integer) { - if (stringToInt.containsKey(string) || (intToString.size() > integer && intToString.get(integer) != null)) { - throw new IllegalArgumentException("Unique key constraint violation for (" + string + ", " + integer + ")"); - } - if (file != null) { - try (final Writer writer = new OutputStreamWriter(new FileOutputStream(file.toFile(), APPEND), - StandardCharsets.UTF_8)) { + if (stringToInt.containsKey(string) || (intToString.size() > integer && intToString.get(integer) != null)) { + throw new IllegalArgumentException("Unique key constraint violation for (" + string + ", " + integer + ")"); + } + if (file != null) { + try (final Writer writer = new OutputStreamWriter(new FileOutputStream(file.toFile(), APPEND), + StandardCharsets.UTF_8)) { - writer.write(string + SEPARATOR + integer + "\n"); + writer.write(string + SEPARATOR + integer + "\n"); - } catch (final IOException e) { - throw new RuntimeIOException(e); - } - } + } catch (final IOException e) { + throw new RuntimeIOException(e); + } + } - intToStringPut(integer, string); - stringToInt.put(string, integer); - bytesToInt.put(new ByteArray(string.getBytes(StandardCharsets.UTF_8)), integer); - } + intToStringPut(integer, string); + stringToInt.put(string, integer); + bytesToInt.put(new 
ByteArray(string.getBytes(StandardCharsets.UTF_8)), integer); + } - public Integer get(final String string) { + public Integer get(final String string) { - return stringToInt.get(string); - } + return stringToInt.get(string); + } - public String getKey(final int second) { - return intToString.get(second); - } + public String getKey(final int second) { + return intToString.get(second); + } - public Integer getHighestInteger() { - return intToString.size() == 0 ? -1 : intToString.size() - 1; - } + public Integer getHighestInteger() { + return intToString.size() == 0 ? -1 : intToString.size() - 1; + } - public Integer computeIfAbsent(final String string, final Function mappingFunction) { - if (!stringToInt.containsKey(string)) { - synchronized (stringToInt) { - if (!stringToInt.containsKey(string)) { - final Integer second = mappingFunction.apply(string); - put(string, second); - } - } - } + public Integer computeIfAbsent(final String string, final Function mappingFunction) { + if (!stringToInt.containsKey(string)) { + synchronized (stringToInt) { + if (!stringToInt.containsKey(string)) { + final Integer second = mappingFunction.apply(string); + put(string, second); + } + } + } - return stringToInt.get(string); - } + return stringToInt.get(string); + } - public Integer computeIfAbsent(final byte[] bytes, final int start, final int endExclusive) { + public Integer computeIfAbsent(final byte[] bytes, final int start, final int endExclusive) { - final ByteArray byteArray = new ByteArray(bytes, start, endExclusive); - Integer result = bytesToInt.get(byteArray); - if (result == null) { - synchronized (stringToInt) { - if (!bytesToInt.containsKey(byteArray)) { - final String string = new String(bytes, start, endExclusive - start, StandardCharsets.UTF_8); - final Integer integer = intToString.size(); - put(string, integer); - } - result = bytesToInt.get(byteArray); - } - } + final ByteArray byteArray = new ByteArray(bytes, start, endExclusive); + Integer result = 
bytesToInt.get(byteArray); + if (result == null) { + synchronized (stringToInt) { + if (!bytesToInt.containsKey(byteArray)) { + final String string = new String(bytes, start, endExclusive - start, StandardCharsets.UTF_8); + final Integer integer = intToString.size(); + put(string, integer); + } + result = bytesToInt.get(byteArray); + } + } - return result; - } + return result; + } } diff --git a/pdb-api/src/test/java/org/lucares/memory/MemoryScale.java b/pdb-api/src/test/java/org/lucares/memory/MemoryScale.java index 8c1b2e2..91b5d2b 100644 --- a/pdb-api/src/test/java/org/lucares/memory/MemoryScale.java +++ b/pdb-api/src/test/java/org/lucares/memory/MemoryScale.java @@ -14,133 +14,133 @@ import org.lucares.pdb.api.UniqueStringIntegerPairs; public class MemoryScale { - public static final String A = "A"; + public static final String A = "A"; - public static void main(final String[] args) { - Tags.STRING_COMPRESSOR = new StringCompressor(new UniqueStringIntegerPairs()); + public static void main(final String[] args) { + Tags.STRING_COMPRESSOR = new StringCompressor(new UniqueStringIntegerPairs()); - scale("singleTag"); - scale("tags0"); - scale("tags1"); - scale("tags2"); - scale("tags6"); - } + scale("singleTag"); + scale("tags0"); + scale("tags1"); + scale("tags2"); + scale("tags6"); + } - private static void scale(final String what) { - System.out.println("start: " + what); - // warmup of classes - getUsedMemory(); - Object handle = createObject(what); + private static void scale(final String what) { + System.out.println("start: " + what); + // warmup of classes + getUsedMemory(); + Object handle = createObject(what); - handle = null; + handle = null; - runGc(); - final long memoryBefore = getUsedMemory(); + runGc(); + final long memoryBefore = getUsedMemory(); - handle = createObject(what); + handle = createObject(what); - runGc(); - final long memoryAfter = getUsedMemory(); - System.out.println(what + ": used memory: " + (memoryAfter - memoryBefore)); - 
handle.hashCode(); // use the variable, so causes no warnings and is not removed by JIT compiler - } + runGc(); + final long memoryAfter = getUsedMemory(); + System.out.println(what + ": used memory: " + (memoryAfter - memoryBefore)); + handle.hashCode(); // use the variable, so causes no warnings and is not removed by JIT compiler + } - private static Object createObject(final String what) { + private static Object createObject(final String what) { - switch (what) { - case "singleTag": - return createTag(); - case "tags0": - return createTags0(); - case "tags1": - return createTags1(); - case "tags2": - return createTags2(); - case "tags6": - return createTags6(); - case "string": - return createString(); - case "linkedHashMap": - return createLinkedHashMap(); - case "path": - return createPath("C:\\pdb\\dataNew\\storage\\0\\4\\3n-5k_0-5l_2-1L_4-4n_3w-5h_6-7$.pdb"); - case "pathAsString": - return createPathAsString("C:\\pdb\\dataNew\\storage\\0\\4\\3n-5k_0-5l_2-1L_4-4n_3w-5h_6-7$.pdb"); - case "pathAsUtf8": - return createPathAsUtf8("C:\\pdb\\dataNew\\storage\\0\\4\\3n-5k_0-5l_2-1L_4-4n_3w-5h_6-7$.pdb"); - default: - return null; - } - } + switch (what) { + case "singleTag": + return createTag(); + case "tags0": + return createTags0(); + case "tags1": + return createTags1(); + case "tags2": + return createTags2(); + case "tags6": + return createTags6(); + case "string": + return createString(); + case "linkedHashMap": + return createLinkedHashMap(); + case "path": + return createPath("C:\\pdb\\dataNew\\storage\\0\\4\\3n-5k_0-5l_2-1L_4-4n_3w-5h_6-7$.pdb"); + case "pathAsString": + return createPathAsString("C:\\pdb\\dataNew\\storage\\0\\4\\3n-5k_0-5l_2-1L_4-4n_3w-5h_6-7$.pdb"); + case "pathAsUtf8": + return createPathAsUtf8("C:\\pdb\\dataNew\\storage\\0\\4\\3n-5k_0-5l_2-1L_4-4n_3w-5h_6-7$.pdb"); + default: + return null; + } + } - private static Object createTag() { - return new Tag("", ""); - } + private static Object createTag() { + return new Tag("", ""); + } - 
private static Object createTags0() { - return new Tags(); - } + private static Object createTags0() { + return new Tags(); + } - private static Object createTags1() { - return Tags.createAndAddToDictionary("k1", "v1"); - } + private static Object createTags1() { + return Tags.createAndAddToDictionary("k1", "v1"); + } - private static Object createTags2() { - return Tags.createAndAddToDictionary("k1", "v1", "k2", "v2"); - } + private static Object createTags2() { + return Tags.createAndAddToDictionary("k1", "v1", "k2", "v2"); + } - private static Object createTags6() { - TagsBuilder result = TagsBuilder.create(); - result = result.add("k1", "v1"); - result = result.add("k2", "v2"); - result = result.add("k3", "v3"); - result = result.add("k4", "v4"); - result = result.add("k5", "v5"); - result = result.add("k6", "v6"); - return result.build(); - } + private static Object createTags6() { + TagsBuilder result = TagsBuilder.create(); + result = result.add("k1", "v1"); + result = result.add("k2", "v2"); + result = result.add("k3", "v3"); + result = result.add("k4", "v4"); + result = result.add("k5", "v5"); + result = result.add("k6", "v6"); + return result.build(); + } - private static Object createPathAsUtf8(final String string) { - return string.getBytes(StandardCharsets.UTF_8); - } + private static Object createPathAsUtf8(final String string) { + return string.getBytes(StandardCharsets.UTF_8); + } - private static String createPathAsString(final String string) { - return string.replace("C", "c"); - } + private static String createPathAsString(final String string) { + return string.replace("C", "c"); + } - private static Path createPath(final String string) { - return Paths.get(string); - } + private static Path createPath(final String string) { + return Paths.get(string); + } - private static String createString() { + private static String createString() { - final int i = 0; - return "" + i; - } + final int i = 0; + return "" + i; + } - private static Object 
createLinkedHashMap() { - final Map map = new LinkedHashMap<>(); + private static Object createLinkedHashMap() { + final Map map = new LinkedHashMap<>(); - map.put("A", "A"); - for (int i = 0; i < 0; i++) { - map.put("" + i, "" + i); - } + map.put("A", "A"); + for (int i = 0; i < 0; i++) { + map.put("" + i, "" + i); + } - return map; - } + return map; + } - private static void runGc() { - for (int i = 0; i < 10; i++) { - System.gc(); - try { - Thread.sleep(100); - } catch (final InterruptedException e) { - Thread.currentThread().interrupt(); - } - } - } + private static void runGc() { + for (int i = 0; i < 10; i++) { + System.gc(); + try { + Thread.sleep(100); + } catch (final InterruptedException e) { + Thread.currentThread().interrupt(); + } + } + } - private static long getUsedMemory() { - return Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory(); - } + private static long getUsedMemory() { + return Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory(); + } } diff --git a/pdb-api/src/test/java/org/lucares/pdb/api/DateTimeRangeTest.java b/pdb-api/src/test/java/org/lucares/pdb/api/DateTimeRangeTest.java index 3095991..77dd994 100644 --- a/pdb-api/src/test/java/org/lucares/pdb/api/DateTimeRangeTest.java +++ b/pdb-api/src/test/java/org/lucares/pdb/api/DateTimeRangeTest.java @@ -11,27 +11,27 @@ import org.testng.annotations.DataProvider; import org.testng.annotations.Test; public class DateTimeRangeTest { - @DataProvider - Object[][] providerIntersect() { - final List result = new ArrayList<>(); + @DataProvider + Object[][] providerIntersect() { + final List result = new ArrayList<>(); - final OffsetDateTime a = Instant.ofEpochMilli(1000).atOffset(ZoneOffset.UTC); - final OffsetDateTime b = Instant.ofEpochMilli(2000).atOffset(ZoneOffset.UTC); - final OffsetDateTime c = Instant.ofEpochMilli(3000).atOffset(ZoneOffset.UTC); - final OffsetDateTime d = Instant.ofEpochMilli(4000).atOffset(ZoneOffset.UTC); + final OffsetDateTime a = 
Instant.ofEpochMilli(1000).atOffset(ZoneOffset.UTC); + final OffsetDateTime b = Instant.ofEpochMilli(2000).atOffset(ZoneOffset.UTC); + final OffsetDateTime c = Instant.ofEpochMilli(3000).atOffset(ZoneOffset.UTC); + final OffsetDateTime d = Instant.ofEpochMilli(4000).atOffset(ZoneOffset.UTC); - result.add(new Object[] { new DateTimeRange(a, b), new DateTimeRange(c, d), false }); - result.add(new Object[] { new DateTimeRange(a, c), new DateTimeRange(b, d), true }); - result.add(new Object[] { new DateTimeRange(a, d), new DateTimeRange(b, d), true }); - result.add(new Object[] { new DateTimeRange(a, d), new DateTimeRange(b, d), true }); - result.add(new Object[] { new DateTimeRange(a, b), new DateTimeRange(b, d), true }); + result.add(new Object[] { new DateTimeRange(a, b), new DateTimeRange(c, d), false }); + result.add(new Object[] { new DateTimeRange(a, c), new DateTimeRange(b, d), true }); + result.add(new Object[] { new DateTimeRange(a, d), new DateTimeRange(b, d), true }); + result.add(new Object[] { new DateTimeRange(a, d), new DateTimeRange(b, d), true }); + result.add(new Object[] { new DateTimeRange(a, b), new DateTimeRange(b, d), true }); - return result.toArray(new Object[result.size()][]); - } + return result.toArray(new Object[result.size()][]); + } - @Test(dataProvider = "providerIntersect") - public void testIntersect(final DateTimeRange a, final DateTimeRange b, final boolean expected) throws Exception { - Assert.assertEquals(a.intersect(b), expected, a + " intersects " + b); - Assert.assertEquals(b.intersect(a), expected, a + " intersects " + b); - } + @Test(dataProvider = "providerIntersect") + public void testIntersect(final DateTimeRange a, final DateTimeRange b, final boolean expected) throws Exception { + Assert.assertEquals(a.intersect(b), expected, a + " intersects " + b); + Assert.assertEquals(b.intersect(a), expected, a + " intersects " + b); + } } diff --git a/pdb-api/src/test/java/org/lucares/pdb/api/StringCompressorTest.java 
b/pdb-api/src/test/java/org/lucares/pdb/api/StringCompressorTest.java index 5878a9c..5fe95ba 100644 --- a/pdb-api/src/test/java/org/lucares/pdb/api/StringCompressorTest.java +++ b/pdb-api/src/test/java/org/lucares/pdb/api/StringCompressorTest.java @@ -18,63 +18,63 @@ import org.testng.annotations.Test; @Test public class StringCompressorTest { - private Path dataDirectory; + private Path dataDirectory; - @BeforeMethod - public void beforeMethod() throws IOException { - dataDirectory = Files.createTempDirectory("pdb"); - } + @BeforeMethod + public void beforeMethod() throws IOException { + dataDirectory = Files.createTempDirectory("pdb"); + } - @AfterMethod - public void afterMethod() throws IOException { - FileUtils.delete(dataDirectory); - } + @AfterMethod + public void afterMethod() throws IOException { + FileUtils.delete(dataDirectory); + } - public void testKeyCompressorRoundtrip() throws Exception { - final StringCompressor keyValueCompressor = StringCompressor.create(dataDirectory.resolve("key.csv")); + public void testKeyCompressorRoundtrip() throws Exception { + final StringCompressor keyValueCompressor = StringCompressor.create(dataDirectory.resolve("key.csv")); - final String value = "foo"; - final Integer intFoo = keyValueCompressor.put(value); - final String actual = keyValueCompressor.get(intFoo); + final String value = "foo"; + final Integer intFoo = keyValueCompressor.put(value); + final String actual = keyValueCompressor.get(intFoo); - Assert.assertEquals(actual, value); - } + Assert.assertEquals(actual, value); + } - public void testKeyCompressorInitialization() throws Exception { - final Path database = dataDirectory.resolve("key.csv"); - final String value = "foo"; - { - final StringCompressor keyValueCompressor = StringCompressor.create(database); + public void testKeyCompressorInitialization() throws Exception { + final Path database = dataDirectory.resolve("key.csv"); + final String value = "foo"; + { + final StringCompressor 
keyValueCompressor = StringCompressor.create(database); - keyValueCompressor.put(value); - } - { - final StringCompressor keyValueCompressor = StringCompressor.create(database); + keyValueCompressor.put(value); + } + { + final StringCompressor keyValueCompressor = StringCompressor.create(database); - keyValueCompressor.get(0); - } + keyValueCompressor.get(0); + } - } + } - @Test(invocationCount = 1) - public void testPutConcurrently() throws InterruptedException, ExecutionException { - final UniqueStringIntegerPairs usip = new UniqueStringIntegerPairs(); - final StringCompressor stringCompressor = new StringCompressor(usip); + @Test(invocationCount = 1) + public void testPutConcurrently() throws InterruptedException, ExecutionException { + final UniqueStringIntegerPairs usip = new UniqueStringIntegerPairs(); + final StringCompressor stringCompressor = new StringCompressor(usip); - final ExecutorService pool = Executors.newCachedThreadPool(); + final ExecutorService pool = Executors.newCachedThreadPool(); - final int numEntries = 1000; - final Future> future1 = pool.submit(new StringInserter(stringCompressor, numEntries)); - final Future> future2 = pool.submit(new StringInserter(stringCompressor, numEntries)); - final Future> future3 = pool.submit(new StringInserter(stringCompressor, numEntries)); + final int numEntries = 1000; + final Future> future1 = pool.submit(new StringInserter(stringCompressor, numEntries)); + final Future> future2 = pool.submit(new StringInserter(stringCompressor, numEntries)); + final Future> future3 = pool.submit(new StringInserter(stringCompressor, numEntries)); - future1.get(); - future2.get(); - future3.get(); + future1.get(); + future2.get(); + future3.get(); - pool.shutdown(); - pool.awaitTermination(1, TimeUnit.MILLISECONDS); + pool.shutdown(); + pool.awaitTermination(1, TimeUnit.MILLISECONDS); - Assert.assertEquals((int) usip.getHighestInteger(), 3 * numEntries - 1); - } + Assert.assertEquals((int) usip.getHighestInteger(), 3 * 
numEntries - 1); + } } diff --git a/pdb-api/src/test/java/org/lucares/pdb/api/StringInserter.java b/pdb-api/src/test/java/org/lucares/pdb/api/StringInserter.java index a98f725..e5bbbdc 100644 --- a/pdb-api/src/test/java/org/lucares/pdb/api/StringInserter.java +++ b/pdb-api/src/test/java/org/lucares/pdb/api/StringInserter.java @@ -7,23 +7,23 @@ import java.util.concurrent.Callable; final class StringInserter implements Callable> { - private final StringCompressor stringCompressor; - private final int numEntries; + private final StringCompressor stringCompressor; + private final int numEntries; - public StringInserter(final StringCompressor stringCompressor, final int numEntries) { - this.stringCompressor = stringCompressor; - this.numEntries = numEntries; - } + public StringInserter(final StringCompressor stringCompressor, final int numEntries) { + this.stringCompressor = stringCompressor; + this.numEntries = numEntries; + } - @Override - public List call() throws Exception { + @Override + public List call() throws Exception { - final List result = new ArrayList<>(); - for (int i = 0; i < numEntries; i++) { - final String s = UUID.randomUUID().toString(); - stringCompressor.put(s); - result.add(s); - } - return result; - } + final List result = new ArrayList<>(); + for (int i = 0; i < numEntries; i++) { + final String s = UUID.randomUUID().toString(); + stringCompressor.put(s); + result.add(s); + } + return result; + } } \ No newline at end of file diff --git a/pdb-api/src/test/java/org/lucares/pdb/api/UniqueStringIntegerPairsTest.java b/pdb-api/src/test/java/org/lucares/pdb/api/UniqueStringIntegerPairsTest.java index a5a7a68..9e0f6e9 100644 --- a/pdb-api/src/test/java/org/lucares/pdb/api/UniqueStringIntegerPairsTest.java +++ b/pdb-api/src/test/java/org/lucares/pdb/api/UniqueStringIntegerPairsTest.java @@ -13,62 +13,62 @@ import org.testng.annotations.Test; @Test public class UniqueStringIntegerPairsTest { - private Path dataDirectory; + private Path dataDirectory; 
- @BeforeMethod - public void beforeMethod() throws IOException { - dataDirectory = Files.createTempDirectory("pdb"); - } + @BeforeMethod + public void beforeMethod() throws IOException { + dataDirectory = Files.createTempDirectory("pdb"); + } - @AfterMethod - public void afterMethod() throws IOException { - FileUtils.delete(dataDirectory); - } + @AfterMethod + public void afterMethod() throws IOException { + FileUtils.delete(dataDirectory); + } - public void testPutGet() throws Exception { - final Path database = dataDirectory.resolve("key.csv"); - final String first = "key1"; - final Integer second = 1; + public void testPutGet() throws Exception { + final Path database = dataDirectory.resolve("key.csv"); + final String first = "key1"; + final Integer second = 1; - { - final UniqueStringIntegerPairs usip = new UniqueStringIntegerPairs(database); + { + final UniqueStringIntegerPairs usip = new UniqueStringIntegerPairs(database); - usip.put(first, second); - Assert.assertEquals(usip.get(first), second); - Assert.assertEquals(usip.getKey(second), first); - } + usip.put(first, second); + Assert.assertEquals(usip.get(first), second); + Assert.assertEquals(usip.getKey(second), first); + } - { - final UniqueStringIntegerPairs usip = new UniqueStringIntegerPairs(database); + { + final UniqueStringIntegerPairs usip = new UniqueStringIntegerPairs(database); - Assert.assertEquals(usip.get(first), second); - Assert.assertEquals(usip.getKey(second), first); - } - } + Assert.assertEquals(usip.get(first), second); + Assert.assertEquals(usip.getKey(second), first); + } + } - public void testUniqueKeyContstraint() throws Exception { - final Path database = dataDirectory.resolve("key.csv"); - final String first = "key1"; - final Integer second = 1; + public void testUniqueKeyContstraint() throws Exception { + final Path database = dataDirectory.resolve("key.csv"); + final String first = "key1"; + final Integer second = 1; - final UniqueStringIntegerPairs usip = new 
UniqueStringIntegerPairs(database); - usip.put(first, second); - try { - // cannot add another pair with the first key - final int another = second + 1; - usip.put(first, another); - Assert.fail("expected an IllegalArgumentException"); - } catch (final IllegalArgumentException e) { - // expected - } + final UniqueStringIntegerPairs usip = new UniqueStringIntegerPairs(database); + usip.put(first, second); + try { + // cannot add another pair with the first key + final int another = second + 1; + usip.put(first, another); + Assert.fail("expected an IllegalArgumentException"); + } catch (final IllegalArgumentException e) { + // expected + } - try { - // cannot add another pair with the same second value - final String another = first + 1; - usip.put(another, second); - Assert.fail("expected an IllegalArgumentException"); - } catch (final IllegalArgumentException e) { - // expected - } - } + try { + // cannot add another pair with the same second value + final String another = first + 1; + usip.put(another, second); + Assert.fail("expected an IllegalArgumentException"); + } catch (final IllegalArgumentException e) { + // expected + } + } } diff --git a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/Aggregate.java b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/Aggregate.java index a2fcbfa..4baa141 100644 --- a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/Aggregate.java +++ b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/Aggregate.java @@ -1,17 +1,18 @@ package org.lucares.pdb.plot.api; /** - * Note: The order in this enum defines the order in which the aggregates are drawn. + * Note: The order in this enum defines the order in which the aggregates are + * drawn. 
*/ public enum Aggregate { - PARALLEL, - - SCATTER, - - /** - * Empirical cumulative distribution functions - * - * @see https://serialmentor.com/dataviz/ecdf-qq.html - */ - CUM_DISTRIBUTION, + PARALLEL, + + SCATTER, + + /** + * Empirical cumulative distribution functions + * + * @see https://serialmentor.com/dataviz/ecdf-qq.html + */ + CUM_DISTRIBUTION, } diff --git a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/AggregateHandler.java b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/AggregateHandler.java index 55dc33c..3636113 100644 --- a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/AggregateHandler.java +++ b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/AggregateHandler.java @@ -13,52 +13,53 @@ import org.lucares.recommind.logs.LineStyle; public abstract class AggregateHandler implements Appender { - private GnuplotAxis xAxis = GnuplotAxis.X1; - - private GnuplotAxis yAxis = GnuplotAxis.Y1; - - public GnuplotAxis getxAxis() { - return xAxis; - } + private GnuplotAxis xAxis = GnuplotAxis.X1; - public void updateAxis(GnuplotAxis axis) { - switch (axis) { - case X1: - case X2: - this.xAxis = axis; - break; - case Y1: - case Y2: - this.yAxis = axis; - break; - default: - throw new IllegalArgumentException("Unexpected value: " + axis); + private GnuplotAxis yAxis = GnuplotAxis.Y1; + + public GnuplotAxis getxAxis() { + return xAxis; } - } - public GnuplotAxis getyAxis() { - return yAxis; - } + public void updateAxis(final GnuplotAxis axis) { + switch (axis) { + case X1: + case X2: + this.xAxis = axis; + break; + case Y1: + case Y2: + this.yAxis = axis; + break; + default: + throw new IllegalArgumentException("Unexpected value: " + axis); + } + } - protected String gnuplotXYAxis() { - return xAxis.getAxisNameForPlots()+yAxis.getAxisNameForPlots(); - } - - abstract Type getAxisType(GnuplotAxis axis); + public GnuplotAxis getyAxis() { + return yAxis; + } - abstract Aggregate getAggregateType(); + protected String gnuplotXYAxis() { + return 
xAxis.getAxisNameForPlots() + yAxis.getAxisNameForPlots(); + } - abstract AxisSettings createXAxisSettings(GnuplotSettings settings, Collection dataSeries); + abstract Type getAxisType(GnuplotAxis axis); - abstract AxisSettings createYAxisSettings(GnuplotSettings settings, Collection dataSeries); + abstract Aggregate getAggregateType(); - abstract void addPlot(StringBuilder result, AggregatedData aggregatedData, LineStyle lineStyle, Optional title); + abstract AxisSettings createXAxisSettings(GnuplotSettings settings, Collection dataSeries); - abstract CustomAggregator createCustomAggregator(Path tmpDir, PlotSettings plotSettings, long fromEpochMilli, - long toEpochMilli); + abstract AxisSettings createYAxisSettings(GnuplotSettings settings, Collection dataSeries); - protected String gnuplotTitle(Optional title) { - - return title.isPresent() ? "title '" + title.get() + "'" : "notitle"; - } + abstract void addPlot(StringBuilder result, AggregatedData aggregatedData, LineStyle lineStyle, + Optional title); + + abstract CustomAggregator createCustomAggregator(Path tmpDir, PlotSettings plotSettings, long fromEpochMilli, + long toEpochMilli); + + protected String gnuplotTitle(final Optional title) { + + return title.isPresent() ? 
"title '" + title.get() + "'" : "notitle"; + } } diff --git a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/AggregateHandlerCollection.java b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/AggregateHandlerCollection.java index 675fb61..ed2071e 100644 --- a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/AggregateHandlerCollection.java +++ b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/AggregateHandlerCollection.java @@ -17,91 +17,97 @@ import org.lucares.utils.CollectionUtils; import org.lucares.utils.Preconditions; public class AggregateHandlerCollection { - private static final Comparator PLOTTING_ORDER = Comparator.comparing(AggregateHandler::getAggregateType); - - private final List aggregateHandlers = new ArrayList<>(); + private static final Comparator PLOTTING_ORDER = Comparator + .comparing(AggregateHandler::getAggregateType); - public void add(AggregateHandler aggregateHandler) { - aggregateHandlers.add(aggregateHandler); - } - - public void updateAxisForHandlers() { - updateAxisForHandlers(GnuplotAxis.X1); - updateAxisForHandlers(GnuplotAxis.Y1); - } + private final List aggregateHandlers = new ArrayList<>(); - private void updateAxisForHandlers(GnuplotAxis axis) { - final EnumSet result = EnumSet.noneOf(Type.class); - for (AggregateHandler handler : aggregateHandlers) { - final Type type = handler.getAxisType(axis); - - if (result.isEmpty()) { - result.add(type); - }else { - final boolean containsType = result.contains(type); - if (containsType) { - // already has an axis of this type - // TODO merge axis definitions and use the greater values for: range, ticsIncrement - } else{ - Preconditions.checkSmaller(result.size(), 2, "at most two different axis are supported"); - final GnuplotAxis mirrorAxis = axis.mirrorAxis(); - handler.updateAxis(mirrorAxis); - result.add(type); + public void add(final AggregateHandler aggregateHandler) { + aggregateHandlers.add(aggregateHandler); + } + + public void updateAxisForHandlers() { + 
updateAxisForHandlers(GnuplotAxis.X1); + updateAxisForHandlers(GnuplotAxis.Y1); + } + + private void updateAxisForHandlers(final GnuplotAxis axis) { + final EnumSet result = EnumSet.noneOf(Type.class); + for (final AggregateHandler handler : aggregateHandlers) { + final Type type = handler.getAxisType(axis); + + if (result.isEmpty()) { + result.add(type); + } else { + final boolean containsType = result.contains(type); + if (containsType) { + // already has an axis of this type + // TODO merge axis definitions and use the greater values for: range, + // ticsIncrement + } else { + Preconditions.checkSmaller(result.size(), 2, "at most two different axis are supported"); + final GnuplotAxis mirrorAxis = axis.mirrorAxis(); + handler.updateAxis(mirrorAxis); + result.add(type); + } + } } - } - } - } - - public List getXAxisDefinitions(GnuplotSettings settings, Collection dataSeries) { - final List result = new ArrayList<>(); - for (AggregateHandler handler : aggregateHandlers) { - AxisSettings axis = handler.createXAxisSettings(settings, dataSeries); - result.add(axis); - } - return result; - } - - public List getYAxisDefinitions(GnuplotSettings settings, Collection dataSeries) { - List result = new ArrayList<>(); - for (AggregateHandler handler : aggregateHandlers) { - final AxisSettings axis = handler.createYAxisSettings(settings, dataSeries); - result.add(axis); - } - return result; - } - - - public AggregatorCollection createCustomAggregator(Path tmpDir, PlotSettings plotSettings, long fromEpochMilli, - long toEpochMilli) { - - final List aggregators = new ArrayList<>(); - - for (AggregateHandler handler : aggregateHandlers) { - final CustomAggregator aggregator = handler.createCustomAggregator(tmpDir, plotSettings, fromEpochMilli, toEpochMilli); - if (aggregator != null) { - aggregators.add(aggregator); - } } - return new AggregatorCollection(aggregators); - } - - public void addPlots(StringBuilder result, Collection dataSeries) { - - boolean first = true; - final 
List handlersInPlottingOrder = CollectionUtils.copySort(aggregateHandlers, PLOTTING_ORDER); - for (AggregateHandler handler : handlersInPlottingOrder) { - - for (DataSeries dataSerie : dataSeries) { - final Optional title = first ? Optional.of(dataSerie.getTitle()) : Optional.empty(); - - Optional aggregatedData = dataSerie.getAggregatedData().get(handler.getAggregateType()); - if(aggregatedData.isPresent()) { - handler.addPlot(result, aggregatedData.get(), dataSerie.getStyle(), title); + public List getXAxisDefinitions(final GnuplotSettings settings, + final Collection dataSeries) { + final List result = new ArrayList<>(); + for (final AggregateHandler handler : aggregateHandlers) { + final AxisSettings axis = handler.createXAxisSettings(settings, dataSeries); + result.add(axis); + } + return result; + } + + public List getYAxisDefinitions(final GnuplotSettings settings, + final Collection dataSeries) { + final List result = new ArrayList<>(); + for (final AggregateHandler handler : aggregateHandlers) { + final AxisSettings axis = handler.createYAxisSettings(settings, dataSeries); + result.add(axis); + } + return result; + } + + public AggregatorCollection createCustomAggregator(final Path tmpDir, final PlotSettings plotSettings, + final long fromEpochMilli, final long toEpochMilli) { + + final List aggregators = new ArrayList<>(); + + for (final AggregateHandler handler : aggregateHandlers) { + final CustomAggregator aggregator = handler.createCustomAggregator(tmpDir, plotSettings, fromEpochMilli, + toEpochMilli); + if (aggregator != null) { + aggregators.add(aggregator); + } + } + + return new AggregatorCollection(aggregators); + } + + public void addPlots(final StringBuilder result, final Collection dataSeries) { + + boolean first = true; + final List handlersInPlottingOrder = CollectionUtils.copySort(aggregateHandlers, + PLOTTING_ORDER); + for (final AggregateHandler handler : handlersInPlottingOrder) { + + for (final DataSeries dataSerie : dataSeries) { + 
final Optional title = first ? Optional.of(dataSerie.getTitle()) : Optional.empty(); + + final Optional aggregatedData = dataSerie.getAggregatedData() + .get(handler.getAggregateType()); + if (aggregatedData.isPresent()) { + handler.addPlot(result, aggregatedData.get(), dataSerie.getStyle(), title); + } + } + + first = false; } - } - - first = false; } - } } diff --git a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/AggregatedData.java b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/AggregatedData.java index 37ec819..77c8fb7 100644 --- a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/AggregatedData.java +++ b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/AggregatedData.java @@ -3,19 +3,19 @@ package org.lucares.pdb.plot.api; import java.io.File; public class AggregatedData { - private final String label; - private final File dataFile; + private final String label; + private final File dataFile; - public AggregatedData(final String label, final File dataFile) { - this.label = label; - this.dataFile = dataFile; - } + public AggregatedData(final String label, final File dataFile) { + this.label = label; + this.dataFile = dataFile; + } - public String getLabel() { - return label; - } + public String getLabel() { + return label; + } - public File getDataFile() { - return dataFile; - } + public File getDataFile() { + return dataFile; + } } diff --git a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/AggregatedDataCollection.java b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/AggregatedDataCollection.java index 1fb342b..9cdbbf3 100644 --- a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/AggregatedDataCollection.java +++ b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/AggregatedDataCollection.java @@ -4,19 +4,19 @@ import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Optional; -public class AggregatedDataCollection implements Iterable{ - private final LinkedHashMap aggregatedDatas = new LinkedHashMap<>(); 
+public class AggregatedDataCollection implements Iterable { + private final LinkedHashMap aggregatedDatas = new LinkedHashMap<>(); - public void put(Aggregate aggregate, AggregatedData aggregatedData) { - aggregatedDatas.put(aggregate, aggregatedData); - } + public void put(final Aggregate aggregate, final AggregatedData aggregatedData) { + aggregatedDatas.put(aggregate, aggregatedData); + } - @Override - public Iterator iterator() { - return aggregatedDatas.values().iterator(); - } + @Override + public Iterator iterator() { + return aggregatedDatas.values().iterator(); + } - public Optional get(Aggregate aggregateType) { - return Optional.ofNullable(aggregatedDatas.get(aggregateType)); - } + public Optional get(final Aggregate aggregateType) { + return Optional.ofNullable(aggregatedDatas.get(aggregateType)); + } } diff --git a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/AggregatedDataEntry.java b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/AggregatedDataEntry.java index 535ae9e..b4406d9 100644 --- a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/AggregatedDataEntry.java +++ b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/AggregatedDataEntry.java @@ -1,18 +1,21 @@ package org.lucares.pdb.plot.api; public class AggregatedDataEntry { -private final double epochSeconds; -private final long value; -public AggregatedDataEntry(double epochSeconds, long value) { - super(); - this.epochSeconds = epochSeconds; - this.value = value; -} -public double getEpochSeconds() { - return epochSeconds; -} -public long getValue() { - return value; -} + private final double epochSeconds; + private final long value; + + public AggregatedDataEntry(final double epochSeconds, final long value) { + super(); + this.epochSeconds = epochSeconds; + this.value = value; + } + + public double getEpochSeconds() { + return epochSeconds; + } + + public long getValue() { + return value; + } } diff --git 
a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/AggregatorCollection.java b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/AggregatorCollection.java index cc3b816..52f347e 100644 --- a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/AggregatorCollection.java +++ b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/AggregatorCollection.java @@ -4,26 +4,26 @@ import java.io.IOException; import java.util.List; public class AggregatorCollection { - private final List aggregators; + private final List aggregators; - public AggregatorCollection(List aggregators) { - this.aggregators = aggregators; - } - - public void addValue(boolean valueIsInYRange, long epochMilli, long value) { - for (CustomAggregator aggregator : aggregators) { - aggregator.addValue(valueIsInYRange, epochMilli, value); + public AggregatorCollection(final List aggregators) { + this.aggregators = aggregators; } - } - public AggregatedDataCollection getAggregatedData() throws IOException { - - AggregatedDataCollection result = new AggregatedDataCollection(); - - for (CustomAggregator aggregator : aggregators) { - result.put(aggregator.getType(), aggregator.getAggregatedData()); + public void addValue(final boolean valueIsInYRange, final long epochMilli, final long value) { + for (final CustomAggregator aggregator : aggregators) { + aggregator.addValue(valueIsInYRange, epochMilli, value); + } + } + + public AggregatedDataCollection getAggregatedData() throws IOException { + + final AggregatedDataCollection result = new AggregatedDataCollection(); + + for (final CustomAggregator aggregator : aggregators) { + result.put(aggregator.getType(), aggregator.getAggregatedData()); + } + + return result; } - - return result; - } } diff --git a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/Appender.java b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/Appender.java index f412e3c..c857b1d 100644 --- a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/Appender.java +++ 
b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/Appender.java @@ -3,15 +3,15 @@ package org.lucares.pdb.plot.api; import java.util.Locale; public interface Appender { - default void appendln(final StringBuilder builder, final String string) { - builder.append(string + "\n"); -} + default void appendln(final StringBuilder builder, final String string) { + builder.append(string + "\n"); + } -default void appendfln(final StringBuilder builder, final String format, final Object... args) { - builder.append(String.format(Locale.US,format + "\n", args)); -} + default void appendfln(final StringBuilder builder, final String format, final Object... args) { + builder.append(String.format(Locale.US, format + "\n", args)); + } -default void appendf(final StringBuilder builder, final String format, final Object... args) { - builder.append(String.format(Locale.US,format, args)); -} + default void appendf(final StringBuilder builder, final String format, final Object... args) { + builder.append(String.format(Locale.US, format, args)); + } } diff --git a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/AxisScale.java b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/AxisScale.java index fe5923b..daf2689 100644 --- a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/AxisScale.java +++ b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/AxisScale.java @@ -1,5 +1,5 @@ package org.lucares.pdb.plot.api; public enum AxisScale { - LINEAR, LOG10 + LINEAR, LOG10 } diff --git a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/CumulativeDistributionCustomAggregator.java b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/CumulativeDistributionCustomAggregator.java index 6091f83..f00ff7f 100644 --- a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/CumulativeDistributionCustomAggregator.java +++ b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/CumulativeDistributionCustomAggregator.java @@ -15,114 +15,114 @@ import org.lucares.collections.LongLongHashMap; public 
class CumulativeDistributionCustomAggregator implements CustomAggregator { - private final static int POINTS = 500; + private final static int POINTS = 500; - private static final class ToPercentiles implements LongLongConsumer { + private static final class ToPercentiles implements LongLongConsumer { - private long cumulativeCount = 0; + private long cumulativeCount = 0; - private long maxValue = 0; + private long maxValue = 0; - private final LinkedHashMap percentiles = new LinkedHashMap<>(POINTS); + private final LinkedHashMap percentiles = new LinkedHashMap<>(POINTS); - private final double stepSize; + private final double stepSize; - private double lastPercentile; - private double nextPercentile; + private double lastPercentile; + private double nextPercentile; - private final long totalValues; + private final long totalValues; - public ToPercentiles(final long totalValues) { - this.totalValues = totalValues; - stepSize = 100.0 / POINTS; - nextPercentile = stepSize; - } + public ToPercentiles(final long totalValues) { + this.totalValues = totalValues; + stepSize = 100.0 / POINTS; + nextPercentile = stepSize; + } - @Override - public void accept(final long duration, final long count) { - maxValue = duration; + @Override + public void accept(final long duration, final long count) { + maxValue = duration; - cumulativeCount += count; - final double newPercentile = cumulativeCount * 100.0 / totalValues; + cumulativeCount += count; + final double newPercentile = cumulativeCount * 100.0 / totalValues; - if (newPercentile >= nextPercentile) { - double currentPercentile = lastPercentile + stepSize; - while (currentPercentile <= newPercentile) { - percentiles.put(currentPercentile, duration); - currentPercentile += stepSize; - } - nextPercentile = currentPercentile; - lastPercentile = currentPercentile - stepSize; - } - } + if (newPercentile >= nextPercentile) { + double currentPercentile = lastPercentile + stepSize; + while (currentPercentile <= newPercentile) { + 
percentiles.put(currentPercentile, duration); + currentPercentile += stepSize; + } + nextPercentile = currentPercentile; + lastPercentile = currentPercentile - stepSize; + } + } - public long getMaxValue() { - return maxValue; - } + public long getMaxValue() { + return maxValue; + } - public LinkedHashMap getPercentiles() { - return percentiles; - } + public LinkedHashMap getPercentiles() { + return percentiles; + } - } + } - // the rather large initial capacity should prevent too many grow&re-hash phases - private final LongLongHashMap map = new LongLongHashMap(5_000, 0.75); + // the rather large initial capacity should prevent too many grow&re-hash phases + private final LongLongHashMap map = new LongLongHashMap(5_000, 0.75); - private long totalValues = 0; + private long totalValues = 0; - private final Path tmpDir; + private final Path tmpDir; - public CumulativeDistributionCustomAggregator(final Path tmpDir) { - this.tmpDir = tmpDir; - } + public CumulativeDistributionCustomAggregator(final Path tmpDir) { + this.tmpDir = tmpDir; + } - @Override - public void addValue(boolean valueIsInYRange, final long epochMilli, final long value) { - map.compute(value, 0, l -> l + 1); - totalValues++; - } + @Override + public void addValue(boolean valueIsInYRange, final long epochMilli, final long value) { + map.compute(value, 0, l -> l + 1); + totalValues++; + } - @Override - public AggregatedData getAggregatedData() throws IOException { - final char separator = ','; - final char newline = '\n'; + @Override + public AggregatedData getAggregatedData() throws IOException { + final char separator = ','; + final char newline = '\n'; - final ToPercentiles toPercentiles = new ToPercentiles(totalValues); - map.forEachOrdered(toPercentiles); + final ToPercentiles toPercentiles = new ToPercentiles(totalValues); + map.forEachOrdered(toPercentiles); - final File dataFile = File.createTempFile("data", ".dat", tmpDir.toFile()); - try (final Writer output = new BufferedWriter( - new 
OutputStreamWriter(new FileOutputStream(dataFile), StandardCharsets.US_ASCII));) { + final File dataFile = File.createTempFile("data", ".dat", tmpDir.toFile()); + try (final Writer output = new BufferedWriter( + new OutputStreamWriter(new FileOutputStream(dataFile), StandardCharsets.US_ASCII));) { - final StringBuilder data = new StringBuilder(); - if (map.size() > 0) { - // compute the percentiles - toPercentiles.getPercentiles().forEach((percentile, value) -> { + final StringBuilder data = new StringBuilder(); + if (map.size() > 0) { + // compute the percentiles + toPercentiles.getPercentiles().forEach((percentile, value) -> { - data.append(percentile); - data.append(separator); - data.append(value); - data.append(newline); - }); + data.append(percentile); + data.append(separator); + data.append(value); + data.append(newline); + }); - final long maxValue = toPercentiles.getMaxValue(); - data.append(100); - data.append(separator); - data.append(maxValue); - data.append(newline); - } - output.write(data.toString()); + final long maxValue = toPercentiles.getMaxValue(); + data.append(100); + data.append(separator); + data.append(maxValue); + data.append(newline); + } + output.write(data.toString()); - } + } - final String title = String.format("cumulative distribution"); - return new AggregatedData(title, dataFile); - } + final String title = String.format("cumulative distribution"); + return new AggregatedData(title, dataFile); + } - @Override - public Aggregate getType() { - return Aggregate.CUM_DISTRIBUTION; - } + @Override + public Aggregate getType() { + return Aggregate.CUM_DISTRIBUTION; + } } diff --git a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/CumulativeDistributionHandler.java b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/CumulativeDistributionHandler.java index 9d2a76d..20417dd 100644 --- a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/CumulativeDistributionHandler.java +++ 
b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/CumulativeDistributionHandler.java @@ -14,74 +14,74 @@ import org.lucares.recommind.logs.AxisSettings.Type; public class CumulativeDistributionHandler extends AggregateHandler { - @Override - public CustomAggregator createCustomAggregator(final Path tmpDir, PlotSettings plotSettings, - final long fromEpochMilli, final long toEpochMilli) { - return new CumulativeDistributionCustomAggregator(tmpDir); - } - - public CumulativeDistributionHandler() { - } - - @Override - Type getAxisType(GnuplotAxis axis) { - switch (axis) { - case X1: - case X2: - return Type.Percent; - case Y1: - case Y2: - return Type.Duration; - default: - throw new IllegalArgumentException("Unexpected value: " + axis); + @Override + public CustomAggregator createCustomAggregator(final Path tmpDir, PlotSettings plotSettings, + final long fromEpochMilli, final long toEpochMilli) { + return new CumulativeDistributionCustomAggregator(tmpDir); } - } - @Override - public AxisSettings createYAxisSettings(GnuplotSettings settings, Collection dataSeries) { - AxisSettings result = AxisTime.createYAxis(settings, dataSeries); - result.setAxis(getyAxis()); - return result; - } - - @Override - public AxisSettings createXAxisSettings(GnuplotSettings settings, Collection dataSeries) { - AxisSettings result = new AxisSettings(); - result.setLabel("Cumulative Distribution"); - result.setType(Type.Percent); - result.setAxis(getxAxis()); - result.setFormat("%.0f%%"); - result.setTicIncrement(computeTicIncrement(settings)); - result.setFrom("0"); - result.setTo("100"); - return result; - } - - private int computeTicIncrement(GnuplotSettings settings) { - int widthByFontSize = settings.getWidth() / GnuplotSettings.TICKS_FONT_SIZE; - if (widthByFontSize < 50) { - return 20; - } else if (widthByFontSize < 75) { - return 10; - } else { - return 5; + public CumulativeDistributionHandler() { } - } - @Override - public void addPlot(StringBuilder result, AggregatedData 
aggregatedData, LineStyle lineStyle, - Optional title) { - appendfln(result, "'%s' using 1:2 %s with lines axes %s lw 2 %s, \\", // - aggregatedData.getDataFile().getAbsolutePath(), // - gnuplotTitle(title), // - gnuplotXYAxis(), // - lineStyle.darker()// - ); - } + @Override + Type getAxisType(GnuplotAxis axis) { + switch (axis) { + case X1: + case X2: + return Type.Percent; + case Y1: + case Y2: + return Type.Duration; + default: + throw new IllegalArgumentException("Unexpected value: " + axis); + } + } - @Override - public Aggregate getAggregateType() { - return Aggregate.CUM_DISTRIBUTION; - } + @Override + public AxisSettings createYAxisSettings(GnuplotSettings settings, Collection dataSeries) { + AxisSettings result = AxisTime.createYAxis(settings, dataSeries); + result.setAxis(getyAxis()); + return result; + } + + @Override + public AxisSettings createXAxisSettings(GnuplotSettings settings, Collection dataSeries) { + AxisSettings result = new AxisSettings(); + result.setLabel("Cumulative Distribution"); + result.setType(Type.Percent); + result.setAxis(getxAxis()); + result.setFormat("%.0f%%"); + result.setTicIncrement(computeTicIncrement(settings)); + result.setFrom("0"); + result.setTo("100"); + return result; + } + + private int computeTicIncrement(GnuplotSettings settings) { + int widthByFontSize = settings.getWidth() / GnuplotSettings.TICKS_FONT_SIZE; + if (widthByFontSize < 50) { + return 20; + } else if (widthByFontSize < 75) { + return 10; + } else { + return 5; + } + } + + @Override + public void addPlot(StringBuilder result, AggregatedData aggregatedData, LineStyle lineStyle, + Optional title) { + appendfln(result, "'%s' using 1:2 %s with lines axes %s lw 2 %s, \\", // + aggregatedData.getDataFile().getAbsolutePath(), // + gnuplotTitle(title), // + gnuplotXYAxis(), // + lineStyle.darker()// + ); + } + + @Override + public Aggregate getAggregateType() { + return Aggregate.CUM_DISTRIBUTION; + } } diff --git 
a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/CustomAggregator.java b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/CustomAggregator.java index c9e56b2..886b670 100644 --- a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/CustomAggregator.java +++ b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/CustomAggregator.java @@ -4,9 +4,9 @@ import java.io.IOException; public interface CustomAggregator { - void addValue(boolean valueIsInYRange, long epochMilli, long value); + void addValue(boolean valueIsInYRange, long epochMilli, long value); - AggregatedData getAggregatedData() throws IOException; + AggregatedData getAggregatedData() throws IOException; - Aggregate getType(); + Aggregate getType(); } diff --git a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/Limit.java b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/Limit.java index 5349484..bd104b9 100644 --- a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/Limit.java +++ b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/Limit.java @@ -1,5 +1,5 @@ package org.lucares.pdb.plot.api; public enum Limit { - NO_LIMIT, MOST_VALUES, FEWEST_VALUES, MAX_VALUE, MIN_VALUE + NO_LIMIT, MOST_VALUES, FEWEST_VALUES, MAX_VALUE, MIN_VALUE } diff --git a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/ParallelRequestsAggregate.java b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/ParallelRequestsAggregate.java index 4f8ea50..7ab4b15 100644 --- a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/ParallelRequestsAggregate.java +++ b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/ParallelRequestsAggregate.java @@ -15,61 +15,61 @@ import org.lucares.recommind.logs.DataSeries; public class ParallelRequestsAggregate extends AggregateHandler { - @Override - Type getAxisType(GnuplotAxis axis) { - switch (axis) { - case X1: - case X2: - return Type.Time; - case Y1: - case Y2: - return Type.Number; - default: - throw new IllegalArgumentException("Unexpected value: " + axis); + @Override + 
Type getAxisType(GnuplotAxis axis) { + switch (axis) { + case X1: + case X2: + return Type.Time; + case Y1: + case Y2: + return Type.Number; + default: + throw new IllegalArgumentException("Unexpected value: " + axis); + } } - } - - @Override - public AxisSettings createYAxisSettings(GnuplotSettings settings, Collection dataSeries) { - final AxisSettings result = new AxisSettings(); - result.setLabel("Parallel Requests"); - result.setType(Type.Number); - result.setAxis(getyAxis()); - result.setTicsEnabled(true); - result.setFrom("0"); - return result; - } - - @Override - public AxisSettings createXAxisSettings(GnuplotSettings settings, Collection dataSeries) { - final AxisSettings result = AxisTime.createXAxis(settings); - result.setAxis(getxAxis()); - return result; - } - @Override - public void addPlot(StringBuilder result, AggregatedData aggregatedData, LineStyle lineStyle, - Optional title) { - appendfln(result, "'%s' using 1:2 %s with filledcurve axes %s lw 1 %s, \\", // - aggregatedData.getDataFile().getAbsolutePath(), // - gnuplotTitle(title), // - gnuplotXYAxis(), // - lineStyle.brighter().asGnuplotLineStyle()// - ); - } - - @Override - public CustomAggregator createCustomAggregator(final Path tmpDir, PlotSettings plotSettings, - final long fromEpochMilli, final long toEpochMilli) { - if ((toEpochMilli - fromEpochMilli) <= TimeUnit.HOURS.toMillis(50)) { - return new ParallelRequestsAggregator(tmpDir, fromEpochMilli, toEpochMilli); - } else { - return null; + @Override + public AxisSettings createYAxisSettings(GnuplotSettings settings, Collection dataSeries) { + final AxisSettings result = new AxisSettings(); + result.setLabel("Parallel Requests"); + result.setType(Type.Number); + result.setAxis(getyAxis()); + result.setTicsEnabled(true); + result.setFrom("0"); + return result; } - } - @Override - public Aggregate getAggregateType() { - return Aggregate.PARALLEL; - } + @Override + public AxisSettings createXAxisSettings(GnuplotSettings settings, Collection 
dataSeries) { + final AxisSettings result = AxisTime.createXAxis(settings); + result.setAxis(getxAxis()); + return result; + } + + @Override + public void addPlot(StringBuilder result, AggregatedData aggregatedData, LineStyle lineStyle, + Optional title) { + appendfln(result, "'%s' using 1:2 %s with filledcurve axes %s lw 1 %s, \\", // + aggregatedData.getDataFile().getAbsolutePath(), // + gnuplotTitle(title), // + gnuplotXYAxis(), // + lineStyle.brighter().asGnuplotLineStyle()// + ); + } + + @Override + public CustomAggregator createCustomAggregator(final Path tmpDir, PlotSettings plotSettings, + final long fromEpochMilli, final long toEpochMilli) { + if ((toEpochMilli - fromEpochMilli) <= TimeUnit.HOURS.toMillis(50)) { + return new ParallelRequestsAggregator(tmpDir, fromEpochMilli, toEpochMilli); + } else { + return null; + } + } + + @Override + public Aggregate getAggregateType() { + return Aggregate.PARALLEL; + } } diff --git a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/ParallelRequestsAggregator.java b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/ParallelRequestsAggregator.java index 2ce853c..47b8648 100644 --- a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/ParallelRequestsAggregator.java +++ b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/ParallelRequestsAggregator.java @@ -15,88 +15,88 @@ import org.slf4j.LoggerFactory; public class ParallelRequestsAggregator implements CustomAggregator { - private static final char NEWLINE = '\n'; + private static final char NEWLINE = '\n'; - private static final char SEPARATOR = ','; + private static final char SEPARATOR = ','; - private static final Logger METRICS_LOGGER = LoggerFactory - .getLogger("org.lucares.metrics.aggregator.parallelRequests"); + private static final Logger METRICS_LOGGER = LoggerFactory + .getLogger("org.lucares.metrics.aggregator.parallelRequests"); - private final Path tmpDir; + private final Path tmpDir; - private final short[] increments; + private final short[] 
increments; - private final long fromEpochMilli; + private final long fromEpochMilli; - private final long toEpochMilli; + private final long toEpochMilli; - public ParallelRequestsAggregator(final Path tmpDir, final long fromEpochMilli, final long toEpochMilli) { - this.tmpDir = tmpDir; - this.fromEpochMilli = fromEpochMilli; - this.toEpochMilli = toEpochMilli; + public ParallelRequestsAggregator(final Path tmpDir, final long fromEpochMilli, final long toEpochMilli) { + this.tmpDir = tmpDir; + this.fromEpochMilli = fromEpochMilli; + this.toEpochMilli = toEpochMilli; - final int milliseconds = (int) (toEpochMilli - fromEpochMilli); - increments = new short[milliseconds+1]; - } + final int milliseconds = (int) (toEpochMilli - fromEpochMilli); + increments = new short[milliseconds + 1]; + } - @Override - public void addValue(boolean valueIsInYRange,final long epochMilli, final long value) { + @Override + public void addValue(boolean valueIsInYRange, final long epochMilli, final long value) { - final int endPos = (int) (epochMilli - fromEpochMilli); - increments[endPos]--; + final int endPos = (int) (epochMilli - fromEpochMilli); + increments[endPos]--; - final int startPos = Math.max(0, (int) (endPos - value)); - increments[startPos]++; + final int startPos = Math.max(0, (int) (endPos - value)); + increments[startPos]++; - } + } - @Override - public AggregatedData getAggregatedData() throws IOException { + @Override + public AggregatedData getAggregatedData() throws IOException { - final long start = System.nanoTime(); + final long start = System.nanoTime(); - final File dataFile = File.createTempFile("data", ".dat", tmpDir.toFile()); - try (final Writer output = new BufferedWriter( - new OutputStreamWriter(new FileOutputStream(dataFile), StandardCharsets.US_ASCII));) { + final File dataFile = File.createTempFile("data", ".dat", tmpDir.toFile()); + try (final Writer output = new BufferedWriter( + new OutputStreamWriter(new FileOutputStream(dataFile), 
StandardCharsets.US_ASCII));) { - final StringBuilder data = new StringBuilder(); + final StringBuilder data = new StringBuilder(); - // first and last value should be 0, or gnuplot will draw a diagonal line - appendTimeAndValue(data, fromEpochMilli, 0); + // first and last value should be 0, or gnuplot will draw a diagonal line + appendTimeAndValue(data, fromEpochMilli, 0); - int value = 0; - for (int i = 0; i < increments.length - 1; i++) { - final int increment = increments[i]; - final int nextIncrement = increments[i + 1]; - if (increment != 0 || nextIncrement != 0) { - value += increment; - appendTimeAndValue(data, fromEpochMilli + i, value); - } - } + int value = 0; + for (int i = 0; i < increments.length - 1; i++) { + final int increment = increments[i]; + final int nextIncrement = increments[i + 1]; + if (increment != 0 || nextIncrement != 0) { + value += increment; + appendTimeAndValue(data, fromEpochMilli + i, value); + } + } - // first and last value should be 0, or gnuplot will draw a diagonal line - appendTimeAndValue(data, toEpochMilli, 0); + // first and last value should be 0, or gnuplot will draw a diagonal line + appendTimeAndValue(data, toEpochMilli, 0); - output.write(data.toString()); + output.write(data.toString()); - } + } - final String title = String.format("parallelRequests"); - METRICS_LOGGER.debug("wrote parallelRequests csv in: {}ms file={}", (System.nanoTime() - start) / 1_000_000.0, - dataFile); - return new AggregatedData(title, dataFile); - } + final String title = String.format("parallelRequests"); + METRICS_LOGGER.debug("wrote parallelRequests csv in: {}ms file={}", (System.nanoTime() - start) / 1_000_000.0, + dataFile); + return new AggregatedData(title, dataFile); + } - private void appendTimeAndValue(final StringBuilder builder, final long timeEpochMilli, final int value) { - builder.append(String.format(Locale.US, "%.3f", timeEpochMilli / 1000.0)); - builder.append(SEPARATOR); - builder.append(value); - 
builder.append(NEWLINE); - } + private void appendTimeAndValue(final StringBuilder builder, final long timeEpochMilli, final int value) { + builder.append(String.format(Locale.US, "%.3f", timeEpochMilli / 1000.0)); + builder.append(SEPARATOR); + builder.append(value); + builder.append(NEWLINE); + } - @Override - public Aggregate getType() { - return Aggregate.PARALLEL; - } + @Override + public Aggregate getType() { + return Aggregate.PARALLEL; + } } diff --git a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/PlotSettings.java b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/PlotSettings.java index ef5620a..764ce52 100644 --- a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/PlotSettings.java +++ b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/PlotSettings.java @@ -12,186 +12,186 @@ import org.lucares.utils.Preconditions; public class PlotSettings { - private static final DateTimeFormatter DATE_FORMAT = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"); + private static final DateTimeFormatter DATE_FORMAT = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"); - private String query; + private String query; - private int height; + private int height; - private int width; + private int width; - private int thumbnailMaxWidth = 0; + private int thumbnailMaxWidth = 0; - private int thumbnailMaxHeight = 0; + private int thumbnailMaxHeight = 0; - private List groupBy; + private List groupBy; - private Limit limitBy; + private Limit limitBy; - private int limit; + private int limit; - private String dateRangeAsString; + private String dateRangeAsString; - private AxisScale yAxisScale; + private AxisScale yAxisScale; - private AggregateHandlerCollection aggregates; + private AggregateHandlerCollection aggregates; - private int yRangeMin; - private int yRangeMax; - private TimeRangeUnitInternal yRangeUnit = TimeRangeUnitInternal.AUTOMATIC; + private int yRangeMin; + private int yRangeMax; + private TimeRangeUnitInternal yRangeUnit = 
TimeRangeUnitInternal.AUTOMATIC; - private boolean keyOutside; + private boolean keyOutside; - private boolean generateThumbnail; + private boolean generateThumbnail; - public String getQuery() { - return query; - } + public String getQuery() { + return query; + } - public void setQuery(final String query) { - this.query = query; - } + public void setQuery(final String query) { + this.query = query; + } - public int getHeight() { - return height; - } + public int getHeight() { + return height; + } - public void setHeight(final int height) { - this.height = height; - } + public void setHeight(final int height) { + this.height = height; + } - public int getWidth() { - return width; - } + public int getWidth() { + return width; + } - public void setWidth(final int width) { - this.width = width; - } + public void setWidth(final int width) { + this.width = width; + } - public int getThumbnailMaxWidth() { - return thumbnailMaxWidth; - } + public int getThumbnailMaxWidth() { + return thumbnailMaxWidth; + } - public void setThumbnailMaxWidth(final int thumbnailMaxWidth) { - this.thumbnailMaxWidth = thumbnailMaxWidth; - } + public void setThumbnailMaxWidth(final int thumbnailMaxWidth) { + this.thumbnailMaxWidth = thumbnailMaxWidth; + } - public int getThumbnailMaxHeight() { - return thumbnailMaxHeight; - } + public int getThumbnailMaxHeight() { + return thumbnailMaxHeight; + } - public void setThumbnailMaxHeight(final int thumbnailMaxHeight) { - this.thumbnailMaxHeight = thumbnailMaxHeight; - } + public void setThumbnailMaxHeight(final int thumbnailMaxHeight) { + this.thumbnailMaxHeight = thumbnailMaxHeight; + } - public List getGroupBy() { - return groupBy; - } + public List getGroupBy() { + return groupBy; + } - public void setGroupBy(final List groupBy) { - this.groupBy = groupBy; - } + public void setGroupBy(final List groupBy) { + this.groupBy = groupBy; + } - public Limit getLimitBy() { - return limitBy; - } + public Limit getLimitBy() { + return limitBy; + } - public 
void setLimitBy(final Limit limitBy) { - this.limitBy = limitBy; - } + public void setLimitBy(final Limit limitBy) { + this.limitBy = limitBy; + } - public int getLimit() { - return limit; - } + public int getLimit() { + return limit; + } - public void setLimit(final int limit) { - this.limit = limit; - } + public void setLimit(final int limit) { + this.limit = limit; + } - public String getDateRange() { - return dateRangeAsString; - } + public String getDateRange() { + return dateRangeAsString; + } - public void setDateRange(final String dateRangeAsString) { - this.dateRangeAsString = dateRangeAsString; - } + public void setDateRange(final String dateRangeAsString) { + this.dateRangeAsString = dateRangeAsString; + } - public DateTimeRange dateRange() { + public DateTimeRange dateRange() { - final String[] startEnd = dateRangeAsString.split(Pattern.quote(" - ")); - Preconditions.checkEqual(startEnd.length, 2, "invalid date range: ''{0}''", dateRangeAsString); + final String[] startEnd = dateRangeAsString.split(Pattern.quote(" - ")); + Preconditions.checkEqual(startEnd.length, 2, "invalid date range: ''{0}''", dateRangeAsString); - final OffsetDateTime startDate = LocalDateTime.parse(startEnd[0], DATE_FORMAT).atOffset(ZoneOffset.UTC); - final OffsetDateTime endDate = LocalDateTime.parse(startEnd[1], DATE_FORMAT).atOffset(ZoneOffset.UTC); + final OffsetDateTime startDate = LocalDateTime.parse(startEnd[0], DATE_FORMAT).atOffset(ZoneOffset.UTC); + final OffsetDateTime endDate = LocalDateTime.parse(startEnd[1], DATE_FORMAT).atOffset(ZoneOffset.UTC); - return new DateTimeRange(startDate, endDate); + return new DateTimeRange(startDate, endDate); - } + } - public void setYAxisScale(final AxisScale axisScale) { - this.yAxisScale = axisScale; - } + public void setYAxisScale(final AxisScale axisScale) { + this.yAxisScale = axisScale; + } - public AxisScale getYAxisScale() { - return yAxisScale; - } + public AxisScale getYAxisScale() { + return yAxisScale; + } - @Override - 
public String toString() { - return "PlotSettings [query=" + query + ", height=" + height + ", width=" + width + ", thumbnailMaxWidth=" - + thumbnailMaxWidth + ", thumbnailMaxHeight=" + thumbnailMaxHeight + ", groupBy=" + groupBy - + ", limitBy=" + limitBy + ", limit=" + limit + ", dateRangeAsString=" + dateRangeAsString - + ", yAxisScale=" + yAxisScale + ", aggregates=" + aggregates + ", yRangeMin=" + yRangeMin - + ", yRangeMax=" + yRangeMax + ", yRangeUnit=" + yRangeUnit + ", keyOutside=" + keyOutside - + ", generateThumbnail=" + generateThumbnail + "]"; - } + @Override + public String toString() { + return "PlotSettings [query=" + query + ", height=" + height + ", width=" + width + ", thumbnailMaxWidth=" + + thumbnailMaxWidth + ", thumbnailMaxHeight=" + thumbnailMaxHeight + ", groupBy=" + groupBy + + ", limitBy=" + limitBy + ", limit=" + limit + ", dateRangeAsString=" + dateRangeAsString + + ", yAxisScale=" + yAxisScale + ", aggregates=" + aggregates + ", yRangeMin=" + yRangeMin + + ", yRangeMax=" + yRangeMax + ", yRangeUnit=" + yRangeUnit + ", keyOutside=" + keyOutside + + ", generateThumbnail=" + generateThumbnail + "]"; + } - public void setAggregates(final AggregateHandlerCollection aggregates) { - this.aggregates = aggregates; - } + public void setAggregates(final AggregateHandlerCollection aggregates) { + this.aggregates = aggregates; + } - public AggregateHandlerCollection getAggregates() { - return aggregates; - } + public AggregateHandlerCollection getAggregates() { + return aggregates; + } - public void setKeyOutside(final boolean keyOutside) { - this.keyOutside = keyOutside; - } + public void setKeyOutside(final boolean keyOutside) { + this.keyOutside = keyOutside; + } - public boolean isKeyOutside() { - return keyOutside; - } + public boolean isKeyOutside() { + return keyOutside; + } - public void setGenerateThumbnail(final boolean generateThumbnail) { - this.generateThumbnail = generateThumbnail; - } + public void setGenerateThumbnail(final boolean 
generateThumbnail) { + this.generateThumbnail = generateThumbnail; + } - public boolean isGenerateThumbnail() { - return generateThumbnail; - } + public boolean isGenerateThumbnail() { + return generateThumbnail; + } - public int getYRangeMin() { - return yRangeMin; - } + public int getYRangeMin() { + return yRangeMin; + } - public void setYRangeMin(final int yRangeMin) { - this.yRangeMin = yRangeMin; - } + public void setYRangeMin(final int yRangeMin) { + this.yRangeMin = yRangeMin; + } - public int getYRangeMax() { - return yRangeMax; - } + public int getYRangeMax() { + return yRangeMax; + } - public void setYRangeMax(final int yRangeMax) { - this.yRangeMax = yRangeMax; - } + public void setYRangeMax(final int yRangeMax) { + this.yRangeMax = yRangeMax; + } - public TimeRangeUnitInternal getYRangeUnit() { - return yRangeUnit; - } + public TimeRangeUnitInternal getYRangeUnit() { + return yRangeUnit; + } - public void setYRangeUnit(final TimeRangeUnitInternal yRangeUnit) { - this.yRangeUnit = yRangeUnit; - } + public void setYRangeUnit(final TimeRangeUnitInternal yRangeUnit) { + this.yRangeUnit = yRangeUnit; + } } diff --git a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/ScatterAggregateHandler.java b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/ScatterAggregateHandler.java index 9328a2e..6126a58 100644 --- a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/ScatterAggregateHandler.java +++ b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/ScatterAggregateHandler.java @@ -15,56 +15,56 @@ import org.lucares.recommind.logs.AxisSettings.Type; public class ScatterAggregateHandler extends AggregateHandler { - @Override - Type getAxisType(GnuplotAxis axis) { - switch (axis) { - case X1: - case X2: - return Type.Time; - case Y1: - case Y2: - return Type.Duration; - default: - throw new IllegalArgumentException("Unexpected value: " + axis); + @Override + Type getAxisType(GnuplotAxis axis) { + switch (axis) { + case X1: + case X2: + return Type.Time; + 
case Y1: + case Y2: + return Type.Duration; + default: + throw new IllegalArgumentException("Unexpected value: " + axis); + } } - } - - @Override - public AxisSettings createYAxisSettings(GnuplotSettings settings, Collection dataSeries) { - final AxisSettings result = AxisTime.createYAxis(settings, dataSeries); - result.setAxis(getyAxis()); - return result; - } - @Override - public AxisSettings createXAxisSettings(GnuplotSettings settings, Collection dataSeries) { - final AxisSettings result = AxisTime.createXAxis(settings); - result.setAxis(getxAxis()); - return result; - } + @Override + public AxisSettings createYAxisSettings(GnuplotSettings settings, Collection dataSeries) { + final AxisSettings result = AxisTime.createYAxis(settings, dataSeries); + result.setAxis(getyAxis()); + return result; + } - @Override - public void addPlot(StringBuilder result, AggregatedData aggregatedData, LineStyle lineStyle, - Optional title) { + @Override + public AxisSettings createXAxisSettings(GnuplotSettings settings, Collection dataSeries) { + final AxisSettings result = AxisTime.createXAxis(settings); + result.setAxis(getxAxis()); + return result; + } - appendfln(result, "'%s' using 1:2 %s with %s axes %s %s, \\", // - aggregatedData.getDataFile(), // - gnuplotTitle(title), // - GnuplotLineType.Points, // - gnuplotXYAxis(),// - lineStyle// - ); - } + @Override + public void addPlot(StringBuilder result, AggregatedData aggregatedData, LineStyle lineStyle, + Optional title) { - @Override - public CustomAggregator createCustomAggregator(Path tmpDir, PlotSettings plotSettings, long fromEpochMilli, - long toEpochMilli) { + appendfln(result, "'%s' using 1:2 %s with %s axes %s %s, \\", // + aggregatedData.getDataFile(), // + gnuplotTitle(title), // + GnuplotLineType.Points, // + gnuplotXYAxis(), // + lineStyle// + ); + } - return new ScatterAggregator(tmpDir, plotSettings, fromEpochMilli, toEpochMilli); - } + @Override + public CustomAggregator createCustomAggregator(Path tmpDir, 
PlotSettings plotSettings, long fromEpochMilli, + long toEpochMilli) { - @Override - public Aggregate getAggregateType() { - return Aggregate.SCATTER; - } + return new ScatterAggregator(tmpDir, plotSettings, fromEpochMilli, toEpochMilli); + } + + @Override + public Aggregate getAggregateType() { + return Aggregate.SCATTER; + } } diff --git a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/ScatterAggregator.java b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/ScatterAggregator.java index 5a499db..a4c4cbd 100644 --- a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/ScatterAggregator.java +++ b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/ScatterAggregator.java @@ -18,83 +18,82 @@ import org.lucares.recommind.logs.LongUtils; public class ScatterAggregator implements CustomAggregator { - private final Sparse2DLongArray matrix2d = new Sparse2DLongArray(); + private final Sparse2DLongArray matrix2d = new Sparse2DLongArray(); - private final boolean useMillis; - private final long plotAreaWidthInPx; - private final long plotAreaHeightInPx; - private final long epochMillisPerPixel; + private final boolean useMillis; + private final long plotAreaWidthInPx; + private final long plotAreaHeightInPx; + private final long epochMillisPerPixel; - private final long minValue; - private final long maxValue; - private final long durationMillisPerPixel; + private final long minValue; + private final long maxValue; + private final long durationMillisPerPixel; - private Path tmpDir; + private Path tmpDir; - public ScatterAggregator(Path tmpDir, PlotSettings plotSettings, long fromEpochMilli, long toEpochMilli) { + public ScatterAggregator(Path tmpDir, PlotSettings plotSettings, long fromEpochMilli, long toEpochMilli) { - this.tmpDir = tmpDir; - useMillis = (toEpochMilli - fromEpochMilli) < TimeUnit.MINUTES.toMillis(5); - plotAreaWidthInPx = plotSettings.getWidth() - GnuplotSettings.GNUPLOT_LEFT_RIGHT_MARGIN; - plotAreaHeightInPx = plotSettings.getHeight() - 
GnuplotSettings.GNUPLOT_TOP_BOTTOM_MARGIN; - epochMillisPerPixel = Math.max(1, (toEpochMilli - fromEpochMilli) / plotAreaWidthInPx); + this.tmpDir = tmpDir; + useMillis = (toEpochMilli - fromEpochMilli) < TimeUnit.MINUTES.toMillis(5); + plotAreaWidthInPx = plotSettings.getWidth() - GnuplotSettings.GNUPLOT_LEFT_RIGHT_MARGIN; + plotAreaHeightInPx = plotSettings.getHeight() - GnuplotSettings.GNUPLOT_TOP_BOTTOM_MARGIN; + epochMillisPerPixel = Math.max(1, (toEpochMilli - fromEpochMilli) / plotAreaWidthInPx); - minValue = plotSettings.getYRangeUnit() == TimeRangeUnitInternal.AUTOMATIC ? 0 - : plotSettings.getYRangeUnit().toMilliSeconds(plotSettings.getYRangeMin()); - maxValue = plotSettings.getYRangeUnit() == TimeRangeUnitInternal.AUTOMATIC ? Long.MAX_VALUE - : plotSettings.getYRangeUnit().toMilliSeconds(plotSettings.getYRangeMax()); - durationMillisPerPixel = plotSettings.getYAxisScale() == AxisScale.LINEAR - ? Math.max(1, (maxValue - minValue) / plotAreaHeightInPx) - : 1; - } - - @Override - public void addValue(boolean valueIsInYRange, long epochMilli, long value) { - final long roundedEpochMilli = epochMilli - epochMilli % epochMillisPerPixel; - final long roundedValue = value - value % durationMillisPerPixel; - matrix2d.put(roundedEpochMilli, roundedValue, 1); - } - - @Override - public AggregatedData getAggregatedData() throws IOException { - - final File dataFile = File.createTempFile("data", ".dat", tmpDir.toFile()); - final int separator = ','; - final int newline = '\n'; - - long[] actualValuesWritten = new long[1]; - final StringBuilder formattedDateBuilder = new StringBuilder(); - try ( - final LambdaFriendlyWriter output = new LambdaFriendlyWriter( - new BufferedWriter(new OutputStreamWriter(new FileOutputStream(dataFile), StandardCharsets.ISO_8859_1))); - final Formatter formatter = new Formatter(formattedDateBuilder);) { - - matrix2d.forEach((epochMilli, value, __) -> { - - final String stringValue = LongUtils.longToString(value); - final String 
formattedDate; - - if (useMillis) { - formattedDateBuilder.delete(0, formattedDateBuilder.length()); - formatter.format(Locale.US, "%.3f", epochMilli / 1000.0); - formattedDate = formattedDateBuilder.toString(); - } else { - formattedDate = String.valueOf(epochMilli / 1000); - } - - output.write(formattedDate); - output.write(separator); - output.write(stringValue); - output.write(newline); - actualValuesWritten[0]++; - }); + minValue = plotSettings.getYRangeUnit() == TimeRangeUnitInternal.AUTOMATIC ? 0 + : plotSettings.getYRangeUnit().toMilliSeconds(plotSettings.getYRangeMin()); + maxValue = plotSettings.getYRangeUnit() == TimeRangeUnitInternal.AUTOMATIC ? Long.MAX_VALUE + : plotSettings.getYRangeUnit().toMilliSeconds(plotSettings.getYRangeMax()); + durationMillisPerPixel = plotSettings.getYAxisScale() == AxisScale.LINEAR + ? Math.max(1, (maxValue - minValue) / plotAreaHeightInPx) + : 1; } - return new AggregatedData("scatter", dataFile); - } + @Override + public void addValue(boolean valueIsInYRange, long epochMilli, long value) { + final long roundedEpochMilli = epochMilli - epochMilli % epochMillisPerPixel; + final long roundedValue = value - value % durationMillisPerPixel; + matrix2d.put(roundedEpochMilli, roundedValue, 1); + } - @Override - public Aggregate getType() { - return Aggregate.SCATTER; - } + @Override + public AggregatedData getAggregatedData() throws IOException { + + final File dataFile = File.createTempFile("data", ".dat", tmpDir.toFile()); + final int separator = ','; + final int newline = '\n'; + + long[] actualValuesWritten = new long[1]; + final StringBuilder formattedDateBuilder = new StringBuilder(); + try (final LambdaFriendlyWriter output = new LambdaFriendlyWriter(new BufferedWriter( + new OutputStreamWriter(new FileOutputStream(dataFile), StandardCharsets.ISO_8859_1))); + final Formatter formatter = new Formatter(formattedDateBuilder);) { + + matrix2d.forEach((epochMilli, value, __) -> { + + final String stringValue = 
LongUtils.longToString(value); + final String formattedDate; + + if (useMillis) { + formattedDateBuilder.delete(0, formattedDateBuilder.length()); + formatter.format(Locale.US, "%.3f", epochMilli / 1000.0); + formattedDate = formattedDateBuilder.toString(); + } else { + formattedDate = String.valueOf(epochMilli / 1000); + } + + output.write(formattedDate); + output.write(separator); + output.write(stringValue); + output.write(newline); + actualValuesWritten[0]++; + }); + } + + return new AggregatedData("scatter", dataFile); + } + + @Override + public Aggregate getType() { + return Aggregate.SCATTER; + } } diff --git a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/TimeRangeUnitInternal.java b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/TimeRangeUnitInternal.java index 362736b..3f4ba61 100644 --- a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/TimeRangeUnitInternal.java +++ b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/TimeRangeUnitInternal.java @@ -1,24 +1,24 @@ package org.lucares.pdb.plot.api; public enum TimeRangeUnitInternal { - AUTOMATIC, MILLISECONDS, SECONDS, MINUTES, HOURS, DAYS; + AUTOMATIC, MILLISECONDS, SECONDS, MINUTES, HOURS, DAYS; - public int toMilliSeconds(final int value) { + public int toMilliSeconds(final int value) { - switch (this) { - case MILLISECONDS: - return value; - case SECONDS: - return value * 1000; - case MINUTES: - return value * 60 * 1000; - case HOURS: - return value * 60 * 60 * 1000; - case DAYS: - return value * 24 * 60 * 60 * 1000; - case AUTOMATIC: - return Integer.MAX_VALUE; - } - return Integer.MAX_VALUE; - } + switch (this) { + case MILLISECONDS: + return value; + case SECONDS: + return value * 1000; + case MINUTES: + return value * 60 * 1000; + case HOURS: + return value * 60 * 60 * 1000; + case DAYS: + return value * 24 * 60 * 60 * 1000; + case AUTOMATIC: + return Integer.MAX_VALUE; + } + return Integer.MAX_VALUE; + } } diff --git 
a/pdb-plotting/src/main/java/org/lucares/recommind/logs/AxisSettings.java b/pdb-plotting/src/main/java/org/lucares/recommind/logs/AxisSettings.java index a175cf7..99139ef 100644 --- a/pdb-plotting/src/main/java/org/lucares/recommind/logs/AxisSettings.java +++ b/pdb-plotting/src/main/java/org/lucares/recommind/logs/AxisSettings.java @@ -9,175 +9,173 @@ import com.fasterxml.jackson.databind.ObjectMapper; public class AxisSettings { - public enum Type { - Number, Time, Duration, Percent - } - - private String format = ""; - - private String label = ""; - - private int rotateLabel = 0; - - private String from; - private String to; - - private Type type = Type.Number; - - private GnuplotAxis axis = GnuplotAxis.X1; - - private double ticIncrement; - - private boolean ticsEnabled; - - private boolean logscale; - - private List ticsLabels; - - public String getFormat() { - return format; - } - - public void setFormat(String format) { - this.format = format; - } - - public String getLabel() { - return label; - } - - public void setLabel(String label) { - this.label = label; - } - - public int getRotateXAxisLabel() { - return rotateLabel; - } - - public void setRotateLabel(int rotateLabel) { - this.rotateLabel = rotateLabel; - } - - public String getFrom() { - return from; - } - - public void setFrom(String from) { - this.from = from; - } - - public String getTo() { - return to; - } - - public void setTo(String to) { - this.to = to; - } - - public Type getType() { - return type; - } - - public void setType(Type type) { - this.type = type; - } - - public GnuplotAxis getAxis() { - return axis; - } - - public void setAxis(GnuplotAxis axis) { - this.axis = axis; - } - - public void setTicIncrement(double ticIncrement) { - this.ticIncrement = ticIncrement; - } - - public double getTicIncrement() { - return ticIncrement; - } - - public void setTicsEnabled(boolean ticsEnabled) { - this.ticsEnabled = ticsEnabled; - } - - public boolean isTicsEnabled() { - return ticsEnabled; - } - - 
public void setLogscale(boolean logscale) { - this.logscale = logscale; - } - - public boolean isLogscale() { - return logscale; - } - - public void setTics(List ticsLabels) { - this.ticsLabels = ticsLabels; - } - - public List getTics() { - return ticsLabels; - } - - - public String toGnuplotDefinition(boolean renderLabels) { - StringBuilder result = new StringBuilder(); - if (type == Type.Time) { - appendfln(result, "set %sdata time", axis); + public enum Type { + Number, Time, Duration, Percent } - if (renderLabels) { - - if (ticIncrement != 0) { - appendfln(result, "set %stics %f nomirror", axis, ticIncrement); - } - else if (ticsLabels != null && ticsLabels.size() > 0) { - appendfln(result,"set %stics(%s) nomirror", axis, String.join(", ", ticsLabels)); - }else if(ticsEnabled) { - appendfln(result, "set %stics nomirror", axis); - } - - if (StringUtils.isNotBlank(format)) { - appendfln(result, "set format %s \"%s\"", axis, format); - } - - if (rotateLabel != 0) { - appendfln(result, "set %stics nomirror rotate by %d", axis, rotateLabel); - } - if (StringUtils.isNotBlank(label)) { - appendfln(result, "set %slabel \"%s\"", axis, label); - } - }else { - - appendfln(result, "set format %s \"\"", axis); - appendfln(result, "set %slabel \"\"", axis); + private String format = ""; + + private String label = ""; + + private int rotateLabel = 0; + + private String from; + private String to; + + private Type type = Type.Number; + + private GnuplotAxis axis = GnuplotAxis.X1; + + private double ticIncrement; + + private boolean ticsEnabled; + + private boolean logscale; + + private List ticsLabels; + + public String getFormat() { + return format; } - if (!StringUtils.isAllBlank(from, to)) { - final String f = StringUtils.isEmpty(from) ? "" : "\""+from+"\""; - final String t = StringUtils.isEmpty(to) ? 
"" : "\""+to+"\""; - appendfln(result, "set %srange [%s:%s]", axis, f, t); + public void setFormat(String format) { + this.format = format; } - if (logscale) { - appendfln(result, "set logscale %s", axis); - } - return result.toString(); - } - private void appendfln(final StringBuilder builder, final String format, final Object... args) { - builder.append(String.format(format + "\n", args)); - } - - @Override - public String toString() { - ObjectMapper mapper = new ObjectMapper(); - try { - return mapper.writeValueAsString(this); - } catch (JsonProcessingException e) { - return e.getMessage(); + public String getLabel() { + return label; + } + + public void setLabel(String label) { + this.label = label; + } + + public int getRotateXAxisLabel() { + return rotateLabel; + } + + public void setRotateLabel(int rotateLabel) { + this.rotateLabel = rotateLabel; + } + + public String getFrom() { + return from; + } + + public void setFrom(String from) { + this.from = from; + } + + public String getTo() { + return to; + } + + public void setTo(String to) { + this.to = to; + } + + public Type getType() { + return type; + } + + public void setType(Type type) { + this.type = type; + } + + public GnuplotAxis getAxis() { + return axis; + } + + public void setAxis(GnuplotAxis axis) { + this.axis = axis; + } + + public void setTicIncrement(double ticIncrement) { + this.ticIncrement = ticIncrement; + } + + public double getTicIncrement() { + return ticIncrement; + } + + public void setTicsEnabled(boolean ticsEnabled) { + this.ticsEnabled = ticsEnabled; + } + + public boolean isTicsEnabled() { + return ticsEnabled; + } + + public void setLogscale(boolean logscale) { + this.logscale = logscale; + } + + public boolean isLogscale() { + return logscale; + } + + public void setTics(List ticsLabels) { + this.ticsLabels = ticsLabels; + } + + public List getTics() { + return ticsLabels; + } + + public String toGnuplotDefinition(boolean renderLabels) { + StringBuilder result = new 
StringBuilder(); + if (type == Type.Time) { + appendfln(result, "set %sdata time", axis); + } + + if (renderLabels) { + + if (ticIncrement != 0) { + appendfln(result, "set %stics %f nomirror", axis, ticIncrement); + } else if (ticsLabels != null && ticsLabels.size() > 0) { + appendfln(result, "set %stics(%s) nomirror", axis, String.join(", ", ticsLabels)); + } else if (ticsEnabled) { + appendfln(result, "set %stics nomirror", axis); + } + + if (StringUtils.isNotBlank(format)) { + appendfln(result, "set format %s \"%s\"", axis, format); + } + + if (rotateLabel != 0) { + appendfln(result, "set %stics nomirror rotate by %d", axis, rotateLabel); + } + if (StringUtils.isNotBlank(label)) { + appendfln(result, "set %slabel \"%s\"", axis, label); + } + } else { + + appendfln(result, "set format %s \"\"", axis); + appendfln(result, "set %slabel \"\"", axis); + } + + if (!StringUtils.isAllBlank(from, to)) { + final String f = StringUtils.isEmpty(from) ? "" : "\"" + from + "\""; + final String t = StringUtils.isEmpty(to) ? "" : "\"" + to + "\""; + appendfln(result, "set %srange [%s:%s]", axis, f, t); + } + if (logscale) { + appendfln(result, "set logscale %s", axis); + } + return result.toString(); + } + + private void appendfln(final StringBuilder builder, final String format, final Object... 
args) { + builder.append(String.format(format + "\n", args)); + } + + @Override + public String toString() { + ObjectMapper mapper = new ObjectMapper(); + try { + return mapper.writeValueAsString(this); + } catch (JsonProcessingException e) { + return e.getMessage(); + } } - } } diff --git a/pdb-plotting/src/main/java/org/lucares/recommind/logs/AxisTime.java b/pdb-plotting/src/main/java/org/lucares/recommind/logs/AxisTime.java index fde734b..f5cb73b 100644 --- a/pdb-plotting/src/main/java/org/lucares/recommind/logs/AxisTime.java +++ b/pdb-plotting/src/main/java/org/lucares/recommind/logs/AxisTime.java @@ -11,116 +11,89 @@ import org.lucares.pdb.plot.api.AxisScale; import org.lucares.recommind.logs.AxisSettings.Type; public class AxisTime { - public static AxisSettings createXAxis(GnuplotSettings settings) { - AxisSettings result = new AxisSettings(); - - final OffsetDateTime minDate = settings.getDateTimeRange().getStart(); - final OffsetDateTime maxDate = settings.getDateTimeRange().getEnd(); - final String formatX; - if (minDate.until(maxDate, ChronoUnit.WEEKS) > 1) { - formatX = "%Y-%m-%d"; - } else if (minDate.until(maxDate, ChronoUnit.SECONDS) > 30) { - formatX = "%Y-%m-%d\\n%H:%M:%S"; - } else { - formatX = "%Y-%m-%d\\n%H:%M:%.3S"; - } - final String formattedMinDate = String.valueOf(minDate.toEpochSecond()); - final String formattedMaxDate = String.valueOf(maxDate.toEpochSecond()); + public static AxisSettings createXAxis(GnuplotSettings settings) { + AxisSettings result = new AxisSettings(); - result.setLabel("Time"); - result.setType(Type.Time); - result.setTicsEnabled(true); - result.setFormat(formatX); - result.setFrom(formattedMinDate); - result.setTo(formattedMaxDate); - result.setTicIncrement(computeTimeTicIncrement(settings.getWidth(), settings.getDateTimeRange())); - return result; - } - - public static AxisSettings createYAxis(GnuplotSettings settings, Collection dataSeries) { - AxisSettings result = new AxisSettings(); - 
result.setLabel("Duration"); - result.setType(Type.Duration); - result.setAxis(GnuplotAxis.Y1); - result.setTicsEnabled(true); + final OffsetDateTime minDate = settings.getDateTimeRange().getStart(); + final OffsetDateTime maxDate = settings.getDateTimeRange().getEnd(); + final String formatX; + if (minDate.until(maxDate, ChronoUnit.WEEKS) > 1) { + formatX = "%Y-%m-%d"; + } else if (minDate.until(maxDate, ChronoUnit.SECONDS) > 30) { + formatX = "%Y-%m-%d\\n%H:%M:%S"; + } else { + formatX = "%Y-%m-%d\\n%H:%M:%.3S"; + } + final String formattedMinDate = String.valueOf(minDate.toEpochSecond()); + final String formattedMaxDate = String.valueOf(maxDate.toEpochSecond()); - final int graphOffset = settings.getYAxisScale() == AxisScale.LINEAR ? 0 : 1; - if (settings.hasYRange()) { - final int min = Math.max(settings.getYRangeMin(), graphOffset); - final int max = settings.getYRangeMax(); - result.setFrom(String.valueOf(min)); - result.setTo(String.valueOf(max)); - } else { - result.setFrom(String.valueOf(graphOffset)); + result.setLabel("Time"); + result.setType(Type.Time); + result.setTicsEnabled(true); + result.setFormat(formatX); + result.setFrom(formattedMinDate); + result.setTo(formattedMaxDate); + result.setTicIncrement(computeTimeTicIncrement(settings.getWidth(), settings.getDateTimeRange())); + return result; } - result.setLogscale(settings.getYAxisScale() == AxisScale.LOG10); - - result.setTics(YAxisTicks.computeYTicks(settings, dataSeries)); + public static AxisSettings createYAxis(GnuplotSettings settings, Collection dataSeries) { + AxisSettings result = new AxisSettings(); + result.setLabel("Duration"); + result.setType(Type.Duration); + result.setAxis(GnuplotAxis.Y1); + result.setTicsEnabled(true); - return result; - } + final int graphOffset = settings.getYAxisScale() == AxisScale.LINEAR ? 
0 : 1; + if (settings.hasYRange()) { + final int min = Math.max(settings.getYRangeMin(), graphOffset); + final int max = settings.getYRangeMax(); + result.setFrom(String.valueOf(min)); + result.setTo(String.valueOf(max)); + } else { + result.setFrom(String.valueOf(graphOffset)); + } - public static double computeTimeTicIncrement(int width, DateTimeRange dateTimeRange) { - final long startEpochMilli = dateTimeRange.getStartEpochMilli(); - final long endEpochMilli = dateTimeRange.getEndEpochMilli(); - final long rangeInMs = endEpochMilli - startEpochMilli + 1; - - int widthInPx = width - GnuplotSettings.GNUPLOT_LEFT_RIGHT_MARGIN; - - final long maxLabels = Math.max(1, widthInPx / (GnuplotSettings.TICKS_FONT_SIZE * 8)); + result.setLogscale(settings.getYAxisScale() == AxisScale.LOG10); - final long tickIncrement = roundToTickIncrement(rangeInMs / maxLabels); - return tickIncrement/1000.0; - } - - private static long roundToTickIncrement(long milliseconds) { - LongList increments = LongList.of( - 100, - 200, - 500, - TimeUnit.SECONDS.toMillis(1), - TimeUnit.SECONDS.toMillis(2), - TimeUnit.SECONDS.toMillis(5), - TimeUnit.SECONDS.toMillis(10), - TimeUnit.SECONDS.toMillis(15), - TimeUnit.SECONDS.toMillis(30), - TimeUnit.MINUTES.toMillis(1), - TimeUnit.MINUTES.toMillis(2), - TimeUnit.MINUTES.toMillis(5), - TimeUnit.MINUTES.toMillis(10), - TimeUnit.MINUTES.toMillis(15), - TimeUnit.MINUTES.toMillis(30), - TimeUnit.HOURS.toMillis(1), - TimeUnit.HOURS.toMillis(2), - TimeUnit.HOURS.toMillis(3), - TimeUnit.HOURS.toMillis(6), - TimeUnit.HOURS.toMillis(12), - TimeUnit.HOURS.toMillis(18), - TimeUnit.DAYS.toMillis(1), - TimeUnit.DAYS.toMillis(2), - TimeUnit.DAYS.toMillis(3), - TimeUnit.DAYS.toMillis(4), - TimeUnit.DAYS.toMillis(5), - TimeUnit.DAYS.toMillis(6), - TimeUnit.DAYS.toMillis(7), - TimeUnit.DAYS.toMillis(14), - TimeUnit.DAYS.toMillis(30), - TimeUnit.DAYS.toMillis(90), - TimeUnit.DAYS.toMillis(180), - TimeUnit.DAYS.toMillis(365), - TimeUnit.DAYS.toMillis(365*2), - 
TimeUnit.DAYS.toMillis(365*5), - TimeUnit.DAYS.toMillis(365*10), - TimeUnit.DAYS.toMillis(365*20) - ); - - for ( int i = 0; i < increments.size(); i++) { - if (increments.get(i) > milliseconds) { - return increments.get(i); - } + result.setTics(YAxisTicks.computeYTicks(settings, dataSeries)); + + return result; + } + + public static double computeTimeTicIncrement(int width, DateTimeRange dateTimeRange) { + final long startEpochMilli = dateTimeRange.getStartEpochMilli(); + final long endEpochMilli = dateTimeRange.getEndEpochMilli(); + final long rangeInMs = endEpochMilli - startEpochMilli + 1; + + int widthInPx = width - GnuplotSettings.GNUPLOT_LEFT_RIGHT_MARGIN; + + final long maxLabels = Math.max(1, widthInPx / (GnuplotSettings.TICKS_FONT_SIZE * 8)); + + final long tickIncrement = roundToTickIncrement(rangeInMs / maxLabels); + return tickIncrement / 1000.0; + } + + private static long roundToTickIncrement(long milliseconds) { + LongList increments = LongList.of(100, 200, 500, TimeUnit.SECONDS.toMillis(1), TimeUnit.SECONDS.toMillis(2), + TimeUnit.SECONDS.toMillis(5), TimeUnit.SECONDS.toMillis(10), TimeUnit.SECONDS.toMillis(15), + TimeUnit.SECONDS.toMillis(30), TimeUnit.MINUTES.toMillis(1), TimeUnit.MINUTES.toMillis(2), + TimeUnit.MINUTES.toMillis(5), TimeUnit.MINUTES.toMillis(10), TimeUnit.MINUTES.toMillis(15), + TimeUnit.MINUTES.toMillis(30), TimeUnit.HOURS.toMillis(1), TimeUnit.HOURS.toMillis(2), + TimeUnit.HOURS.toMillis(3), TimeUnit.HOURS.toMillis(6), TimeUnit.HOURS.toMillis(12), + TimeUnit.HOURS.toMillis(18), TimeUnit.DAYS.toMillis(1), TimeUnit.DAYS.toMillis(2), + TimeUnit.DAYS.toMillis(3), TimeUnit.DAYS.toMillis(4), TimeUnit.DAYS.toMillis(5), + TimeUnit.DAYS.toMillis(6), TimeUnit.DAYS.toMillis(7), TimeUnit.DAYS.toMillis(14), + TimeUnit.DAYS.toMillis(30), TimeUnit.DAYS.toMillis(90), TimeUnit.DAYS.toMillis(180), + TimeUnit.DAYS.toMillis(365), TimeUnit.DAYS.toMillis(365 * 2), TimeUnit.DAYS.toMillis(365 * 5), + TimeUnit.DAYS.toMillis(365 * 10), 
TimeUnit.DAYS.toMillis(365 * 20)); + + for (int i = 0; i < increments.size(); i++) { + if (increments.get(i) > milliseconds) { + return increments.get(i); + } + } + + return TimeUnit.DAYS.toMillis(365 * 10); } - - return TimeUnit.DAYS.toMillis(365*10); - } } diff --git a/pdb-plotting/src/main/java/org/lucares/recommind/logs/Config.java b/pdb-plotting/src/main/java/org/lucares/recommind/logs/Config.java index 6e6ff0b..02e2f5d 100644 --- a/pdb-plotting/src/main/java/org/lucares/recommind/logs/Config.java +++ b/pdb-plotting/src/main/java/org/lucares/recommind/logs/Config.java @@ -4,5 +4,5 @@ import java.nio.file.Path; import java.nio.file.Paths; public class Config { - public static final Path DATA_DIR = Paths.get("/home/andi/ws/performanceDb/db"); + public static final Path DATA_DIR = Paths.get("/home/andi/ws/performanceDb/db"); } diff --git a/pdb-plotting/src/main/java/org/lucares/recommind/logs/CsvSummary.java b/pdb-plotting/src/main/java/org/lucares/recommind/logs/CsvSummary.java index 3e6d1cb..e4195bd 100644 --- a/pdb-plotting/src/main/java/org/lucares/recommind/logs/CsvSummary.java +++ b/pdb-plotting/src/main/java/org/lucares/recommind/logs/CsvSummary.java @@ -3,52 +3,51 @@ package org.lucares.recommind.logs; import org.lucares.pdb.plot.api.AggregatedDataCollection; class CsvSummary { - private final int values; - private final long maxValue; - private final AggregatedDataCollection aggregatedData; - private final double statsAverage; - private final int plottedValues; + private final int values; + private final long maxValue; + private final AggregatedDataCollection aggregatedData; + private final double statsAverage; + private final int plottedValues; - public CsvSummary(final int values, final int plottedValues, final long maxValue, - final double statsAverage, final AggregatedDataCollection aggregatedData) { - super(); - this.values = values; - this.plottedValues = plottedValues; - this.maxValue = maxValue; - this.statsAverage = statsAverage; - 
this.aggregatedData = aggregatedData; - } + public CsvSummary(final int values, final int plottedValues, final long maxValue, final double statsAverage, + final AggregatedDataCollection aggregatedData) { + super(); + this.values = values; + this.plottedValues = plottedValues; + this.maxValue = maxValue; + this.statsAverage = statsAverage; + this.aggregatedData = aggregatedData; + } + /** + * Total number of values in the selected date range. + * + * @see CsvSummary#getPlottedValues() + * @return total number of values + */ + public int getValues() { + return values; + } - /** - * Total number of values in the selected date range. - * - * @see CsvSummary#getPlottedValues() - * @return total number of values - */ - public int getValues() { - return values; - } + /** + * Number of plotted values in the selected date range and y-range. + * + * @see CsvSummary#getValues() + * @return number of plotted values + */ + public int getPlottedValues() { + return plottedValues; + } - /** - * Number of plotted values in the selected date range and y-range. 
- * - * @see CsvSummary#getValues() - * @return number of plotted values - */ - public int getPlottedValues() { - return plottedValues; - } + public long getMaxValue() { + return maxValue; + } - public long getMaxValue() { - return maxValue; - } + public double getStatsAverage() { + return statsAverage; + } - public double getStatsAverage() { - return statsAverage; - } - - public AggregatedDataCollection getAggregatedData() { - return aggregatedData; - } + public AggregatedDataCollection getAggregatedData() { + return aggregatedData; + } } diff --git a/pdb-plotting/src/main/java/org/lucares/recommind/logs/DashTypes.java b/pdb-plotting/src/main/java/org/lucares/recommind/logs/DashTypes.java index 57d58db..49874a0 100644 --- a/pdb-plotting/src/main/java/org/lucares/recommind/logs/DashTypes.java +++ b/pdb-plotting/src/main/java/org/lucares/recommind/logs/DashTypes.java @@ -2,20 +2,20 @@ package org.lucares.recommind.logs; public enum DashTypes { - DASH_TYPE_2("2"), DASH_TYPE_3("3"), DASH_TYPE_4("4"), DASH_TYPE_5("5"), DASH_TYPE_6("6"), DASH_TYPE_DOT( - "\".\""), DASH_TYPE_DASH("\"-\""), DASH_TYPE_DOT_DASH("\"._\""), DASH_TYPE_DOT_DOT_DASH("\"..- \""); + DASH_TYPE_2("2"), DASH_TYPE_3("3"), DASH_TYPE_4("4"), DASH_TYPE_5("5"), DASH_TYPE_6("6"), DASH_TYPE_DOT("\".\""), + DASH_TYPE_DASH("\"-\""), DASH_TYPE_DOT_DASH("\"._\""), DASH_TYPE_DOT_DOT_DASH("\"..- \""); - private final String gnuplotDashType; + private final String gnuplotDashType; - private DashTypes(final String gnuplotDashType) { - this.gnuplotDashType = gnuplotDashType; - } + private DashTypes(final String gnuplotDashType) { + this.gnuplotDashType = gnuplotDashType; + } - public String toGnuplotDashType() { - return gnuplotDashType; - } + public String toGnuplotDashType() { + return gnuplotDashType; + } - static DashTypes get(final int i) { - return values()[i % values().length]; - } + static DashTypes get(final int i) { + return values()[i % values().length]; + } } diff --git 
a/pdb-plotting/src/main/java/org/lucares/recommind/logs/DataSeries.java b/pdb-plotting/src/main/java/org/lucares/recommind/logs/DataSeries.java index dcc0944..1a77f07 100644 --- a/pdb-plotting/src/main/java/org/lucares/recommind/logs/DataSeries.java +++ b/pdb-plotting/src/main/java/org/lucares/recommind/logs/DataSeries.java @@ -10,110 +10,110 @@ import org.lucares.pdb.plot.api.AggregatedDataCollection; import org.lucares.pdb.plot.api.Limit; public interface DataSeries { - public static final Comparator BY_NUMBER_OF_VALUES = (a, b) -> { - return a.getValues() - b.getValues(); - }; + public static final Comparator BY_NUMBER_OF_VALUES = (a, b) -> { + return a.getValues() - b.getValues(); + }; - public static final Comparator BY_MAX_VALUE = (a, b) -> { - final long result = a.getMaxValue() - b.getMaxValue(); - return result < 0 ? -1 : (result > 0 ? 1 : 0); - }; + public static final Comparator BY_MAX_VALUE = (a, b) -> { + final long result = a.getMaxValue() - b.getMaxValue(); + return result < 0 ? -1 : (result > 0 ? 
1 : 0); + }; - public static final Comparator BY_NAME = (a, b) -> { - return a.getTitle().compareToIgnoreCase(b.getTitle()); - }; + public static final Comparator BY_NAME = (a, b) -> { + return a.getTitle().compareToIgnoreCase(b.getTitle()); + }; - public String getIdAsString(); + public String getIdAsString(); - public int getId(); + public int getId(); - public String getTitle(); + public String getTitle(); - public int getValues(); + public int getValues(); - public int getPlottedValues(); + public int getPlottedValues(); - public long getMaxValue(); + public long getMaxValue(); - public double getAverage(); + public double getAverage(); - public void setStyle(LineStyle style); + public void setStyle(LineStyle style); - public LineStyle getStyle(); + public LineStyle getStyle(); - public AggregatedDataCollection getAggregatedData(); + public AggregatedDataCollection getAggregatedData(); - public static Map toMap(final List dataSeries) { - final Map result = new LinkedHashMap<>(); + public static Map toMap(final List dataSeries) { + final Map result = new LinkedHashMap<>(); - for (final DataSeries dataSerie : dataSeries) { + for (final DataSeries dataSerie : dataSeries) { - result.put(dataSerie.getTitle(), dataSerie.getValues()); + result.put(dataSerie.getTitle(), dataSerie.getValues()); - } + } - return result; - } + return result; + } - static Comparator getDataSeriesComparator(final Limit limitBy) { + static Comparator getDataSeriesComparator(final Limit limitBy) { - switch (limitBy) { - case MOST_VALUES: - return DataSeries.BY_NUMBER_OF_VALUES.reversed(); - case FEWEST_VALUES: - return DataSeries.BY_NUMBER_OF_VALUES; - case MAX_VALUE: - return DataSeries.BY_MAX_VALUE.reversed(); - case MIN_VALUE: - return DataSeries.BY_MAX_VALUE; - case NO_LIMIT: - return DataSeries.BY_NAME; - } - throw new IllegalStateException("unhandled enum: " + limitBy); - } + switch (limitBy) { + case MOST_VALUES: + return DataSeries.BY_NUMBER_OF_VALUES.reversed(); + case FEWEST_VALUES: 
+ return DataSeries.BY_NUMBER_OF_VALUES; + case MAX_VALUE: + return DataSeries.BY_MAX_VALUE.reversed(); + case MIN_VALUE: + return DataSeries.BY_MAX_VALUE; + case NO_LIMIT: + return DataSeries.BY_NAME; + } + throw new IllegalStateException("unhandled enum: " + limitBy); + } - static void sortAndLimit(final List dataSeries, final Limit limitBy, final int limit) { + static void sortAndLimit(final List dataSeries, final Limit limitBy, final int limit) { - dataSeries.sort(DataSeries.getDataSeriesComparator(limitBy)); + dataSeries.sort(DataSeries.getDataSeriesComparator(limitBy)); - switch (limitBy) { - case FEWEST_VALUES: - case MOST_VALUES: - case MAX_VALUE: - case MIN_VALUE: - while (dataSeries.size() > limit) { - dataSeries.remove(limit); - } - break; - case NO_LIMIT: - } - } + switch (limitBy) { + case FEWEST_VALUES: + case MOST_VALUES: + case MAX_VALUE: + case MIN_VALUE: + while (dataSeries.size() > limit) { + dataSeries.remove(limit); + } + break; + case NO_LIMIT: + } + } - static void setColors(final List dataSeries) { + static void setColors(final List dataSeries) { - int i = 0; + int i = 0; - for (final DataSeries dataSerie : dataSeries) { + for (final DataSeries dataSerie : dataSeries) { - final int numColors = GnuplotColorPalettes.DEFAULT.size(); + final int numColors = GnuplotColorPalettes.DEFAULT.size(); - final GnuplotColor color = GnuplotColorPalettes.DEFAULT.get(i % numColors); + final GnuplotColor color = GnuplotColorPalettes.DEFAULT.get(i % numColors); - final DashTypes dashType = DashTypes.get(i / numColors); - final LineStyle lineStyle = new LineStyle(color, dashType); - dataSerie.setStyle(lineStyle); - i++; - } - } + final DashTypes dashType = DashTypes.get(i / numColors); + final LineStyle lineStyle = new LineStyle(color, dashType); + dataSerie.setStyle(lineStyle); + i++; + } + } - public static long maxValue(final Collection dataSeries) { - long result = 0; + public static long maxValue(final Collection dataSeries) { + long result = 0; - for 
(final DataSeries series : dataSeries) { - result = Math.max(result, series.getMaxValue()); - } + for (final DataSeries series : dataSeries) { + result = Math.max(result, series.getMaxValue()); + } - return result; - } + return result; + } } diff --git a/pdb-plotting/src/main/java/org/lucares/recommind/logs/FileBackedDataSeries.java b/pdb-plotting/src/main/java/org/lucares/recommind/logs/FileBackedDataSeries.java index daf86f5..2f34d5b 100644 --- a/pdb-plotting/src/main/java/org/lucares/recommind/logs/FileBackedDataSeries.java +++ b/pdb-plotting/src/main/java/org/lucares/recommind/logs/FileBackedDataSeries.java @@ -4,69 +4,67 @@ import org.lucares.pdb.plot.api.AggregatedDataCollection; public class FileBackedDataSeries implements DataSeries { - private final String title; + private final String title; - private final CsvSummary csvSummary; + private final CsvSummary csvSummary; - private final int id; + private final int id; - private LineStyle style; + private LineStyle style; - public FileBackedDataSeries(final int id, final String title, final CsvSummary csvSummary - ) { - this.id = id; - this.title = title; - this.csvSummary = csvSummary; - } + public FileBackedDataSeries(final int id, final String title, final CsvSummary csvSummary) { + this.id = id; + this.title = title; + this.csvSummary = csvSummary; + } - @Override - public String getIdAsString() { - return "id" + id; - } + @Override + public String getIdAsString() { + return "id" + id; + } - @Override - public int getId() { - return id; - } + @Override + public int getId() { + return id; + } - @Override - public void setStyle(final LineStyle style) { - this.style = style; - } + @Override + public void setStyle(final LineStyle style) { + this.style = style; + } - @Override - public LineStyle getStyle() { - return style; - } + @Override + public LineStyle getStyle() { + return style; + } + @Override + public String getTitle() { + return title; + } - @Override - public String getTitle() { - return title; - } 
+ @Override + public int getValues() { + return csvSummary.getValues(); + } - @Override - public int getValues() { - return csvSummary.getValues(); - } + @Override + public int getPlottedValues() { + return csvSummary.getPlottedValues(); + } - @Override - public int getPlottedValues() { - return csvSummary.getPlottedValues(); - } + @Override + public long getMaxValue() { + return csvSummary.getMaxValue(); + } - @Override - public long getMaxValue() { - return csvSummary.getMaxValue(); - } + @Override + public double getAverage() { + return csvSummary.getStatsAverage(); + } - @Override - public double getAverage() { - return csvSummary.getStatsAverage(); - } - - @Override - public AggregatedDataCollection getAggregatedData() { - return csvSummary.getAggregatedData(); - } + @Override + public AggregatedDataCollection getAggregatedData() { + return csvSummary.getAggregatedData(); + } } diff --git a/pdb-plotting/src/main/java/org/lucares/recommind/logs/Gnuplot.java b/pdb-plotting/src/main/java/org/lucares/recommind/logs/Gnuplot.java index 331bf70..4959601 100644 --- a/pdb-plotting/src/main/java/org/lucares/recommind/logs/Gnuplot.java +++ b/pdb-plotting/src/main/java/org/lucares/recommind/logs/Gnuplot.java @@ -18,95 +18,95 @@ import org.slf4j.LoggerFactory; public class Gnuplot { - private static final Logger LOGGER = LoggerFactory.getLogger(Gnuplot.class); - private static final Logger METRICS_LOGGER = LoggerFactory.getLogger("org.lucares.metrics.gnuplot"); + private static final Logger LOGGER = LoggerFactory.getLogger(Gnuplot.class); + private static final Logger METRICS_LOGGER = LoggerFactory.getLogger("org.lucares.metrics.gnuplot"); - private static final String ENV_GNUPLOT_HOME = "GNUPLOT_HOME"; - private static final String PROPERTY_GNUPLOT_HOME = "gnuplot.home"; - private final Path tmpDirectory; + private static final String ENV_GNUPLOT_HOME = "GNUPLOT_HOME"; + private static final String PROPERTY_GNUPLOT_HOME = "gnuplot.home"; + private final Path tmpDirectory; 
- // This would be bad style if this code was executed in a web-container, because - // it would cause a memory leak. - // But this code is only (and will only) be executed as standalone application. - private static final ExecutorService POOL = Executors.newCachedThreadPool(); + // This would be bad style if this code was executed in a web-container, because + // it would cause a memory leak. + // But this code is only (and will only) be executed as standalone application. + private static final ExecutorService POOL = Executors.newCachedThreadPool(); - public Gnuplot(final Path tmpDirectory) { - this.tmpDirectory = tmpDirectory; - } + public Gnuplot(final Path tmpDirectory) { + this.tmpDirectory = tmpDirectory; + } - public void plot(final GnuplotSettings settings, final Collection dataSeries) - throws IOException, InterruptedException { + public void plot(final GnuplotSettings settings, final Collection dataSeries) + throws IOException, InterruptedException { - final GnuplotFileGenerator generator = new GnuplotFileGenerator(); + final GnuplotFileGenerator generator = new GnuplotFileGenerator(); - final String gnuplotFileContent = generator.generate(settings, dataSeries); - LOGGER.debug(gnuplotFileContent); + final String gnuplotFileContent = generator.generate(settings, dataSeries); + LOGGER.debug(gnuplotFileContent); - final File gnuplotFile = File.createTempFile("gnuplot", ".dem", tmpDirectory.toFile()); - Files.writeString(gnuplotFile.toPath(), gnuplotFileContent, StandardCharsets.UTF_8); + final File gnuplotFile = File.createTempFile("gnuplot", ".dem", tmpDirectory.toFile()); + Files.writeString(gnuplotFile.toPath(), gnuplotFileContent, StandardCharsets.UTF_8); - final long start = System.nanoTime(); + final long start = System.nanoTime(); - try { - final ProcessBuilder processBuilder = new ProcessBuilder(gnuplotBinary(), gnuplotFile.getAbsolutePath())// - .redirectOutput(Redirect.PIPE)// - .redirectError(Redirect.PIPE); + try { + final ProcessBuilder 
processBuilder = new ProcessBuilder(gnuplotBinary(), gnuplotFile.getAbsolutePath())// + .redirectOutput(Redirect.PIPE)// + .redirectError(Redirect.PIPE); - final Process process = processBuilder.start(); - logOutput("stderr", process.getErrorStream()); - logOutput("stdout", process.getInputStream()); - process.waitFor(); - } catch (final IOException e) { - if (e.getMessage().contains("No such file or directory")) { - throw new IOException("Did not find gnuplot. Add it to the 'PATH' or create an environment variable '" - + ENV_GNUPLOT_HOME + "' or add the java property '" + PROPERTY_GNUPLOT_HOME + "': " - + e.getMessage(), e); - } else { - throw e; - } - } + final Process process = processBuilder.start(); + logOutput("stderr", process.getErrorStream()); + logOutput("stdout", process.getInputStream()); + process.waitFor(); + } catch (final IOException e) { + if (e.getMessage().contains("No such file or directory")) { + throw new IOException("Did not find gnuplot. Add it to the 'PATH' or create an environment variable '" + + ENV_GNUPLOT_HOME + "' or add the java property '" + PROPERTY_GNUPLOT_HOME + "': " + + e.getMessage(), e); + } else { + throw e; + } + } - METRICS_LOGGER.debug("gnuplot: {}ms", (System.nanoTime() - start) / 1_000_000.0); - } + METRICS_LOGGER.debug("gnuplot: {}ms", (System.nanoTime() - start) / 1_000_000.0); + } - private void logOutput(final String humanReadableType, final InputStream stream) throws IOException { + private void logOutput(final String humanReadableType, final InputStream stream) throws IOException { - POOL.submit(() -> { - try { - final BufferedReader reader = new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8)); - String line; - while ((line = reader.readLine()) != null) { - LOGGER.info("gnuplot {}: {}", humanReadableType, line); - } - } catch (final Exception e) { - LOGGER.warn("Exception while reading " + humanReadableType + " of gnuplot command", e); - } - }); - } + POOL.submit(() -> { + try { + final 
BufferedReader reader = new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8)); + String line; + while ((line = reader.readLine()) != null) { + LOGGER.info("gnuplot {}: {}", humanReadableType, line); + } + } catch (final Exception e) { + LOGGER.warn("Exception while reading " + humanReadableType + " of gnuplot command", e); + } + }); + } - private String gnuplotBinary() { + private String gnuplotBinary() { - if (System.getProperty(PROPERTY_GNUPLOT_HOME) != null) { + if (System.getProperty(PROPERTY_GNUPLOT_HOME) != null) { - if (isWindows()) { - return System.getProperty(PROPERTY_GNUPLOT_HOME) + "\\bin\\gnuplot.exe"; - } else { - return System.getProperty(PROPERTY_GNUPLOT_HOME) + "/bin/gnuplot"; - } - } - if (System.getenv(ENV_GNUPLOT_HOME) != null) { + if (isWindows()) { + return System.getProperty(PROPERTY_GNUPLOT_HOME) + "\\bin\\gnuplot.exe"; + } else { + return System.getProperty(PROPERTY_GNUPLOT_HOME) + "/bin/gnuplot"; + } + } + if (System.getenv(ENV_GNUPLOT_HOME) != null) { - if (isWindows()) { - return System.getenv(ENV_GNUPLOT_HOME) + "\\bin\\gnuplot.exe"; - } else { - return System.getenv(ENV_GNUPLOT_HOME) + "/bin/gnuplot"; - } - } + if (isWindows()) { + return System.getenv(ENV_GNUPLOT_HOME) + "\\bin\\gnuplot.exe"; + } else { + return System.getenv(ENV_GNUPLOT_HOME) + "/bin/gnuplot"; + } + } - return "gnuplot"; - } + return "gnuplot"; + } - private boolean isWindows() { - return System.getProperty("os.name").toLowerCase().startsWith("windows"); - } + private boolean isWindows() { + return System.getProperty("os.name").toLowerCase().startsWith("windows"); + } } diff --git a/pdb-plotting/src/main/java/org/lucares/recommind/logs/GnuplotAxis.java b/pdb-plotting/src/main/java/org/lucares/recommind/logs/GnuplotAxis.java index b60d2c5..73aa6c3 100644 --- a/pdb-plotting/src/main/java/org/lucares/recommind/logs/GnuplotAxis.java +++ b/pdb-plotting/src/main/java/org/lucares/recommind/logs/GnuplotAxis.java @@ -1,43 +1,43 @@ package 
org.lucares.recommind.logs; public enum GnuplotAxis { - X1("x", "x1"), + X1("x", "x1"), - X2("x2", "x2"), + X2("x2", "x2"), - Y1("y", "y1"), + Y1("y", "y1"), - Y2("y2", "y2"); + Y2("y2", "y2"); - private String axis; - private String axisNameForPlots; + private String axis; + private String axisNameForPlots; - private GnuplotAxis(String axis, String axisNameForPlots) { - this.axis = axis; - this.axisNameForPlots = axisNameForPlots; - } - - @Override - public String toString() { - return axis; - } - - public String getAxisNameForPlots() { - return axisNameForPlots; - } - - public GnuplotAxis mirrorAxis() { - switch (this) { - case X1: - return X2; - case X2: - return X1; - case Y1: - return Y2; - case Y2: - return Y1; - default: - throw new IllegalArgumentException("Unexpected value: " + this); + private GnuplotAxis(String axis, String axisNameForPlots) { + this.axis = axis; + this.axisNameForPlots = axisNameForPlots; + } + + @Override + public String toString() { + return axis; + } + + public String getAxisNameForPlots() { + return axisNameForPlots; + } + + public GnuplotAxis mirrorAxis() { + switch (this) { + case X1: + return X2; + case X2: + return X1; + case Y1: + return Y2; + case Y2: + return Y1; + default: + throw new IllegalArgumentException("Unexpected value: " + this); + } } - } } diff --git a/pdb-plotting/src/main/java/org/lucares/recommind/logs/GnuplotColor.java b/pdb-plotting/src/main/java/org/lucares/recommind/logs/GnuplotColor.java index af17cf4..318aa28 100644 --- a/pdb-plotting/src/main/java/org/lucares/recommind/logs/GnuplotColor.java +++ b/pdb-plotting/src/main/java/org/lucares/recommind/logs/GnuplotColor.java @@ -3,54 +3,53 @@ package org.lucares.recommind.logs; import java.awt.Color; public class GnuplotColor { - private final String color; // hex: 00efcc + private final String color; // hex: 00efcc - private GnuplotColor(String color) { - this.color = color; - } + private GnuplotColor(String color) { + this.color = color; + } - public static 
GnuplotColor byHex(String aHex) { - return new GnuplotColor(aHex); - } + public static GnuplotColor byHex(String aHex) { + return new GnuplotColor(aHex); + } - public static GnuplotColor byAwtColor(Color color) { + public static GnuplotColor byAwtColor(Color color) { - final String hex = String.format("%02x%02x%02x",// - color.getRed(),// - color.getGreen(),// - color.getBlue()// - ); + final String hex = String.format("%02x%02x%02x", // + color.getRed(), // + color.getGreen(), // + color.getBlue()// + ); - return new GnuplotColor(hex); - } + return new GnuplotColor(hex); + } - public String getColor() { - return "rgb \"#" + color + "\""; - } + public String getColor() { + return "rgb \"#" + color + "\""; + } - @Override - public String toString() { - return getColor(); - } + @Override + public String toString() { + return getColor(); + } - Color toAwtColor() { - int red = Integer.parseInt(color.substring(0, 2), 16); - int green = Integer.parseInt(color.substring(2, 4), 16); - int blue = Integer.parseInt(color.substring(4, 6), 16); - return new Color(red, green, blue); - } + Color toAwtColor() { + int red = Integer.parseInt(color.substring(0, 2), 16); + int green = Integer.parseInt(color.substring(2, 4), 16); + int blue = Integer.parseInt(color.substring(4, 6), 16); + return new Color(red, green, blue); + } - public GnuplotColor brighter() { + public GnuplotColor brighter() { - final Color brighterColor = toAwtColor().brighter(); - - - return byAwtColor(brighterColor); - } + final Color brighterColor = toAwtColor().brighter(); - public GnuplotColor darker() { + return byAwtColor(brighterColor); + } - final Color darkerColor = toAwtColor().darker(); - return byAwtColor(darkerColor); - } + public GnuplotColor darker() { + + final Color darkerColor = toAwtColor().darker(); + return byAwtColor(darkerColor); + } } diff --git a/pdb-plotting/src/main/java/org/lucares/recommind/logs/GnuplotColorPalettes.java 
b/pdb-plotting/src/main/java/org/lucares/recommind/logs/GnuplotColorPalettes.java index 7f5e5a7..a5859f1 100644 --- a/pdb-plotting/src/main/java/org/lucares/recommind/logs/GnuplotColorPalettes.java +++ b/pdb-plotting/src/main/java/org/lucares/recommind/logs/GnuplotColorPalettes.java @@ -5,52 +5,50 @@ import java.util.List; public interface GnuplotColorPalettes { - - - /** - * - *

#9400D3
- *
#009e73
- *
#56b4e9
- *
#e69f00
- *
#f0e442
- *
#0072b2
- *
#e51e10
- *
#FF69B4
- */ + /** + * + *
#9400D3
+ *
#009e73
+ *
#56b4e9
+ *
#e69f00
+ *
#f0e442
+ *
#0072b2
+ *
#e51e10
+ *
#FF69B4
+ */ - List GNUPLOT = Arrays.asList(// - GnuplotColor.byHex("9400D3"), // purple - GnuplotColor.byHex("009e73"), // green - GnuplotColor.byHex("56b4e9"), // light blue - GnuplotColor.byHex("e69f00"), // orange - GnuplotColor.byHex("f0e442"), // yellow - GnuplotColor.byHex("0072b2"), // blue - GnuplotColor.byHex("e51e10"), // red - GnuplotColor.byHex("FF69B4")// magenta -); - - List GNUPLOT_REORDERED = Arrays.asList(// - GnuplotColor.byHex("0072b2"), // blue - GnuplotColor.byHex("e69f00"), // orange - GnuplotColor.byHex("9400D3"), //purple - GnuplotColor.byHex("009e73"), //green + List GNUPLOT = Arrays.asList(// + GnuplotColor.byHex("9400D3"), // purple + GnuplotColor.byHex("009e73"), // green + GnuplotColor.byHex("56b4e9"), // light blue + GnuplotColor.byHex("e69f00"), // orange + GnuplotColor.byHex("f0e442"), // yellow + GnuplotColor.byHex("0072b2"), // blue + GnuplotColor.byHex("e51e10"), // red + GnuplotColor.byHex("FF69B4")// magenta + ); + + List GNUPLOT_REORDERED = Arrays.asList(// + GnuplotColor.byHex("0072b2"), // blue + GnuplotColor.byHex("e69f00"), // orange + GnuplotColor.byHex("9400D3"), // purple + GnuplotColor.byHex("009e73"), // green GnuplotColor.byHex("f0e442"), // yellow GnuplotColor.byHex("e51e10"), // red GnuplotColor.byHex("56b4e9"), // lightblue GnuplotColor.byHex("FF69B4")// magenta ); - - /** - *
#1f77b4
*
#ff7f0e
@@ -63,23 +61,21 @@ public interface GnuplotColorPalettes { *
#b3df72
*
#feffbe
- * -- + * 8px;">#feffbe -- *
#4660ff
- * - * - */ - List MATPLOTLIB = Arrays.asList(// - GnuplotColor.byHex("1f77b4"), // blue - GnuplotColor.byHex("ff7f0e"), // orange - GnuplotColor.byHex("d62728"), // red - GnuplotColor.byHex("2ca02c"), // green - GnuplotColor.byHex("fdbb6c"), // light orange - GnuplotColor.byHex("b3df72"), // light green - GnuplotColor.byHex("feffbe")// light yellow -); - - - List DEFAULT = GNUPLOT_REORDERED; + * + * + */ + List MATPLOTLIB = Arrays.asList(// + GnuplotColor.byHex("1f77b4"), // blue + GnuplotColor.byHex("ff7f0e"), // orange + GnuplotColor.byHex("d62728"), // red + GnuplotColor.byHex("2ca02c"), // green + GnuplotColor.byHex("fdbb6c"), // light orange + GnuplotColor.byHex("b3df72"), // light green + GnuplotColor.byHex("feffbe")// light yellow + ); + + List DEFAULT = GNUPLOT_REORDERED; } diff --git a/pdb-plotting/src/main/java/org/lucares/recommind/logs/GnuplotFileGenerator.java b/pdb-plotting/src/main/java/org/lucares/recommind/logs/GnuplotFileGenerator.java index 63e0778..e1075c6 100644 --- a/pdb-plotting/src/main/java/org/lucares/recommind/logs/GnuplotFileGenerator.java +++ b/pdb-plotting/src/main/java/org/lucares/recommind/logs/GnuplotFileGenerator.java @@ -9,72 +9,72 @@ import org.slf4j.LoggerFactory; public class GnuplotFileGenerator implements Appender { - private static final Logger LOGGER = LoggerFactory.getLogger(GnuplotFileGenerator.class); + private static final Logger LOGGER = LoggerFactory.getLogger(GnuplotFileGenerator.class); - private static final int KEY_FONT_SIZE = 10; + private static final int KEY_FONT_SIZE = 10; - public String generate(final GnuplotSettings settings, final Collection dataSeries) { + public String generate(final GnuplotSettings settings, final Collection dataSeries) { - final StringBuilder result = new StringBuilder(); + final StringBuilder result = new StringBuilder(); - appendfln(result, "set terminal %s noenhanced size %d,%d", settings.getTerminal(), settings.getWidth(), - settings.getHeight()); + appendfln(result, "set 
terminal %s noenhanced size %d,%d", settings.getTerminal(), settings.getWidth(), + settings.getHeight()); - appendfln(result, "set datafile separator \"%s\"", settings.getDatafileSeparator()); - appendfln(result, "set timefmt '%s'", settings.getTimefmt()); + appendfln(result, "set datafile separator \"%s\"", settings.getDatafileSeparator()); + appendfln(result, "set timefmt '%s'", settings.getTimefmt()); - final List xAxisDefinitions = settings.getAggregates().getXAxisDefinitions(settings, dataSeries); - for (AxisSettings axisSettings : xAxisDefinitions) { - appendln(result, axisSettings.toGnuplotDefinition(settings.isRenderLabels())); + final List xAxisDefinitions = settings.getAggregates().getXAxisDefinitions(settings, dataSeries); + for (AxisSettings axisSettings : xAxisDefinitions) { + appendln(result, axisSettings.toGnuplotDefinition(settings.isRenderLabels())); + } + + final List yAxisDefinitions = settings.getAggregates().getYAxisDefinitions(settings, dataSeries); + if (dataSeries.isEmpty()) { + // If there is no data, then Gnuplot won't generate an image. + // Workaround is to explicitly specify the y-axis range. + // We choose a range for which no ticks are defined. This creates an empty + // y-axis. 
+ yAxisDefinitions.forEach(s -> s.setFrom("0")); + yAxisDefinitions.forEach(s -> s.setFrom("-1")); + } + for (AxisSettings axisSettings : yAxisDefinitions) { + appendln(result, axisSettings.toGnuplotDefinition(settings.isRenderLabels())); + } + + appendfln(result, "set grid"); + appendfln(result, "set output \"%s\"", settings.getOutput().toAbsolutePath().toString().replace("\\", "/")); + + appendfln(result, "set key font \",%d\"", KEY_FONT_SIZE); + appendfln(result, "set tics font \",%d\"", GnuplotSettings.TICKS_FONT_SIZE); + + if (!settings.isRenderLabels()) { + + appendln(result, "set nokey"); + } else { + if (settings.isKeyOutside()) { + appendfln(result, "set key outside"); + } else { + + // make sure left and right margins are always the same + // this is need to be able to zoom in by selecting a region + // (horizontal: 1 unit = 10px; vertical: 1 unit = 19px) + appendln(result, "set lmargin 11"); // margin 11 -> 110px + appendln(result, "set rmargin 11"); // margin 11 -> 110px + appendln(result, "set tmargin 3"); // margin 3 -> 57px - marker (1) + appendln(result, "set bmargin 4"); // margin 4 -> 76 + } + } + + appendf(result, "plot "); + + settings.getAggregates().addPlots(result, dataSeries); + + // Add a plot outside of the visible range. Without this gnuplot would not + // render images when there are not data points on it. + appendf(result, "-1 with lines notitle"); + + LOGGER.info("{}", result); + + return result.toString(); } - - final List yAxisDefinitions = settings.getAggregates().getYAxisDefinitions(settings, dataSeries); - if (dataSeries.isEmpty()) { - // If there is no data, then Gnuplot won't generate an image. - // Workaround is to explicitly specify the y-axis range. - // We choose a range for which no ticks are defined. This creates an empty - // y-axis. 
- yAxisDefinitions.forEach(s -> s.setFrom("0")); - yAxisDefinitions.forEach(s -> s.setFrom("-1")); - } - for (AxisSettings axisSettings : yAxisDefinitions) { - appendln(result, axisSettings.toGnuplotDefinition(settings.isRenderLabels())); - } - - appendfln(result, "set grid"); - appendfln(result, "set output \"%s\"", settings.getOutput().toAbsolutePath().toString().replace("\\", "/")); - - appendfln(result, "set key font \",%d\"", KEY_FONT_SIZE); - appendfln(result, "set tics font \",%d\"", GnuplotSettings.TICKS_FONT_SIZE); - - if (!settings.isRenderLabels()) { - - appendln(result, "set nokey"); - } else { - if (settings.isKeyOutside()) { - appendfln(result, "set key outside"); - } else { - - // make sure left and right margins are always the same - // this is need to be able to zoom in by selecting a region - // (horizontal: 1 unit = 10px; vertical: 1 unit = 19px) - appendln(result, "set lmargin 11"); // margin 11 -> 110px - appendln(result, "set rmargin 11"); // margin 11 -> 110px - appendln(result, "set tmargin 3"); // margin 3 -> 57px - marker (1) - appendln(result, "set bmargin 4"); // margin 4 -> 76 - } - } - - appendf(result, "plot "); - - settings.getAggregates().addPlots(result, dataSeries); - - // Add a plot outside of the visible range. Without this gnuplot would not - // render images when there are not data points on it. 
- appendf(result, "-1 with lines notitle"); - - LOGGER.info("{}", result); - - return result.toString(); - } } diff --git a/pdb-plotting/src/main/java/org/lucares/recommind/logs/GnuplotLineType.java b/pdb-plotting/src/main/java/org/lucares/recommind/logs/GnuplotLineType.java index 427f86d..2d7fd00 100644 --- a/pdb-plotting/src/main/java/org/lucares/recommind/logs/GnuplotLineType.java +++ b/pdb-plotting/src/main/java/org/lucares/recommind/logs/GnuplotLineType.java @@ -1,20 +1,19 @@ package org.lucares.recommind.logs; public enum GnuplotLineType { - LINE("line"), - - Points("points"); - - private String gnuplotLineType; + LINE("line"), - GnuplotLineType(String gnuplotLineType) { - this.gnuplotLineType = gnuplotLineType; - } + Points("points"); + + private String gnuplotLineType; + + GnuplotLineType(String gnuplotLineType) { + this.gnuplotLineType = gnuplotLineType; + } + + @Override + public String toString() { + return gnuplotLineType; + } - @Override - public String toString() { - return gnuplotLineType; - } - } - diff --git a/pdb-plotting/src/main/java/org/lucares/recommind/logs/GnuplotSettings.java b/pdb-plotting/src/main/java/org/lucares/recommind/logs/GnuplotSettings.java index 9404136..a82c00c 100644 --- a/pdb-plotting/src/main/java/org/lucares/recommind/logs/GnuplotSettings.java +++ b/pdb-plotting/src/main/java/org/lucares/recommind/logs/GnuplotSettings.java @@ -8,148 +8,148 @@ import org.lucares.pdb.plot.api.AxisScale; public class GnuplotSettings { - public final static int GNUPLOT_LEFT_MARGIN = 110; // The left margin configured for gnuplot - public final static int GNUPLOT_RIGHT_MARGIN = 110; // The right margin configured for gnuplot - public final static int GNUPLOT_TOP_MARGIN = 57; // The top margin configured for gnuplot - public final static int GNUPLOT_BOTTOM_MARGIN = 76; // The bottom margin configured for gnuplot - public final static int GNUPLOT_TOP_BOTTOM_MARGIN = GNUPLOT_TOP_MARGIN + GNUPLOT_BOTTOM_MARGIN; - public final static int 
GNUPLOT_LEFT_RIGHT_MARGIN = GNUPLOT_LEFT_MARGIN+GNUPLOT_RIGHT_MARGIN; - public static final int TICKS_FONT_SIZE = 12; + public final static int GNUPLOT_LEFT_MARGIN = 110; // The left margin configured for gnuplot + public final static int GNUPLOT_RIGHT_MARGIN = 110; // The right margin configured for gnuplot + public final static int GNUPLOT_TOP_MARGIN = 57; // The top margin configured for gnuplot + public final static int GNUPLOT_BOTTOM_MARGIN = 76; // The bottom margin configured for gnuplot + public final static int GNUPLOT_TOP_BOTTOM_MARGIN = GNUPLOT_TOP_MARGIN + GNUPLOT_BOTTOM_MARGIN; + public final static int GNUPLOT_LEFT_RIGHT_MARGIN = GNUPLOT_LEFT_MARGIN + GNUPLOT_RIGHT_MARGIN; + public static final int TICKS_FONT_SIZE = 12; - private String terminal = "png"; - private int height = 1200; - private int width = 1600; - private String timefmt = "%s"; // time as unix epoch, but as double + private String terminal = "png"; + private int height = 1200; + private int width = 1600; + private String timefmt = "%s"; // time as unix epoch, but as double - // set datafile separator - private String datafileSeparator = ","; + // set datafile separator + private String datafileSeparator = ","; - // set output "datausage.png" - private final Path output; + // set output "datausage.png" + private final Path output; - private AxisScale yAxisScale; - private AggregateHandlerCollection aggregates; - private boolean keyOutside = false; + private AxisScale yAxisScale; + private AggregateHandlerCollection aggregates; + private boolean keyOutside = false; - private AxisSettings xAxisSettings = new AxisSettings(); - private boolean renderLabels = true; - private int yRangeMin = -1; - private int yRangeMax = -1; - private DateTimeRange dateTimeRange; + private AxisSettings xAxisSettings = new AxisSettings(); + private boolean renderLabels = true; + private int yRangeMin = -1; + private int yRangeMax = -1; + private DateTimeRange dateTimeRange; - public GnuplotSettings(final Path 
output) { - this.output = output; - } + public GnuplotSettings(final Path output) { + this.output = output; + } - public AxisSettings getxAxisSettings() { - return xAxisSettings; - } + public AxisSettings getxAxisSettings() { + return xAxisSettings; + } - public void setxAxisSettings(final AxisSettings xAxisSettings) { - this.xAxisSettings = xAxisSettings; - } + public void setxAxisSettings(final AxisSettings xAxisSettings) { + this.xAxisSettings = xAxisSettings; + } - public String getTerminal() { - return terminal; - } + public String getTerminal() { + return terminal; + } - public void setTerminal(final String terminal) { - this.terminal = terminal; - } + public void setTerminal(final String terminal) { + this.terminal = terminal; + } - public int getHeight() { - return height; - } + public int getHeight() { + return height; + } - public void setHeight(final int height) { - this.height = height; - } + public void setHeight(final int height) { + this.height = height; + } - public int getWidth() { - return width; - } + public int getWidth() { + return width; + } - public void setWidth(final int width) { - this.width = width; - } + public void setWidth(final int width) { + this.width = width; + } - public String getTimefmt() { - return timefmt; - } + public String getTimefmt() { + return timefmt; + } - public void setTimefmt(final String timefmt) { - this.timefmt = timefmt; - } + public void setTimefmt(final String timefmt) { + this.timefmt = timefmt; + } - public String getDatafileSeparator() { - return datafileSeparator; - } + public String getDatafileSeparator() { + return datafileSeparator; + } - public void setDatafileSeparator(final String datafileSeparator) { - this.datafileSeparator = datafileSeparator; - } + public void setDatafileSeparator(final String datafileSeparator) { + this.datafileSeparator = datafileSeparator; + } - public Path getOutput() { - return output; - } + public Path getOutput() { + return output; + } - public void setYAxisScale(final 
AxisScale yAxisScale) { - this.yAxisScale = yAxisScale; - } + public void setYAxisScale(final AxisScale yAxisScale) { + this.yAxisScale = yAxisScale; + } - public AxisScale getYAxisScale() { - return yAxisScale; - } + public AxisScale getYAxisScale() { + return yAxisScale; + } - public void setAggregates(final AggregateHandlerCollection aggregates) { - this.aggregates = aggregates; - } + public void setAggregates(final AggregateHandlerCollection aggregates) { + this.aggregates = aggregates; + } - public AggregateHandlerCollection getAggregates() { - return aggregates; - } + public AggregateHandlerCollection getAggregates() { + return aggregates; + } - public void setKeyOutside(final boolean keyOutside) { - this.keyOutside = keyOutside; - } + public void setKeyOutside(final boolean keyOutside) { + this.keyOutside = keyOutside; + } - public boolean isKeyOutside() { - return keyOutside; - } + public boolean isKeyOutside() { + return keyOutside; + } - public void renderLabels(final boolean renderLabels) { - this.renderLabels = renderLabels; - } + public void renderLabels(final boolean renderLabels) { + this.renderLabels = renderLabels; + } - public boolean isRenderLabels() { - return renderLabels; - } + public boolean isRenderLabels() { + return renderLabels; + } - public boolean hasYRange() { - return yRangeMin >= 0 && yRangeMax >= 0 && yRangeMin < yRangeMax; - } + public boolean hasYRange() { + return yRangeMin >= 0 && yRangeMax >= 0 && yRangeMin < yRangeMax; + } - public void setYRange(final int yRangeMin, final int yRangeMax) { - this.yRangeMin = yRangeMin; - this.yRangeMax = yRangeMax; - } + public void setYRange(final int yRangeMin, final int yRangeMax) { + this.yRangeMin = yRangeMin; + this.yRangeMax = yRangeMax; + } - public int getYRangeMin() { - return yRangeMin; - } + public int getYRangeMin() { + return yRangeMin; + } - public int getYRangeMax() { - return yRangeMax; - } + public int getYRangeMax() { + return yRangeMax; + } - public void 
setDateTimeRange(DateTimeRange dateTimeRange) { - this.dateTimeRange = dateTimeRange; - } - - public DateTimeRange getDateTimeRange() { - return dateTimeRange; - } + public void setDateTimeRange(DateTimeRange dateTimeRange) { + this.dateTimeRange = dateTimeRange; + } - // plot 'sample.txt' using 1:2 title 'Bytes' with linespoints 2 + public DateTimeRange getDateTimeRange() { + return dateTimeRange; + } + + // plot 'sample.txt' using 1:2 title 'Bytes' with linespoints 2 } diff --git a/pdb-plotting/src/main/java/org/lucares/recommind/logs/InternalPlottingException.java b/pdb-plotting/src/main/java/org/lucares/recommind/logs/InternalPlottingException.java index 7a40207..0474277 100644 --- a/pdb-plotting/src/main/java/org/lucares/recommind/logs/InternalPlottingException.java +++ b/pdb-plotting/src/main/java/org/lucares/recommind/logs/InternalPlottingException.java @@ -2,13 +2,13 @@ package org.lucares.recommind.logs; public class InternalPlottingException extends Exception { - private static final long serialVersionUID = 1L; + private static final long serialVersionUID = 1L; - public InternalPlottingException() { - super(); - } + public InternalPlottingException() { + super(); + } - public InternalPlottingException(final String message, final Throwable cause) { - super(message, cause); - } + public InternalPlottingException(final String message, final Throwable cause) { + super(message, cause); + } } diff --git a/pdb-plotting/src/main/java/org/lucares/recommind/logs/LambdaFriendlyWriter.java b/pdb-plotting/src/main/java/org/lucares/recommind/logs/LambdaFriendlyWriter.java index 0ef0e9b..5e103bf 100644 --- a/pdb-plotting/src/main/java/org/lucares/recommind/logs/LambdaFriendlyWriter.java +++ b/pdb-plotting/src/main/java/org/lucares/recommind/logs/LambdaFriendlyWriter.java @@ -5,75 +5,75 @@ import java.io.Writer; import org.lucares.pdb.api.RuntimeIOException; -public class LambdaFriendlyWriter extends Writer{ +public class LambdaFriendlyWriter extends Writer { - private 
final Writer writer; - - public LambdaFriendlyWriter(Writer writer) { - this.writer = writer; - } + private final Writer writer; - @Override - public void write(char[] cbuf, int off, int len) { - try { - writer.write(cbuf, off, len); - } catch (IOException e) { - throw new RuntimeIOException(e); - } - } - - @Override - public void write(int c) { - try { - writer.write(c); - } catch (IOException e) { - throw new RuntimeIOException(e); - } - } - - @Override - public void write(String str) { - try { - writer.write(str); - } catch (IOException e) { - throw new RuntimeIOException(e); - } - } - - @Override - public Writer append(CharSequence csq) { - try { - return writer.append(csq); - } catch (IOException e) { - throw new RuntimeIOException(e); - } - } - - @Override - public Writer append(char c) { - try { - return writer.append(c); - } catch (IOException e) { - throw new RuntimeIOException(e); - } - } + public LambdaFriendlyWriter(Writer writer) { + this.writer = writer; + } - @Override - public void flush() { - try { - writer.flush(); - } catch (IOException e) { - throw new RuntimeIOException(e); - } - } + @Override + public void write(char[] cbuf, int off, int len) { + try { + writer.write(cbuf, off, len); + } catch (IOException e) { + throw new RuntimeIOException(e); + } + } - @Override - public void close() { - try { - writer.close(); - } catch (IOException e) { - throw new RuntimeIOException(e); - } - } + @Override + public void write(int c) { + try { + writer.write(c); + } catch (IOException e) { + throw new RuntimeIOException(e); + } + } + + @Override + public void write(String str) { + try { + writer.write(str); + } catch (IOException e) { + throw new RuntimeIOException(e); + } + } + + @Override + public Writer append(CharSequence csq) { + try { + return writer.append(csq); + } catch (IOException e) { + throw new RuntimeIOException(e); + } + } + + @Override + public Writer append(char c) { + try { + return writer.append(c); + } catch (IOException e) { + throw 
new RuntimeIOException(e); + } + } + + @Override + public void flush() { + try { + writer.flush(); + } catch (IOException e) { + throw new RuntimeIOException(e); + } + } + + @Override + public void close() { + try { + writer.close(); + } catch (IOException e) { + throw new RuntimeIOException(e); + } + } } diff --git a/pdb-plotting/src/main/java/org/lucares/recommind/logs/LineStyle.java b/pdb-plotting/src/main/java/org/lucares/recommind/logs/LineStyle.java index 796b5cf..bbe766a 100644 --- a/pdb-plotting/src/main/java/org/lucares/recommind/logs/LineStyle.java +++ b/pdb-plotting/src/main/java/org/lucares/recommind/logs/LineStyle.java @@ -2,31 +2,31 @@ package org.lucares.recommind.logs; public class LineStyle { - private final GnuplotColor color; - private final DashTypes dashType; + private final GnuplotColor color; + private final DashTypes dashType; - public LineStyle(final GnuplotColor color, final DashTypes dashType) { - this.color = color; - this.dashType = dashType; - } + public LineStyle(final GnuplotColor color, final DashTypes dashType) { + this.color = color; + this.dashType = dashType; + } - public String asGnuplotLineStyle() { - return String.format("lt %s dt %s ", // - color.getColor(), // - dashType.toGnuplotDashType()// - ); - } + public String asGnuplotLineStyle() { + return String.format("lt %s dt %s ", // + color.getColor(), // + dashType.toGnuplotDashType()// + ); + } - @Override - public String toString() { - return asGnuplotLineStyle(); - } + @Override + public String toString() { + return asGnuplotLineStyle(); + } - public LineStyle brighter() { - return new LineStyle(color.brighter(), dashType); - } + public LineStyle brighter() { + return new LineStyle(color.brighter(), dashType); + } - public LineStyle darker() { - return new LineStyle(color.darker(), dashType); - } + public LineStyle darker() { + return new LineStyle(color.darker(), dashType); + } } diff --git a/pdb-plotting/src/main/java/org/lucares/recommind/logs/LongUtils.java 
b/pdb-plotting/src/main/java/org/lucares/recommind/logs/LongUtils.java index 9be22cf..18c28a6 100644 --- a/pdb-plotting/src/main/java/org/lucares/recommind/logs/LongUtils.java +++ b/pdb-plotting/src/main/java/org/lucares/recommind/logs/LongUtils.java @@ -2,23 +2,23 @@ package org.lucares.recommind.logs; public class LongUtils { - private static final int INT_TO_STRING_CACHE_SIZE = 10_000; - private static final String[] INT_TO_STRING; - static { + private static final int INT_TO_STRING_CACHE_SIZE = 10_000; + private static final String[] INT_TO_STRING; + static { - INT_TO_STRING = new String[INT_TO_STRING_CACHE_SIZE]; + INT_TO_STRING = new String[INT_TO_STRING_CACHE_SIZE]; - for (int i = 0; i < INT_TO_STRING_CACHE_SIZE; i++) { - INT_TO_STRING[i] = String.valueOf(i); - } - } + for (int i = 0; i < INT_TO_STRING_CACHE_SIZE; i++) { + INT_TO_STRING[i] = String.valueOf(i); + } + } - public static String longToString(final long value) { - // using pre-generated strings reduces memory allocation by up to 25% + public static String longToString(final long value) { + // using pre-generated strings reduces memory allocation by up to 25% - if (value < INT_TO_STRING_CACHE_SIZE) { - return INT_TO_STRING[(int) value]; - } - return String.valueOf(value); - } + if (value < INT_TO_STRING_CACHE_SIZE) { + return INT_TO_STRING[(int) value]; + } + return String.valueOf(value); + } } diff --git a/pdb-plotting/src/main/java/org/lucares/recommind/logs/NoDataPointsException.java b/pdb-plotting/src/main/java/org/lucares/recommind/logs/NoDataPointsException.java index c0e8850..b81667e 100644 --- a/pdb-plotting/src/main/java/org/lucares/recommind/logs/NoDataPointsException.java +++ b/pdb-plotting/src/main/java/org/lucares/recommind/logs/NoDataPointsException.java @@ -2,9 +2,9 @@ package org.lucares.recommind.logs; public class NoDataPointsException extends InternalPlottingException { - private static final long serialVersionUID = 1054594230615520105L; + private static final long 
serialVersionUID = 1054594230615520105L; - public NoDataPointsException() { - super(); - } + public NoDataPointsException() { + super(); + } } diff --git a/pdb-plotting/src/main/java/org/lucares/recommind/logs/PlotResult.java b/pdb-plotting/src/main/java/org/lucares/recommind/logs/PlotResult.java index caed6dc..7a8f83f 100644 --- a/pdb-plotting/src/main/java/org/lucares/recommind/logs/PlotResult.java +++ b/pdb-plotting/src/main/java/org/lucares/recommind/logs/PlotResult.java @@ -4,34 +4,34 @@ import java.nio.file.Path; import java.util.List; public class PlotResult { - private final Path imagePath; - private final List dataSeries; - private final Path thumbnail; + private final Path imagePath; + private final List dataSeries; + private final Path thumbnail; - public PlotResult(final Path imagePath, final List dataSeries, final Path thumbnail) { - super(); - this.imagePath = imagePath; - this.dataSeries = dataSeries; - this.thumbnail = thumbnail; - } + public PlotResult(final Path imagePath, final List dataSeries, final Path thumbnail) { + super(); + this.imagePath = imagePath; + this.dataSeries = dataSeries; + this.thumbnail = thumbnail; + } - public Path getImageName() { - return imagePath.getFileName(); - } + public Path getImageName() { + return imagePath.getFileName(); + } - public Path getImagePath() { - return imagePath; - } + public Path getImagePath() { + return imagePath; + } - public Path getThumbnailName() { - return thumbnail.getFileName(); - } + public Path getThumbnailName() { + return thumbnail.getFileName(); + } - public Path getThumbnailPath() { - return thumbnail; - } + public Path getThumbnailPath() { + return thumbnail; + } - public List getDataSeries() { - return dataSeries; - } + public List getDataSeries() { + return dataSeries; + } } diff --git a/pdb-plotting/src/main/java/org/lucares/recommind/logs/Plotter.java b/pdb-plotting/src/main/java/org/lucares/recommind/logs/Plotter.java index a8fcfa5..03d8ef6 100644 --- 
a/pdb-plotting/src/main/java/org/lucares/recommind/logs/Plotter.java +++ b/pdb-plotting/src/main/java/org/lucares/recommind/logs/Plotter.java @@ -32,234 +32,229 @@ import org.slf4j.LoggerFactory; public class Plotter { - private static final Logger LOGGER = LoggerFactory.getLogger(Plotter.class); - private static final Logger METRICS_LOGGER = LoggerFactory.getLogger("org.lucares.metrics.plotter.scatter"); + private static final Logger LOGGER = LoggerFactory.getLogger(Plotter.class); + private static final Logger METRICS_LOGGER = LoggerFactory.getLogger("org.lucares.metrics.plotter.scatter"); - static final String DEFAULT_GROUP = ""; + static final String DEFAULT_GROUP = ""; - private final PerformanceDb db; - private final Path tmpBaseDir; - private final Path outputDir; + private final PerformanceDb db; + private final Path tmpBaseDir; + private final Path outputDir; - public Plotter(final PerformanceDb db, final Path tmpBaseDir, final Path outputDir) { - this.db = db; - this.tmpBaseDir = tmpBaseDir; - this.outputDir = outputDir; + public Plotter(final PerformanceDb db, final Path tmpBaseDir, final Path outputDir) { + this.db = db; + this.tmpBaseDir = tmpBaseDir; + this.outputDir = outputDir; - if (!Files.isDirectory(tmpBaseDir, LinkOption.NOFOLLOW_LINKS)) { - throw new IllegalArgumentException(tmpBaseDir + " is not a directory"); + if (!Files.isDirectory(tmpBaseDir, LinkOption.NOFOLLOW_LINKS)) { + throw new IllegalArgumentException(tmpBaseDir + " is not a directory"); + } + if (!Files.isDirectory(outputDir)) { + throw new IllegalArgumentException(outputDir + " is not a directory"); + } } - if (!Files.isDirectory(outputDir)) { - throw new IllegalArgumentException(outputDir + " is not a directory"); + + public Path getOutputDir() { + return outputDir; } - } - public Path getOutputDir() { - return outputDir; - } + public PlotResult plot(final PlotSettings plotSettings) throws InternalPlottingException { - public PlotResult plot(final PlotSettings plotSettings) 
throws InternalPlottingException { + LOGGER.trace("start plot: {}", plotSettings); - LOGGER.trace("start plot: {}", plotSettings); - - final String tmpSubDir = uniqueDirectoryName(); - final Path tmpDir = tmpBaseDir.resolve(tmpSubDir); - try { - Files.createDirectories(tmpDir); - final List dataSeries = Collections.synchronizedList(new ArrayList<>()); - - final String query = plotSettings.getQuery(); - final List groupBy = plotSettings.getGroupBy(); - final int height = plotSettings.getHeight(); - final int width = plotSettings.getWidth(); - final DateTimeRange dateRange = plotSettings.dateRange(); - final OffsetDateTime dateFrom = dateRange.getStart(); - final OffsetDateTime dateTo = dateRange.getEnd(); - - final Result result = db.get(new Query(query, dateRange), groupBy); - - final long start = System.nanoTime(); - final AtomicInteger idCounter = new AtomicInteger(0); - result.getGroups().stream().parallel().forEach(groupResult -> { + final String tmpSubDir = uniqueDirectoryName(); + final Path tmpDir = tmpBaseDir.resolve(tmpSubDir); try { - final CsvSummary csvSummary = toCsvDeduplicated(groupResult, tmpDir, dateFrom, dateTo, plotSettings); + Files.createDirectories(tmpDir); + final List dataSeries = Collections.synchronizedList(new ArrayList<>()); - final int id = idCounter.incrementAndGet(); - final String title = title(groupResult.getGroupedBy(), csvSummary); - final DataSeries dataSerie = new FileBackedDataSeries(id, title, csvSummary); - if (dataSerie.getValues() > 0) { - dataSeries.add(dataSerie); - } - } catch (final Exception e) { - throw new IllegalStateException(e); + final String query = plotSettings.getQuery(); + final List groupBy = plotSettings.getGroupBy(); + final int height = plotSettings.getHeight(); + final int width = plotSettings.getWidth(); + final DateTimeRange dateRange = plotSettings.dateRange(); + final OffsetDateTime dateFrom = dateRange.getStart(); + final OffsetDateTime dateTo = dateRange.getEnd(); + + final Result result = 
db.get(new Query(query, dateRange), groupBy); + + final long start = System.nanoTime(); + final AtomicInteger idCounter = new AtomicInteger(0); + result.getGroups().stream().parallel().forEach(groupResult -> { + try { + final CsvSummary csvSummary = toCsvDeduplicated(groupResult, tmpDir, dateFrom, dateTo, + plotSettings); + + final int id = idCounter.incrementAndGet(); + final String title = title(groupResult.getGroupedBy(), csvSummary); + final DataSeries dataSerie = new FileBackedDataSeries(id, title, csvSummary); + if (dataSerie.getValues() > 0) { + dataSeries.add(dataSerie); + } + } catch (final Exception e) { + throw new IllegalStateException(e); + } + }); + METRICS_LOGGER.debug("csv generation took: " + (System.nanoTime() - start) / 1_000_000.0 + "ms"); + + final Limit limitBy = plotSettings.getLimitBy(); + final int limit = plotSettings.getLimit(); + DataSeries.sortAndLimit(dataSeries, limitBy, limit); + DataSeries.setColors(dataSeries); + + final Path outputFile = Files.createTempFile(outputDir, "", ".png"); + { + final Gnuplot gnuplot = new Gnuplot(tmpBaseDir); + final GnuplotSettings gnuplotSettings = new GnuplotSettings(outputFile); + gnuplotSettings.setHeight(height); + gnuplotSettings.setWidth(width); + gnuplotSettings.setDateTimeRange(plotSettings.dateRange()); + + gnuplotSettings.setYAxisScale(plotSettings.getYAxisScale()); + gnuplotSettings.setAggregates(plotSettings.getAggregates()); + defineYRange(gnuplotSettings, plotSettings.getYRangeMin(), plotSettings.getYRangeMax(), + plotSettings.getYRangeUnit()); + gnuplotSettings.setKeyOutside(plotSettings.isKeyOutside()); + gnuplot.plot(gnuplotSettings, dataSeries); + } + + final Path thumbnail; + if (plotSettings.isGenerateThumbnail()) { + thumbnail = Files.createTempFile(outputDir, "", ".png"); + final Gnuplot gnuplot = new Gnuplot(tmpBaseDir); + final GnuplotSettings gnuplotSettings = new GnuplotSettings(thumbnail); + gnuplotSettings.setHeight(plotSettings.getThumbnailMaxHeight()); + 
gnuplotSettings.setWidth(plotSettings.getThumbnailMaxWidth()); + gnuplotSettings.setDateTimeRange(plotSettings.dateRange()); + + gnuplotSettings.setYAxisScale(plotSettings.getYAxisScale()); + gnuplotSettings.setAggregates(plotSettings.getAggregates()); + defineYRange(gnuplotSettings, plotSettings.getYRangeMin(), plotSettings.getYRangeMax(), + plotSettings.getYRangeUnit()); + gnuplotSettings.setKeyOutside(false); + gnuplotSettings.renderLabels(false); + gnuplot.plot(gnuplotSettings, dataSeries); + } else { + thumbnail = null; + } + + return new PlotResult(outputFile, dataSeries, thumbnail); + } catch (final InterruptedException e) { + Thread.currentThread().interrupt(); + throw new IllegalStateException("Plotting was interrupted."); + } catch (final IOException e) { + throw new InternalPlottingException("Plotting failed: " + e.getMessage(), e); + } finally { + FileUtils.delete(tmpDir); + LOGGER.trace("done plot"); } - }); - METRICS_LOGGER.debug("csv generation took: " + (System.nanoTime() - start) / 1_000_000.0 + "ms"); - - final Limit limitBy = plotSettings.getLimitBy(); - final int limit = plotSettings.getLimit(); - DataSeries.sortAndLimit(dataSeries, limitBy, limit); - DataSeries.setColors(dataSeries); - - final Path outputFile = Files.createTempFile(outputDir, "", ".png"); - { - final Gnuplot gnuplot = new Gnuplot(tmpBaseDir); - final GnuplotSettings gnuplotSettings = new GnuplotSettings(outputFile); - gnuplotSettings.setHeight(height); - gnuplotSettings.setWidth(width); - gnuplotSettings.setDateTimeRange(plotSettings.dateRange()); - - gnuplotSettings.setYAxisScale(plotSettings.getYAxisScale()); - gnuplotSettings.setAggregates(plotSettings.getAggregates()); - defineYRange(gnuplotSettings, plotSettings.getYRangeMin(), plotSettings.getYRangeMax(), - plotSettings.getYRangeUnit()); - gnuplotSettings.setKeyOutside(plotSettings.isKeyOutside()); - gnuplot.plot(gnuplotSettings, dataSeries); - } - - final Path thumbnail; - if (plotSettings.isGenerateThumbnail()) { - 
thumbnail = Files.createTempFile(outputDir, "", ".png"); - final Gnuplot gnuplot = new Gnuplot(tmpBaseDir); - final GnuplotSettings gnuplotSettings = new GnuplotSettings(thumbnail); - gnuplotSettings.setHeight(plotSettings.getThumbnailMaxHeight()); - gnuplotSettings.setWidth(plotSettings.getThumbnailMaxWidth()); - gnuplotSettings.setDateTimeRange(plotSettings.dateRange()); - - gnuplotSettings.setYAxisScale(plotSettings.getYAxisScale()); - gnuplotSettings.setAggregates(plotSettings.getAggregates()); - defineYRange(gnuplotSettings, plotSettings.getYRangeMin(), plotSettings.getYRangeMax(), - plotSettings.getYRangeUnit()); - gnuplotSettings.setKeyOutside(false); - gnuplotSettings.renderLabels(false); - gnuplot.plot(gnuplotSettings, dataSeries); - } else { - thumbnail = null; - } - - return new PlotResult(outputFile, dataSeries, thumbnail); - } catch (final InterruptedException e) { - Thread.currentThread().interrupt(); - throw new IllegalStateException("Plotting was interrupted."); - } catch (final IOException e) { - throw new InternalPlottingException("Plotting failed: " + e.getMessage(), e); - } finally { - FileUtils.delete(tmpDir); - LOGGER.trace("done plot"); } - } - private void defineYRange(final GnuplotSettings gnuplotSettings, final int yRangeMin, final int yRangeMax, - final TimeRangeUnitInternal yRangeUnit) { + private void defineYRange(final GnuplotSettings gnuplotSettings, final int yRangeMin, final int yRangeMax, + final TimeRangeUnitInternal yRangeUnit) { - if (yRangeUnit != TimeRangeUnitInternal.AUTOMATIC) { - final int min = yRangeUnit.toMilliSeconds(yRangeMin); - final int max = yRangeUnit.toMilliSeconds(yRangeMax); - gnuplotSettings.setYRange(min, max); + if (yRangeUnit != TimeRangeUnitInternal.AUTOMATIC) { + final int min = yRangeUnit.toMilliSeconds(yRangeMin); + final int max = yRangeUnit.toMilliSeconds(yRangeMax); + gnuplotSettings.setYRange(min, max); + } } - } - + private static CsvSummary toCsvDeduplicated(final GroupResult groupResult, final 
Path tmpDir, + final OffsetDateTime dateFrom, final OffsetDateTime dateTo, final PlotSettings plotSettings) + throws IOException { - private static CsvSummary toCsvDeduplicated(final GroupResult groupResult, final Path tmpDir, - final OffsetDateTime dateFrom, final OffsetDateTime dateTo, final PlotSettings plotSettings) throws IOException { + final long start = System.nanoTime(); + final Stream timeValueStream = groupResult.asStream(); + final long fromEpochMilli = dateFrom.toInstant().toEpochMilli(); + final long toEpochMilli = dateTo.toInstant().toEpochMilli(); + final boolean useMillis = (toEpochMilli - fromEpochMilli) < TimeUnit.MINUTES.toMillis(5); - final long start = System.nanoTime(); - final Stream timeValueStream = groupResult.asStream(); - final long fromEpochMilli = dateFrom.toInstant().toEpochMilli(); - final long toEpochMilli = dateTo.toInstant().toEpochMilli(); - final boolean useMillis = (toEpochMilli - fromEpochMilli) < TimeUnit.MINUTES.toMillis(5); + final long minValue = plotSettings.getYRangeUnit() == TimeRangeUnitInternal.AUTOMATIC ? 0 + : plotSettings.getYRangeUnit().toMilliSeconds(plotSettings.getYRangeMin()); + final long maxValue = plotSettings.getYRangeUnit() == TimeRangeUnitInternal.AUTOMATIC ? Long.MAX_VALUE + : plotSettings.getYRangeUnit().toMilliSeconds(plotSettings.getYRangeMax()); - final long minValue = plotSettings.getYRangeUnit() == TimeRangeUnitInternal.AUTOMATIC ? 0 - : plotSettings.getYRangeUnit().toMilliSeconds(plotSettings.getYRangeMin()); - final long maxValue = plotSettings.getYRangeUnit() == TimeRangeUnitInternal.AUTOMATIC ? 
Long.MAX_VALUE - : plotSettings.getYRangeUnit().toMilliSeconds(plotSettings.getYRangeMax()); + final AggregatorCollection aggregator = plotSettings.getAggregates().createCustomAggregator(tmpDir, + plotSettings, fromEpochMilli, toEpochMilli); - final AggregatorCollection aggregator = plotSettings.getAggregates().createCustomAggregator(tmpDir, plotSettings, fromEpochMilli, - toEpochMilli); + int count = 0; // number of values in the x-axis range (used to compute stats) + int plottedValues = 0; + long statsMaxValue = 0; + double statsCurrentAverage = 0.0; + long ignoredValues = 0; - int count = 0; // number of values in the x-axis range (used to compute stats) - int plottedValues = 0; - long statsMaxValue = 0; - double statsCurrentAverage = 0.0; - long ignoredValues = 0; + final Iterator it = timeValueStream.iterator(); + while (it.hasNext()) { + final LongList entry = it.next(); - final Iterator it = timeValueStream.iterator(); - while (it.hasNext()) { - final LongList entry = it.next(); + for (int i = 0; i < entry.size(); i += 2) { - for (int i = 0; i < entry.size(); i += 2) { + final long epochMilli = entry.get(i); + if (fromEpochMilli > epochMilli || epochMilli > toEpochMilli) { + ignoredValues++; + continue; + } - final long epochMilli = entry.get(i); - if (fromEpochMilli > epochMilli || epochMilli > toEpochMilli) { - ignoredValues++; - continue; + final long value = entry.get(i + 1); + + // compute stats + count++; + statsMaxValue = Math.max(statsMaxValue, value); + + // compute average (important to do this after 'count' has been incremented) + statsCurrentAverage = statsCurrentAverage + (value - statsCurrentAverage) / count; + + // check if value is in the selected y-range + boolean valueIsInYRange = value < minValue || value > maxValue; + if (valueIsInYRange) { + ignoredValues++; + } else { + plottedValues++; + } + + aggregator.addValue(valueIsInYRange, epochMilli, value); + } } - final long value = entry.get(i + 1); + METRICS_LOGGER.debug("wrote {} values to 
csv in: {}ms (ignored {} values) use millis: {}, grouping={}", + plottedValues, (System.nanoTime() - start) / 1_000_000.0, ignoredValues, Boolean.toString(useMillis), + groupResult.getGroupedBy().asString()); + return new CsvSummary(count, plottedValues, statsMaxValue, statsCurrentAverage, aggregator.getAggregatedData()); - // compute stats - count++; - statsMaxValue = Math.max(statsMaxValue, value); + } - // compute average (important to do this after 'count' has been incremented) - statsCurrentAverage = statsCurrentAverage + (value - statsCurrentAverage) / count; + static String uniqueDirectoryName() { + return OffsetDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd_HH_mm_ss")) + "_" + + UUID.randomUUID().toString(); + } - // check if value is in the selected y-range - boolean valueIsInYRange = value < minValue || value > maxValue; - if (valueIsInYRange) { - ignoredValues++; - }else { - plottedValues++; + static String title(final Tags tags, final CsvSummary csvSummary) { + + final StringBuilder result = new StringBuilder(); + + final int values = csvSummary.getValues(); + final int plottedValues = csvSummary.getPlottedValues(); + + if (tags.isEmpty()) { + result.append(DEFAULT_GROUP); + } else { + tags.forEach((k, v) -> { + if (result.length() > 0) { + result.append(" / "); + } + result.append(v); + }); } - - aggregator.addValue(valueIsInYRange, epochMilli, value); - } - } - - - METRICS_LOGGER.debug( - "wrote {} values to csv in: {}ms (ignored {} values) use millis: {}, grouping={}", - plottedValues, - (System.nanoTime() - start) / 1_000_000.0, ignoredValues, Boolean.toString(useMillis), - groupResult.getGroupedBy().asString()); - return new CsvSummary( count, plottedValues, statsMaxValue, statsCurrentAverage, - aggregator.getAggregatedData()); - - } - - static String uniqueDirectoryName() { - return OffsetDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd_HH_mm_ss")) + "_" - + UUID.randomUUID().toString(); - } - - static String 
title(final Tags tags, final CsvSummary csvSummary) { - - final StringBuilder result = new StringBuilder(); - - final int values = csvSummary.getValues(); - final int plottedValues = csvSummary.getPlottedValues(); - - if (tags.isEmpty()) { - result.append(DEFAULT_GROUP); - } else { - tags.forEach((k, v) -> { - if (result.length() > 0) { - result.append(" / "); + result.append(" ("); + if (plottedValues != values) { + result.append(String.format("%,d / %,d", plottedValues, values)); + } else { + result.append(String.format("%,d", values)); } - result.append(v); - }); - } + result.append(")"); - result.append(" ("); - if (plottedValues != values) { - result.append(String.format("%,d / %,d", plottedValues, values)); - } else { - result.append(String.format("%,d", values)); + return result.toString(); } - result.append(")"); - - return result.toString(); - } } diff --git a/pdb-plotting/src/main/java/org/lucares/recommind/logs/YAxisTicks.java b/pdb-plotting/src/main/java/org/lucares/recommind/logs/YAxisTicks.java index 7a3a9e8..d6c6091 100644 --- a/pdb-plotting/src/main/java/org/lucares/recommind/logs/YAxisTicks.java +++ b/pdb-plotting/src/main/java/org/lucares/recommind/logs/YAxisTicks.java @@ -13,147 +13,144 @@ import java.util.Locale; import java.util.concurrent.TimeUnit; class YAxisTicks { - - - public static List computeYTicks(final GnuplotSettings settings, final Collection dataSeries) { - List result = new ArrayList(); - final long yRangeMax; - final long yRangeMin; - if (settings.hasYRange()) { - yRangeMax = settings.getYRangeMax(); - yRangeMin = settings.getYRangeMin(); - } else { - yRangeMax = DataSeries.maxValue(dataSeries); - yRangeMin = 0; - } - final int height = settings.getHeight(); + public static List computeYTicks(final GnuplotSettings settings, final Collection dataSeries) { + List result = new ArrayList(); - switch (settings.getYAxisScale()) { - case LINEAR: - result = computeLinearYTicks(height, yRangeMin, yRangeMax); - break; - case LOG10: - 
result = computeLog10YTicks(height, yRangeMin, yRangeMax); - break; - default: - // use the default - } - return result; -} - -private static List computeLog10YTicks(final int height, final long yRangeMin, final long yRangeMax) { - - final List ticsLabels = Arrays.asList(// - "\"1ms\" 1", // - "\"2ms\" 2", // - "\"5ms\" 5", // - "\"10ms\" 10", // - "\"20ms\" 20", // - "\"50ms\" 50", // - "\"100ms\" 100", // - "\"200ms\" 200", // - "\"500ms\" 500", // - "\"1s\" 1000", // - "\"2s\" 2000", // - "\"5s\" 5000", // - "\"10s\" 10000", // - "\"30s\" 30000", // - "\"1m\" 60000", // - "\"2m\" 120000", // - "\"5m\" 300000", // - "\"10m\" 600000", // - "\"30m\" 1800000", // - "\"1h\" 3600000", // - "\"2h\" 7200000", // - "\"4h\" 14400000", // - "\"8h\" 28800000", // - "\"16h\" 57600000", // - "\"1d\" 86400000", // - "\"2d\" 172800000", // - "\"1 week\" 604800000", // - "\"2 week\" 1209600000.0", // - "\"4 week\" 2419200000.0", // - "\"3 month\" 7776000000.0", // - "\"1 year\" 31536000000.0", // - "\"5 year\" 157680000000.0", // - "\"10 year\" 315360000000.0" - ); - - - return ticsLabels; -} - -private static List computeLinearYTicks(final long height, final long yRangeMin, final long yRangeMax) { - - final long plotHeight = height - GnuplotSettings.GNUPLOT_TOP_BOTTOM_MARGIN; - final long maxLabels = plotHeight / (GnuplotSettings.TICKS_FONT_SIZE * 5); - - final long range = yRangeMax - yRangeMin; - final long msPerLabel = roundToLinearLabelSteps(range / maxLabels); - - final List ticsLabels = new ArrayList<>(); - for (long i = yRangeMin; i <= yRangeMax; i += msPerLabel) { - ticsLabels.add("\"" + msToTic(i, msPerLabel) + "\" " + i); - } - - return ticsLabels; -} - -private static long roundToLinearLabelSteps(final long msPerLabel) { - final List steps = Arrays.asList(2L, 5L, 10L, 20L, 50L, 100L, 200L, 500L, 1000L, 2000L, 5000L, 10_000L, - 20_000L, MINUTES.toMillis(1), MINUTES.toMillis(2), MINUTES.toMillis(5), MINUTES.toMillis(10), - MINUTES.toMillis(15), MINUTES.toMillis(30), 
HOURS.toMillis(1), HOURS.toMillis(2), HOURS.toMillis(5), - HOURS.toMillis(10), HOURS.toMillis(12), DAYS.toMillis(1), DAYS.toMillis(2), DAYS.toMillis(5), - DAYS.toMillis(7)); - - for (final Long step : steps) { - if (msPerLabel < step) { - return step; - } - } - - return msPerLabel; -} - -private static String msToTic(final long ms, final double msPerLabel) { - - if (ms < 1000) { - return ms + "ms"; - } else if (ms < MINUTES.toMillis(1)) { - if (msPerLabel % 1000 == 0) { - return String.format(Locale.US,"%ds", ms / 1_000); + final long yRangeMax; + final long yRangeMin; + if (settings.hasYRange()) { + yRangeMax = settings.getYRangeMax(); + yRangeMin = settings.getYRangeMin(); } else { - return String.format(Locale.US,"%.1fs", ms / 1_000.0); + yRangeMax = DataSeries.maxValue(dataSeries); + yRangeMin = 0; } - } else if (ms < TimeUnit.HOURS.toMillis(1)) { + final int height = settings.getHeight(); - final long sec = (ms % MINUTES.toMillis(1)) / SECONDS.toMillis(1); - final long min = ms / MINUTES.toMillis(1); - if (msPerLabel % MINUTES.toMillis(1) == 0) { - return min + "m "; + switch (settings.getYAxisScale()) { + case LINEAR: + result = computeLinearYTicks(height, yRangeMin, yRangeMax); + break; + case LOG10: + result = computeLog10YTicks(height, yRangeMin, yRangeMax); + break; + default: + // use the default + } + return result; + } + + private static List computeLog10YTicks(final int height, final long yRangeMin, final long yRangeMax) { + + final List ticsLabels = Arrays.asList(// + "\"1ms\" 1", // + "\"2ms\" 2", // + "\"5ms\" 5", // + "\"10ms\" 10", // + "\"20ms\" 20", // + "\"50ms\" 50", // + "\"100ms\" 100", // + "\"200ms\" 200", // + "\"500ms\" 500", // + "\"1s\" 1000", // + "\"2s\" 2000", // + "\"5s\" 5000", // + "\"10s\" 10000", // + "\"30s\" 30000", // + "\"1m\" 60000", // + "\"2m\" 120000", // + "\"5m\" 300000", // + "\"10m\" 600000", // + "\"30m\" 1800000", // + "\"1h\" 3600000", // + "\"2h\" 7200000", // + "\"4h\" 14400000", // + "\"8h\" 28800000", // + 
"\"16h\" 57600000", // + "\"1d\" 86400000", // + "\"2d\" 172800000", // + "\"1 week\" 604800000", // + "\"2 week\" 1209600000.0", // + "\"4 week\" 2419200000.0", // + "\"3 month\" 7776000000.0", // + "\"1 year\" 31536000000.0", // + "\"5 year\" 157680000000.0", // + "\"10 year\" 315360000000.0"); + + return ticsLabels; + } + + private static List computeLinearYTicks(final long height, final long yRangeMin, final long yRangeMax) { + + final long plotHeight = height - GnuplotSettings.GNUPLOT_TOP_BOTTOM_MARGIN; + final long maxLabels = plotHeight / (GnuplotSettings.TICKS_FONT_SIZE * 5); + + final long range = yRangeMax - yRangeMin; + final long msPerLabel = roundToLinearLabelSteps(range / maxLabels); + + final List ticsLabels = new ArrayList<>(); + for (long i = yRangeMin; i <= yRangeMax; i += msPerLabel) { + ticsLabels.add("\"" + msToTic(i, msPerLabel) + "\" " + i); + } + + return ticsLabels; + } + + private static long roundToLinearLabelSteps(final long msPerLabel) { + final List steps = Arrays.asList(2L, 5L, 10L, 20L, 50L, 100L, 200L, 500L, 1000L, 2000L, 5000L, 10_000L, + 20_000L, MINUTES.toMillis(1), MINUTES.toMillis(2), MINUTES.toMillis(5), MINUTES.toMillis(10), + MINUTES.toMillis(15), MINUTES.toMillis(30), HOURS.toMillis(1), HOURS.toMillis(2), HOURS.toMillis(5), + HOURS.toMillis(10), HOURS.toMillis(12), DAYS.toMillis(1), DAYS.toMillis(2), DAYS.toMillis(5), + DAYS.toMillis(7)); + + for (final Long step : steps) { + if (msPerLabel < step) { + return step; + } + } + + return msPerLabel; + } + + private static String msToTic(final long ms, final double msPerLabel) { + + if (ms < 1000) { + return ms + "ms"; + } else if (ms < MINUTES.toMillis(1)) { + if (msPerLabel % 1000 == 0) { + return String.format(Locale.US, "%ds", ms / 1_000); + } else { + return String.format(Locale.US, "%.1fs", ms / 1_000.0); + } + } else if (ms < TimeUnit.HOURS.toMillis(1)) { + + final long sec = (ms % MINUTES.toMillis(1)) / SECONDS.toMillis(1); + final long min = ms / MINUTES.toMillis(1); + 
if (msPerLabel % MINUTES.toMillis(1) == 0) { + return min + "m "; + } else { + return min + "m " + sec + "s"; + } + } else if (ms < DAYS.toMillis(1)) { + // ms is a multiple of 1 hour, see roundToLinearLabelSteps + final long hour = (ms % DAYS.toMillis(1)) / HOURS.toMillis(1); + final long min = (ms % HOURS.toMillis(1)) / MINUTES.toMillis(1); + final long sec = (ms % MINUTES.toMillis(1)) / SECONDS.toMillis(1); + + if (msPerLabel % MINUTES.toMillis(1) == 0) { + return hour + "h " + min + "m "; + } else if (msPerLabel % HOURS.toMillis(1) == 0) { + return hour + "h "; + } else { + return hour + "h " + min + "m " + sec + "s"; + } } else { - return min + "m " + sec + "s"; - } - } else if (ms < DAYS.toMillis(1)) { - // ms is a multiple of 1 hour, see roundToLinearLabelSteps - final long hour = (ms % DAYS.toMillis(1)) / HOURS.toMillis(1); - final long min = (ms % HOURS.toMillis(1)) / MINUTES.toMillis(1); - final long sec = (ms % MINUTES.toMillis(1)) / SECONDS.toMillis(1); + // ms is a multiple of 1 day, see roundToLinearLabelSteps + final long day = ms / DAYS.toMillis(1); + final long hour = (ms % DAYS.toMillis(1)) / HOURS.toMillis(1); - if (msPerLabel % MINUTES.toMillis(1) == 0) { - return hour + "h " + min + "m "; - } else if (msPerLabel % HOURS.toMillis(1) == 0) { - return hour + "h "; - } else { - return hour + "h " + min + "m " + sec + "s"; + return day + "d " + hour + "h "; } - } else { - // ms is a multiple of 1 day, see roundToLinearLabelSteps - final long day = ms / DAYS.toMillis(1); - final long hour = (ms % DAYS.toMillis(1)) / HOURS.toMillis(1); - - return day + "d " + hour + "h "; } } -} diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/BadRequest.java b/pdb-ui/src/main/java/org/lucares/pdbui/BadRequest.java index 1198f02..efcdfe0 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/BadRequest.java +++ b/pdb-ui/src/main/java/org/lucares/pdbui/BadRequest.java @@ -6,17 +6,17 @@ import org.springframework.web.bind.annotation.ResponseStatus; @ResponseStatus(value 
= HttpStatus.BAD_REQUEST) public class BadRequest extends RuntimeException { - private static final long serialVersionUID = 694206253376122420L; + private static final long serialVersionUID = 694206253376122420L; - public BadRequest(final String message, final Throwable cause) { - super(message, cause); - } + public BadRequest(final String message, final Throwable cause) { + super(message, cause); + } - public BadRequest(final String message) { - super(message); - } + public BadRequest(final String message) { + super(message); + } - public BadRequest(final Throwable cause) { - super(cause); - } + public BadRequest(final Throwable cause) { + super(cause); + } } diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/CleanupThread.java b/pdb-ui/src/main/java/org/lucares/pdbui/CleanupThread.java index 1e3463e..98b95b7 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/CleanupThread.java +++ b/pdb-ui/src/main/java/org/lucares/pdbui/CleanupThread.java @@ -21,74 +21,74 @@ import org.springframework.stereotype.Component; @Component public class CleanupThread implements DisposableBean, PropertyKeys { - private static final Logger LOGGER = LoggerFactory.getLogger(CleanupThread.class); + private static final Logger LOGGER = LoggerFactory.getLogger(CleanupThread.class); - private static final class RemoveTempFiles implements Runnable { + private static final class RemoveTempFiles implements Runnable { - private final Path outputPath; - private final int cacheDurationInSeconds; + private final Path outputPath; + private final int cacheDurationInSeconds; - public RemoveTempFiles(final Path outputPath, final int cacheDurationInSeconds) { - this.outputPath = outputPath; - this.cacheDurationInSeconds = cacheDurationInSeconds; - } + public RemoveTempFiles(final Path outputPath, final int cacheDurationInSeconds) { + this.outputPath = outputPath; + this.cacheDurationInSeconds = cacheDurationInSeconds; + } - @Override - public void run() { + @Override + public void run() { - try { - 
Files.walk(outputPath)// - .filter(Files::isRegularFile)// - .filter(this::isStale)// - .forEach(RemoveTempFiles::delete); - } catch (final IOException | RuntimeException e) { - LOGGER.warn("failed to walk " + outputPath + ". Cannot delete stale files", e); - } - } + try { + Files.walk(outputPath)// + .filter(Files::isRegularFile)// + .filter(this::isStale)// + .forEach(RemoveTempFiles::delete); + } catch (final IOException | RuntimeException e) { + LOGGER.warn("failed to walk " + outputPath + ". Cannot delete stale files", e); + } + } - private static void delete(final Path path) { - try { - LOGGER.debug("deleting stale file: " + path); - Files.delete(path); - } catch (final IOException e) { - LOGGER.warn("failed to delete stale file " + path, e); - } - } + private static void delete(final Path path) { + try { + LOGGER.debug("deleting stale file: " + path); + Files.delete(path); + } catch (final IOException e) { + LOGGER.warn("failed to delete stale file " + path, e); + } + } - private boolean isStale(final Path path) { - final Instant maxAge = Instant.now().minusSeconds(cacheDurationInSeconds); - try { - final FileTime lastModifiedTime = Files.getLastModifiedTime(path); - final Instant lastModifiedInstant = lastModifiedTime.toInstant(); - return lastModifiedInstant.compareTo(maxAge) < 0; - } catch (final IOException e) { - LOGGER.warn("failed to get last modified time of " + path + ". Considering this file as stale.", e); - return true; - } - } + private boolean isStale(final Path path) { + final Instant maxAge = Instant.now().minusSeconds(cacheDurationInSeconds); + try { + final FileTime lastModifiedTime = Files.getLastModifiedTime(path); + final Instant lastModifiedInstant = lastModifiedTime.toInstant(); + return lastModifiedInstant.compareTo(maxAge) < 0; + } catch (final IOException e) { + LOGGER.warn("failed to get last modified time of " + path + ". 
Considering this file as stale.", e); + return true; + } + } - } + } - private final ScheduledExecutorService scheduledThreadPool; - private static final int CACHE_DURATION_IN_SECONDS = 24 * 3600; + private final ScheduledExecutorService scheduledThreadPool; + private static final int CACHE_DURATION_IN_SECONDS = 24 * 3600; - @Autowired - public CleanupThread(@Value("${" + PATH_GENERATED_IMAGES + "}") final String outputDir) { - scheduledThreadPool = Executors.newScheduledThreadPool(1, new CustomizableThreadFactory("cleanup-")); + @Autowired + public CleanupThread(@Value("${" + PATH_GENERATED_IMAGES + "}") final String outputDir) { + scheduledThreadPool = Executors.newScheduledThreadPool(1, new CustomizableThreadFactory("cleanup-")); - final Path outputPath = Paths.get(outputDir); - scheduledThreadPool.scheduleWithFixedDelay(new RemoveTempFiles(outputPath, CACHE_DURATION_IN_SECONDS), 1, 5, - TimeUnit.MINUTES); - } + final Path outputPath = Paths.get(outputDir); + scheduledThreadPool.scheduleWithFixedDelay(new RemoveTempFiles(outputPath, CACHE_DURATION_IN_SECONDS), 1, 5, + TimeUnit.MINUTES); + } - @Override - public void destroy() { - scheduledThreadPool.shutdown(); - try { - scheduledThreadPool.awaitTermination(10, TimeUnit.SECONDS); - } catch (final InterruptedException e) { - Thread.currentThread().interrupt(); - } - } + @Override + public void destroy() { + scheduledThreadPool.shutdown(); + try { + scheduledThreadPool.awaitTermination(10, TimeUnit.SECONDS); + } catch (final InterruptedException e) { + Thread.currentThread().interrupt(); + } + } } diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/CustomExportFormatToEntryTransformer.java b/pdb-ui/src/main/java/org/lucares/pdbui/CustomExportFormatToEntryTransformer.java index 2b7ad76..a517f58 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/CustomExportFormatToEntryTransformer.java +++ b/pdb-ui/src/main/java/org/lucares/pdbui/CustomExportFormatToEntryTransformer.java @@ -38,90 +38,90 @@ import 
org.slf4j.LoggerFactory; public class CustomExportFormatToEntryTransformer { - private static final int ENTRY_BUFFER_SIZE = 100; + private static final int ENTRY_BUFFER_SIZE = 100; - private static final Logger LOGGER = LoggerFactory.getLogger(CustomExportFormatToEntryTransformer.class); + private static final Logger LOGGER = LoggerFactory.getLogger(CustomExportFormatToEntryTransformer.class); - private final Pattern splitByComma = Pattern.compile(","); + private final Pattern splitByComma = Pattern.compile(","); - private final Map tagsDictionary = new HashMap<>(); + private final Map tagsDictionary = new HashMap<>(); - private long lastEpochMilli; + private long lastEpochMilli; - public void read(final BufferedReader in, final ArrayBlockingQueue queue) throws IOException { + public void read(final BufferedReader in, final ArrayBlockingQueue queue) throws IOException { - Entries bufferedEntries = new Entries(ENTRY_BUFFER_SIZE); + Entries bufferedEntries = new Entries(ENTRY_BUFFER_SIZE); - try { - String line; - while ((line = in.readLine()) != null) { - try { - if (line.startsWith(PdbExport.MARKER_DICT_ENTRY)) { - readDictionaryEntry(line); - } else { - final Entry entry = readEntry(line); - if (entry != null) { + try { + String line; + while ((line = in.readLine()) != null) { + try { + if (line.startsWith(PdbExport.MARKER_DICT_ENTRY)) { + readDictionaryEntry(line); + } else { + final Entry entry = readEntry(line); + if (entry != null) { - bufferedEntries.add(entry); + bufferedEntries.add(entry); - if (bufferedEntries.size() == ENTRY_BUFFER_SIZE) { - queue.put(bufferedEntries); - bufferedEntries = new Entries(ENTRY_BUFFER_SIZE); - } - } - } - } catch (final Exception e) { - LOGGER.error("ignoring line '{}'", line, e); - } - queue.put(bufferedEntries); - bufferedEntries = new Entries(ENTRY_BUFFER_SIZE); - } - } catch (final InterruptedException e) { - Thread.currentThread().interrupt(); - LOGGER.info("aborting because of interruption"); - } - } + if 
(bufferedEntries.size() == ENTRY_BUFFER_SIZE) { + queue.put(bufferedEntries); + bufferedEntries = new Entries(ENTRY_BUFFER_SIZE); + } + } + } + } catch (final Exception e) { + LOGGER.error("ignoring line '{}'", line, e); + } + queue.put(bufferedEntries); + bufferedEntries = new Entries(ENTRY_BUFFER_SIZE); + } + } catch (final InterruptedException e) { + Thread.currentThread().interrupt(); + LOGGER.info("aborting because of interruption"); + } + } - private Entry readEntry(final String line) { + private Entry readEntry(final String line) { - final String[] timeValueTags = splitByComma.split(line); + final String[] timeValueTags = splitByComma.split(line); - final long timeDelta = Long.parseLong(timeValueTags[0]); - final long value = Long.parseLong(timeValueTags[1]); - final long tagsId = Long.parseLong(timeValueTags[2]); + final long timeDelta = Long.parseLong(timeValueTags[0]); + final long value = Long.parseLong(timeValueTags[1]); + final long tagsId = Long.parseLong(timeValueTags[2]); - lastEpochMilli = lastEpochMilli + timeDelta; + lastEpochMilli = lastEpochMilli + timeDelta; - final Tags tags = tagsDictionary.get(tagsId); - if (tags == null) { - LOGGER.info("no tags available for tagsId {}. Ignoring line '{}'", tagsId, line); - return null; - } + final Tags tags = tagsDictionary.get(tagsId); + if (tags == null) { + LOGGER.info("no tags available for tagsId {}. 
Ignoring line '{}'", tagsId, line); + return null; + } - return new Entry(lastEpochMilli, value, tags); - } + return new Entry(lastEpochMilli, value, tags); + } - private void readDictionaryEntry(final String line) { - final String[] tagsIdToSerializedTags = line.split(Pattern.quote(PdbExport.SEPARATOR_TAG_ID)); + private void readDictionaryEntry(final String line) { + final String[] tagsIdToSerializedTags = line.split(Pattern.quote(PdbExport.SEPARATOR_TAG_ID)); - final Long tagId = Long.parseLong(tagsIdToSerializedTags[0], 1, tagsIdToSerializedTags[0].length(), 10); - final Tags tags = tagsFromCsv(tagsIdToSerializedTags[1]); - tagsDictionary.put(tagId, tags); - } + final Long tagId = Long.parseLong(tagsIdToSerializedTags[0], 1, tagsIdToSerializedTags[0].length(), 10); + final Tags tags = tagsFromCsv(tagsIdToSerializedTags[1]); + tagsDictionary.put(tagId, tags); + } - public static Tags tagsFromCsv(final String line) { + public static Tags tagsFromCsv(final String line) { - final TagsBuilder tagsBuilder = new TagsBuilder(); - final String[] tagsAsString = line.split(Pattern.quote(",")); + final TagsBuilder tagsBuilder = new TagsBuilder(); + final String[] tagsAsString = line.split(Pattern.quote(",")); - for (final String tagAsString : tagsAsString) { - final String[] keyValue = tagAsString.split(Pattern.quote("=")); + for (final String tagAsString : tagsAsString) { + final String[] keyValue = tagAsString.split(Pattern.quote("=")); - final int key = Tags.STRING_COMPRESSOR.put(keyValue[0]); - final int value = Tags.STRING_COMPRESSOR.put(keyValue[1]); - tagsBuilder.add(key, value); - } + final int key = Tags.STRING_COMPRESSOR.put(keyValue[0]); + final int value = Tags.STRING_COMPRESSOR.put(keyValue[1]); + tagsBuilder.add(key, value); + } - return tagsBuilder.build(); - } + return tagsBuilder.build(); + } } \ No newline at end of file diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/HardcodedValues.java b/pdb-ui/src/main/java/org/lucares/pdbui/HardcodedValues.java 
index e093b77..b7fe0d2 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/HardcodedValues.java +++ b/pdb-ui/src/main/java/org/lucares/pdbui/HardcodedValues.java @@ -2,8 +2,8 @@ package org.lucares.pdbui; public interface HardcodedValues { - /** - * The path for generated images relative to the context root. - */ - String WEB_IMAGE_OUTPUT_PATH = "img-generated"; + /** + * The path for generated images relative to the context root. + */ + String WEB_IMAGE_OUTPUT_PATH = "img-generated"; } diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/Ingestion.java b/pdb-ui/src/main/java/org/lucares/pdbui/Ingestion.java index 08bc4bc..31923e5 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/Ingestion.java +++ b/pdb-ui/src/main/java/org/lucares/pdbui/Ingestion.java @@ -7,15 +7,15 @@ import org.springframework.stereotype.Component; @Component public class Ingestion { - private final Ingestor tcpIngestor; + private final Ingestor tcpIngestor; - public Ingestion(final Ingestor tcpIngestor) { - this.tcpIngestor = tcpIngestor; - } + public Ingestion(final Ingestor tcpIngestor) { + this.tcpIngestor = tcpIngestor; + } - @PostConstruct - public void start() throws Exception { - tcpIngestor.start(); - } + @PostConstruct + public void start() throws Exception { + tcpIngestor.start(); + } } diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/Ingestor.java b/pdb-ui/src/main/java/org/lucares/pdbui/Ingestor.java index f6577c3..378e6c3 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/Ingestor.java +++ b/pdb-ui/src/main/java/org/lucares/pdbui/Ingestor.java @@ -2,6 +2,6 @@ package org.lucares.pdbui; public interface Ingestor { - void start() throws Exception; + void start() throws Exception; } diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/InternalServerError.java b/pdb-ui/src/main/java/org/lucares/pdbui/InternalServerError.java index 1c4cb40..80fe19b 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/InternalServerError.java +++ 
b/pdb-ui/src/main/java/org/lucares/pdbui/InternalServerError.java @@ -6,13 +6,13 @@ import org.springframework.web.bind.annotation.ResponseStatus; @ResponseStatus(value = HttpStatus.INTERNAL_SERVER_ERROR, reason = "Internal Server Error") public class InternalServerError extends RuntimeException { - private static final long serialVersionUID = 548651821080252932L; + private static final long serialVersionUID = 548651821080252932L; - public InternalServerError(final String message, final Throwable cause) { - super(message, cause); - } + public InternalServerError(final String message, final Throwable cause) { + super(message, cause); + } - public InternalServerError(final Throwable cause) { - super(cause); - } + public InternalServerError(final Throwable cause) { + super(cause); + } } diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/JsonToEntryTransformer.java b/pdb-ui/src/main/java/org/lucares/pdbui/JsonToEntryTransformer.java index 4e86369..010798f 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/JsonToEntryTransformer.java +++ b/pdb-ui/src/main/java/org/lucares/pdbui/JsonToEntryTransformer.java @@ -16,82 +16,82 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; public class JsonToEntryTransformer implements LineToEntryTransformer { - private static final Logger LOGGER = LoggerFactory.getLogger(JsonToEntryTransformer.class); + private static final Logger LOGGER = LoggerFactory.getLogger(JsonToEntryTransformer.class); - private final TypeReference> typeReferenceForMap = new TypeReference>() { - }; + private final TypeReference> typeReferenceForMap = new TypeReference>() { + }; - private final ObjectMapper objectMapper = new ObjectMapper(); - private final ObjectReader objectReader = objectMapper.readerFor(typeReferenceForMap); - private final FastISODateParser fastISODateParser = new FastISODateParser(); + private final ObjectMapper objectMapper = new ObjectMapper(); + private final ObjectReader 
objectReader = objectMapper.readerFor(typeReferenceForMap); + private final FastISODateParser fastISODateParser = new FastISODateParser(); - @Override - public Optional toEntry(final String line) throws IOException { + @Override + public Optional toEntry(final String line) throws IOException { - final Map object = objectReader.readValue(line); + final Map object = objectReader.readValue(line); - final Optional entry = createEntry(object); + final Optional entry = createEntry(object); - return entry; - } + return entry; + } - public Optional createEntry(final Map map) { - try { + public Optional createEntry(final Map map) { + try { - if (map.containsKey("duration") && map.containsKey("@timestamp")) { - final long epochMilli = getDate(map); - final long duration = (int) map.get("duration"); + if (map.containsKey("duration") && map.containsKey("@timestamp")) { + final long epochMilli = getDate(map); + final long duration = (int) map.get("duration"); - final Tags tags = createTags(map); + final Tags tags = createTags(map); - final Entry entry = new Entry(epochMilli, duration, tags); - return Optional.of(entry); - } else { - LOGGER.info("Skipping invalid entry: " + map); - return Optional.empty(); - } - } catch (final Exception e) { - LOGGER.error("Failed to create entry from map: " + map, e); - return Optional.empty(); - } - } + final Entry entry = new Entry(epochMilli, duration, tags); + return Optional.of(entry); + } else { + LOGGER.info("Skipping invalid entry: " + map); + return Optional.empty(); + } + } catch (final Exception e) { + LOGGER.error("Failed to create entry from map: " + map, e); + return Optional.empty(); + } + } - private Tags createTags(final Map map) { - final TagsBuilder tags = TagsBuilder.create(); - for (final java.util.Map.Entry e : map.entrySet()) { + private Tags createTags(final Map map) { + final TagsBuilder tags = TagsBuilder.create(); + for (final java.util.Map.Entry e : map.entrySet()) { - final String key = e.getKey(); - final Object 
value = e.getValue(); + final String key = e.getKey(); + final Object value = e.getValue(); - switch (key) { - case "@timestamp": - case "duration": - // these fields are not tags - break; - case "tags": - // ignore: we only support key/value tags - break; - default: - final int keyAsInt = Tags.STRING_COMPRESSOR.put(key); - final int valueAsInt; - if (value instanceof String) { - valueAsInt = Tags.STRING_COMPRESSOR.put((String) value); - } else if (value != null) { - valueAsInt = Tags.STRING_COMPRESSOR.put(String.valueOf(value)); - } else { - continue; - } - tags.add(keyAsInt, valueAsInt); - break; - } - } - return tags.build(); - } + switch (key) { + case "@timestamp": + case "duration": + // these fields are not tags + break; + case "tags": + // ignore: we only support key/value tags + break; + default: + final int keyAsInt = Tags.STRING_COMPRESSOR.put(key); + final int valueAsInt; + if (value instanceof String) { + valueAsInt = Tags.STRING_COMPRESSOR.put((String) value); + } else if (value != null) { + valueAsInt = Tags.STRING_COMPRESSOR.put(String.valueOf(value)); + } else { + continue; + } + tags.add(keyAsInt, valueAsInt); + break; + } + } + return tags.build(); + } - private long getDate(final Map map) { - final String timestamp = (String) map.get("@timestamp"); + private long getDate(final Map map) { + final String timestamp = (String) map.get("@timestamp"); - return fastISODateParser.parseAsEpochMilli(timestamp); - } + return fastISODateParser.parseAsEpochMilli(timestamp); + } } diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/LineToEntryTransformer.java b/pdb-ui/src/main/java/org/lucares/pdbui/LineToEntryTransformer.java index f60ef84..ea7789a 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/LineToEntryTransformer.java +++ b/pdb-ui/src/main/java/org/lucares/pdbui/LineToEntryTransformer.java @@ -6,5 +6,5 @@ import java.util.Optional; import org.lucares.pdb.api.Entry; public interface LineToEntryTransformer { - public Optional toEntry(String line) 
throws IOException; + public Optional toEntry(String line) throws IOException; } diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/MySpringConfiguration.java b/pdb-ui/src/main/java/org/lucares/pdbui/MySpringConfiguration.java index 85cfc78..263d4d2 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/MySpringConfiguration.java +++ b/pdb-ui/src/main/java/org/lucares/pdbui/MySpringConfiguration.java @@ -18,14 +18,14 @@ import org.springframework.scheduling.annotation.EnableAsync; @ComponentScan("org.lucares.pdbui") public class MySpringConfiguration { - private static final Logger LOGGER = LoggerFactory.getLogger(MySpringConfiguration.class); + private static final Logger LOGGER = LoggerFactory.getLogger(MySpringConfiguration.class); - @Bean - PerformanceDb performanceDb(@Value("${db.base}") final String dbBaseDir) throws IOException { - final Path dataDirectory = Paths.get(dbBaseDir); + @Bean + PerformanceDb performanceDb(@Value("${db.base}") final String dbBaseDir) throws IOException { + final Path dataDirectory = Paths.get(dbBaseDir); - LOGGER.info("using database in {}", dataDirectory.toAbsolutePath()); + LOGGER.info("using database in {}", dataDirectory.toAbsolutePath()); - return new PerformanceDb(dataDirectory); - } + return new PerformanceDb(dataDirectory); + } } diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/NotFoundException.java b/pdb-ui/src/main/java/org/lucares/pdbui/NotFoundException.java index 4c19ebd..50f4700 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/NotFoundException.java +++ b/pdb-ui/src/main/java/org/lucares/pdbui/NotFoundException.java @@ -6,13 +6,13 @@ import org.springframework.web.bind.annotation.ResponseStatus; @ResponseStatus(value = HttpStatus.NOT_FOUND) public class NotFoundException extends RuntimeException { - private static final long serialVersionUID = 694206253376122420L; + private static final long serialVersionUID = 694206253376122420L; - public NotFoundException(final String message, final Throwable cause) { - 
super(message, cause); - } + public NotFoundException(final String message, final Throwable cause) { + super(message, cause); + } - public NotFoundException(final Throwable cause) { - super(cause); - } + public NotFoundException(final Throwable cause) { + super(cause); + } } diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/PdbController.java b/pdb-ui/src/main/java/org/lucares/pdbui/PdbController.java index 4808bdb..eec9d16 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/PdbController.java +++ b/pdb-ui/src/main/java/org/lucares/pdbui/PdbController.java @@ -65,268 +65,268 @@ import com.fasterxml.jackson.databind.ObjectMapper; @Controller @EnableAutoConfiguration -@CrossOrigin(origins = {"http://localhost:4200", "http://127.0.0.1:4200"}) +@CrossOrigin(origins = { "http://localhost:4200", "http://127.0.0.1:4200" }) public class PdbController implements HardcodedValues, PropertyKeys { - private static final Logger LOGGER = LoggerFactory.getLogger(PdbController.class); + private static final Logger LOGGER = LoggerFactory.getLogger(PdbController.class); - private final Plotter plotter; - private final PerformanceDb db; + private final Plotter plotter; + private final PerformanceDb db; - private final ReentrantLock plotterLock = new ReentrantLock(); + private final ReentrantLock plotterLock = new ReentrantLock(); - @Value("${" + PRODUCTION_MODE + ":true}") - private boolean modeProduction; - - @Value("${"+DEFAULTS_QUERY_EXAMPLES+":}") - private String queryExamples; - - @Value("${"+DEFAULTS_GROUP_BY+":}") - private String defaultsGroupBy; - - @Value("${"+DEFAULTS_SPLIT_BY+":}") - private String defaultsSplitBy; + @Value("${" + PRODUCTION_MODE + ":true}") + private boolean modeProduction; - public PdbController(final PerformanceDb db, final Plotter plotter) { - this.db = db; - this.plotter = plotter; - } + @Value("${" + DEFAULTS_QUERY_EXAMPLES + ":}") + private String queryExamples; - @GetMapping("/") - public ModelAndView index() { - final String view = "main"; - 
final Map model = new HashMap<>(); - // model.put("oldestValue", - // LocalDateTime.now().minusDays(7).format(DATE_FORMAT_BEGIN)); - // model.put("latestValue", LocalDateTime.now().format(DATE_FORMAT_END)); - model.put("isProduction", modeProduction); - return new ModelAndView(view, model); - } + @Value("${" + DEFAULTS_GROUP_BY + ":}") + private String defaultsGroupBy; - @RequestMapping(path = "/plots", // - method = RequestMethod.GET, // - consumes = MediaType.APPLICATION_JSON_VALUE, // - produces = MediaType.APPLICATION_JSON_VALUE // - ) - @ResponseBody - ResponseEntity createPlotGet(@RequestParam(name = "request") final String request) - throws InternalPlottingException, InterruptedException, JsonParseException, JsonMappingException, - IOException { + @Value("${" + DEFAULTS_SPLIT_BY + ":}") + private String defaultsSplitBy; - final ObjectMapper objectMapper = new ObjectMapper(); - final PlotRequest plotRequest = objectMapper.readValue(request, PlotRequest.class); - - return createPlot(plotRequest); - } - - @RequestMapping(path = "/plots", // - method = RequestMethod.POST, // - consumes = MediaType.APPLICATION_JSON_VALUE, // - produces = MediaType.APPLICATION_JSON_VALUE // - ) - @ResponseBody - ResponseEntity createPlot(@RequestBody final PlotRequest request) - throws InternalPlottingException, InterruptedException { - - final PlotSettings plotSettings = PlotSettingsTransformer.toSettings(request); - if (StringUtils.isBlank(plotSettings.getQuery())) { - throw new BadRequest("The query must not be empty!"); - } - - // TODO the UI should cancel requests that are in flight before sending a plot - // request - if (plotterLock.tryLock(5, TimeUnit.SECONDS)) { - try { - final PlotResult result = plotter.plot(plotSettings); - - final String imageUrl = WEB_IMAGE_OUTPUT_PATH + "/" + result.getImageName(); - LOGGER.trace("image url: {}", imageUrl); - - final String thumbnailUrl = result.getThumbnailPath() != null - ? 
WEB_IMAGE_OUTPUT_PATH + "/" + result.getThumbnailName() - : "img/no-thumbnail.png"; - - final PlotResponseStats stats = PlotResponseStats.fromDataSeries(result.getDataSeries()); - final PlotResponse plotResponse = new PlotResponse(stats, imageUrl, thumbnailUrl); - - return ResponseEntity.ok().body(plotResponse); - } catch (final NoDataPointsException e) { - throw new NotFoundException("No data was found. Try another query, or change the date range.",e); - } finally { - plotterLock.unlock(); - } - - } else { - throw new ServiceUnavailableException("Too many parallel requests!"); - } - } - - @RequestMapping(path = "/plots", // - method = RequestMethod.GET, // - produces = MediaType.APPLICATION_OCTET_STREAM_VALUE // - ) - StreamingResponseBody createPlotImage(@RequestParam(name = "query", defaultValue = "") final String query, - @RequestParam(name = "groupBy[]", defaultValue = "") final List aGroupBy, - @RequestParam(name = "limitBy.number", defaultValue = "10") final int limit, - @RequestParam(name = "limitBy.selected", defaultValue = "NO_LIMIT") final Limit limitBy, - @RequestParam(name = "dateRange") final String dateRange, - @RequestParam(name = "axisScale", defaultValue = "LINEAR") final AxisScale axisScale, - @RequestParam(name = "aggregates") final EnumSetaggregate, - @RequestParam(name = "keyOutside", defaultValue = "false") final boolean keyOutside, - @RequestParam(name = "width", defaultValue = "1920") final int hidth, - @RequestParam(name = "height", defaultValue = "1080") final int height) { - return (final OutputStream outputStream) -> { - - if (StringUtils.isBlank(query)) { - throw new BadRequest("The query must not be empty!"); - } - - if (StringUtils.isBlank(dateRange)) { - throw new BadRequest("The parameter 'dateRange' must be set."); - } - - final PlotSettings plotSettings = new PlotSettings(); - plotSettings.setQuery(query); - plotSettings.setGroupBy(aGroupBy); - plotSettings.setHeight(height); - plotSettings.setWidth(hidth); - 
plotSettings.setLimit(limit); - plotSettings.setLimitBy(limitBy); - plotSettings.setDateRange(dateRange); - plotSettings.setYAxisScale(axisScale); - plotSettings.setAggregates(PlotSettingsTransformer.toAggregateInternal(plotSettings.getYRangeUnit(), plotSettings.getYAxisScale(), aggregate)); - plotSettings.setKeyOutside(keyOutside); - plotSettings.setGenerateThumbnail(false); - - if (plotterLock.tryLock()) { - try { - final PlotResult result = plotter.plot(plotSettings); - - try (FileInputStream in = new FileInputStream(result.getImagePath().toFile())) { - StreamUtils.copy(in, outputStream); - } - } catch (final NoDataPointsException e) { - throw new NotFoundException(e); - } catch (final InternalPlottingException e) { - throw new InternalServerError(e); - } finally { - plotterLock.unlock(); - } - - } else { - throw new ServiceUnavailableException("Too many parallel requests!"); - } - }; - } - - @RequestMapping(path = "/autocomplete", // - method = RequestMethod.GET, // - produces = MediaType.APPLICATION_JSON_VALUE // - ) - @ResponseBody - AutocompleteResponse autocomplete(@RequestParam(name = "query") final String query, - @RequestParam(name = "caretIndex") final int caretIndex, - @RequestParam(name="resultMode", defaultValue = "CUT_AT_DOT") ResultMode resultMode) { - - // TODO get date range from UI - final DateTimeRange dateRange = DateTimeRange.max(); - final int zeroBasedCaretIndex = caretIndex - 1; - final QueryWithCaretMarker q = new QueryWithCaretMarker(query, dateRange, zeroBasedCaretIndex, - resultMode); - - final AutocompleteResponse result = new AutocompleteResponse(); - - final List proposals = db.autocomplete(q); - if (query.trim().length() == 0) { - proposals.addAll(exampleProposals()); - } - - final List autocompleteProposals = toAutocompleteProposals(proposals); - Collections.sort(autocompleteProposals, new AutocompleteProposalByValue()); - - result.setProposals(autocompleteProposals); - return result; - } - - private List exampleProposals() { - 
List result = new ArrayList(); - - if (queryExamples.length() > 0) { - final String[] exampleQueries = queryExamples.split(Pattern.quote(";")); - for (String example : exampleQueries) { - Proposal p = new Proposal(" Example: "+example, example, true, example+" ", example.length()+1); - result.add(p); - } + public PdbController(final PerformanceDb db, final Plotter plotter) { + this.db = db; + this.plotter = plotter; } - - return result; - } - @RequestMapping(path = "/fields", // - method = RequestMethod.GET, // - //consumes = MediaType.APPLICATION_JSON_UTF8_VALUE, // - produces = MediaType.APPLICATION_JSON_VALUE // - ) - @ResponseBody - List fields() { - final DateTimeRange dateTimeRange = DateTimeRange.max(); - final List fields = db.getFields(dateTimeRange); + @GetMapping("/") + public ModelAndView index() { + final String view = "main"; + final Map model = new HashMap<>(); + // model.put("oldestValue", + // LocalDateTime.now().minusDays(7).format(DATE_FORMAT_BEGIN)); + // model.put("latestValue", LocalDateTime.now().format(DATE_FORMAT_END)); + model.put("isProduction", modeProduction); + return new ModelAndView(view, model); + } - fields.sort(Collator.getInstance(Locale.ENGLISH)); + @RequestMapping(path = "/plots", // + method = RequestMethod.GET, // + consumes = MediaType.APPLICATION_JSON_VALUE, // + produces = MediaType.APPLICATION_JSON_VALUE // + ) + @ResponseBody + ResponseEntity createPlotGet(@RequestParam(name = "request") final String request) + throws InternalPlottingException, InterruptedException, JsonParseException, JsonMappingException, + IOException { - return fields; - } + final ObjectMapper objectMapper = new ObjectMapper(); + final PlotRequest plotRequest = objectMapper.readValue(request, PlotRequest.class); - @RequestMapping(path = "/fields/{fieldName}/values", // - method = RequestMethod.GET, // - consumes = MediaType.APPLICATION_JSON_VALUE, // - produces = MediaType.APPLICATION_JSON_VALUE // - ) - @ResponseBody - SortedSet 
fields(@PathVariable(name = "fieldName") final String fieldName, - @RequestParam(name = "query") final String query) { + return createPlot(plotRequest); + } - // TODO get date range from UI - final String q = query.isBlank()// - ? String.format("%s = ", fieldName)// - : String.format("(%s) and %s=", query, fieldName); - final int zeroBasedCaretIndex = q.length(); - final DateTimeRange dateRange = DateTimeRange.max(); - final QueryWithCaretMarker autocompleteQuery = new QueryWithCaretMarker(q, dateRange, zeroBasedCaretIndex, - ResultMode.FULL_VALUES); + @RequestMapping(path = "/plots", // + method = RequestMethod.POST, // + consumes = MediaType.APPLICATION_JSON_VALUE, // + produces = MediaType.APPLICATION_JSON_VALUE // + ) + @ResponseBody + ResponseEntity createPlot(@RequestBody final PlotRequest request) + throws InternalPlottingException, InterruptedException { - final List result = db.autocomplete(autocompleteQuery); + final PlotSettings plotSettings = PlotSettingsTransformer.toSettings(request); + if (StringUtils.isBlank(plotSettings.getQuery())) { + throw new BadRequest("The query must not be empty!"); + } - final SortedSet fields = CollectionUtils.map(result, new TreeSet<>(), Proposal::getProposedTag); + // TODO the UI should cancel requests that are in flight before sending a plot + // request + if (plotterLock.tryLock(5, TimeUnit.SECONDS)) { + try { + final PlotResult result = plotter.plot(plotSettings); - return fields; - } - - @RequestMapping(path = "/filters/defaults", // - method = RequestMethod.GET, // - produces = MediaType.APPLICATION_JSON_VALUE // - ) - @ResponseBody - public FilterDefaults getFilterDefaults() { - final Set groupBy = defaultsGroupBy.isBlank() ? 
Set.of() : Set.of(defaultsGroupBy.split("\\s*,\\s*")); - final List fields = fields(); - return new FilterDefaults(fields, groupBy, defaultsSplitBy); - } + final String imageUrl = WEB_IMAGE_OUTPUT_PATH + "/" + result.getImageName(); + LOGGER.trace("image url: {}", imageUrl); - private List toAutocompleteProposals(final List proposals) { + final String thumbnailUrl = result.getThumbnailPath() != null + ? WEB_IMAGE_OUTPUT_PATH + "/" + result.getThumbnailName() + : "img/no-thumbnail.png"; - final List result = new ArrayList<>(); + final PlotResponseStats stats = PlotResponseStats.fromDataSeries(result.getDataSeries()); + final PlotResponse plotResponse = new PlotResponse(stats, imageUrl, thumbnailUrl); - for (final Proposal proposal : proposals) { - final AutocompleteProposal e = new AutocompleteProposal(); - e.setValue(proposal.getProposedTag()); - e.setNewQuery(proposal.getNewQuery()); - e.setNewCaretPosition(proposal.getNewCaretPosition()); + return ResponseEntity.ok().body(plotResponse); + } catch (final NoDataPointsException e) { + throw new NotFoundException("No data was found. 
Try another query, or change the date range.", e); + } finally { + plotterLock.unlock(); + } - result.add(e); - } + } else { + throw new ServiceUnavailableException("Too many parallel requests!"); + } + } - return result; - } + @RequestMapping(path = "/plots", // + method = RequestMethod.GET, // + produces = MediaType.APPLICATION_OCTET_STREAM_VALUE // + ) + StreamingResponseBody createPlotImage(@RequestParam(name = "query", defaultValue = "") final String query, + @RequestParam(name = "groupBy[]", defaultValue = "") final List aGroupBy, + @RequestParam(name = "limitBy.number", defaultValue = "10") final int limit, + @RequestParam(name = "limitBy.selected", defaultValue = "NO_LIMIT") final Limit limitBy, + @RequestParam(name = "dateRange") final String dateRange, + @RequestParam(name = "axisScale", defaultValue = "LINEAR") final AxisScale axisScale, + @RequestParam(name = "aggregates") final EnumSet aggregate, + @RequestParam(name = "keyOutside", defaultValue = "false") final boolean keyOutside, + @RequestParam(name = "width", defaultValue = "1920") final int hidth, + @RequestParam(name = "height", defaultValue = "1080") final int height) { + return (final OutputStream outputStream) -> { + + if (StringUtils.isBlank(query)) { + throw new BadRequest("The query must not be empty!"); + } + + if (StringUtils.isBlank(dateRange)) { + throw new BadRequest("The parameter 'dateRange' must be set."); + } + + final PlotSettings plotSettings = new PlotSettings(); + plotSettings.setQuery(query); + plotSettings.setGroupBy(aGroupBy); + plotSettings.setHeight(height); + plotSettings.setWidth(hidth); + plotSettings.setLimit(limit); + plotSettings.setLimitBy(limitBy); + plotSettings.setDateRange(dateRange); + plotSettings.setYAxisScale(axisScale); + plotSettings.setAggregates(PlotSettingsTransformer.toAggregateInternal(plotSettings.getYRangeUnit(), + plotSettings.getYAxisScale(), aggregate)); + plotSettings.setKeyOutside(keyOutside); + plotSettings.setGenerateThumbnail(false); + + if 
(plotterLock.tryLock()) { + try { + final PlotResult result = plotter.plot(plotSettings); + + try (FileInputStream in = new FileInputStream(result.getImagePath().toFile())) { + StreamUtils.copy(in, outputStream); + } + } catch (final NoDataPointsException e) { + throw new NotFoundException(e); + } catch (final InternalPlottingException e) { + throw new InternalServerError(e); + } finally { + plotterLock.unlock(); + } + + } else { + throw new ServiceUnavailableException("Too many parallel requests!"); + } + }; + } + + @RequestMapping(path = "/autocomplete", // + method = RequestMethod.GET, // + produces = MediaType.APPLICATION_JSON_VALUE // + ) + @ResponseBody + AutocompleteResponse autocomplete(@RequestParam(name = "query") final String query, + @RequestParam(name = "caretIndex") final int caretIndex, + @RequestParam(name = "resultMode", defaultValue = "CUT_AT_DOT") ResultMode resultMode) { + + // TODO get date range from UI + final DateTimeRange dateRange = DateTimeRange.max(); + final int zeroBasedCaretIndex = caretIndex - 1; + final QueryWithCaretMarker q = new QueryWithCaretMarker(query, dateRange, zeroBasedCaretIndex, resultMode); + + final AutocompleteResponse result = new AutocompleteResponse(); + + final List proposals = db.autocomplete(q); + if (query.trim().length() == 0) { + proposals.addAll(exampleProposals()); + } + + final List autocompleteProposals = toAutocompleteProposals(proposals); + Collections.sort(autocompleteProposals, new AutocompleteProposalByValue()); + + result.setProposals(autocompleteProposals); + return result; + } + + private List exampleProposals() { + List result = new ArrayList(); + + if (queryExamples.length() > 0) { + final String[] exampleQueries = queryExamples.split(Pattern.quote(";")); + for (String example : exampleQueries) { + Proposal p = new Proposal(" Example: " + example, example, true, example + " ", example.length() + 1); + result.add(p); + } + } + + return result; + } + + @RequestMapping(path = "/fields", // + method 
= RequestMethod.GET, // + // consumes = MediaType.APPLICATION_JSON_UTF8_VALUE, // + produces = MediaType.APPLICATION_JSON_VALUE // + ) + @ResponseBody + List fields() { + final DateTimeRange dateTimeRange = DateTimeRange.max(); + final List fields = db.getFields(dateTimeRange); + + fields.sort(Collator.getInstance(Locale.ENGLISH)); + + return fields; + } + + @RequestMapping(path = "/fields/{fieldName}/values", // + method = RequestMethod.GET, // + consumes = MediaType.APPLICATION_JSON_VALUE, // + produces = MediaType.APPLICATION_JSON_VALUE // + ) + @ResponseBody + SortedSet fields(@PathVariable(name = "fieldName") final String fieldName, + @RequestParam(name = "query") final String query) { + + // TODO get date range from UI + final String q = query.isBlank()// + ? String.format("%s = ", fieldName)// + : String.format("(%s) and %s=", query, fieldName); + final int zeroBasedCaretIndex = q.length(); + final DateTimeRange dateRange = DateTimeRange.max(); + final QueryWithCaretMarker autocompleteQuery = new QueryWithCaretMarker(q, dateRange, zeroBasedCaretIndex, + ResultMode.FULL_VALUES); + + final List result = db.autocomplete(autocompleteQuery); + + final SortedSet fields = CollectionUtils.map(result, new TreeSet<>(), Proposal::getProposedTag); + + return fields; + } + + @RequestMapping(path = "/filters/defaults", // + method = RequestMethod.GET, // + produces = MediaType.APPLICATION_JSON_VALUE // + ) + @ResponseBody + public FilterDefaults getFilterDefaults() { + final Set groupBy = defaultsGroupBy.isBlank() ? 
Set.of() : Set.of(defaultsGroupBy.split("\\s*,\\s*")); + final List fields = fields(); + return new FilterDefaults(fields, groupBy, defaultsSplitBy); + } + + private List toAutocompleteProposals(final List proposals) { + + final List result = new ArrayList<>(); + + for (final Proposal proposal : proposals) { + final AutocompleteProposal e = new AutocompleteProposal(); + e.setValue(proposal.getProposedTag()); + e.setNewQuery(proposal.getNewQuery()); + e.setNewCaretPosition(proposal.getNewCaretPosition()); + + result.add(e); + } + + return result; + } } diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/PdbWebapp.java b/pdb-ui/src/main/java/org/lucares/pdbui/PdbWebapp.java index 015a51c..2996a4d 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/PdbWebapp.java +++ b/pdb-ui/src/main/java/org/lucares/pdbui/PdbWebapp.java @@ -6,24 +6,22 @@ import org.springframework.boot.SpringApplication; public class PdbWebapp { - public static void main(final String[] args) throws Exception { - SpringApplication.run(MySpringConfiguration.class, args); - - Thread t = new Thread(()-> { - - - while(true){ - try{ - TimeUnit.MINUTES.sleep(10); - }catch(InterruptedException e) - { - // ignore - } - System.gc(); - } - }); - t.setDaemon(true); - t.setName("periodic-gc"); - t.start(); - } + public static void main(final String[] args) throws Exception { + SpringApplication.run(MySpringConfiguration.class, args); + + Thread t = new Thread(() -> { + + while (true) { + try { + TimeUnit.MINUTES.sleep(10); + } catch (InterruptedException e) { + // ignore + } + System.gc(); + } + }); + t.setDaemon(true); + t.setName("periodic-gc"); + t.start(); + } } diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/PlotSettingsTransformer.java b/pdb-ui/src/main/java/org/lucares/pdbui/PlotSettingsTransformer.java index e73c48e..958f081 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/PlotSettingsTransformer.java +++ b/pdb-ui/src/main/java/org/lucares/pdbui/PlotSettingsTransformer.java @@ -12,76 +12,79 @@ 
import org.lucares.pdbui.domain.PlotRequest; import org.lucares.pdbui.domain.TimeRangeUnit; class PlotSettingsTransformer { - static PlotSettings toSettings(final PlotRequest request) { + static PlotSettings toSettings(final PlotRequest request) { - final PlotSettings result = new PlotSettings(); + final PlotSettings result = new PlotSettings(); - result.setQuery(request.getQuery()); - result.setGroupBy(request.getGroupBy()); - result.setHeight(request.getHeight()); - result.setWidth(request.getWidth()); - result.setLimit(request.getLimit()); - result.setLimitBy(request.getLimitBy()); - result.setDateRange(request.getDateRange()); - result.setYAxisScale(request.getAxisScale()); - result.setKeyOutside(request.isKeyOutside()); - result.setThumbnailMaxWidth(request.getThumbnailMaxWidth()); - result.setThumbnailMaxHeight(request.getThumbnailMaxHeight()); - result.setGenerateThumbnail(request.isGenerateThumbnail()); - result.setYRangeMin(request.getyRangeMin()); - result.setYRangeMax(request.getyRangeMax()); - result.setYRangeUnit(toTimeRangeUnitInternal(request.getyRangeUnit())); - result.setAggregates(toAggregateInternal(result.getYRangeUnit(), result.getYAxisScale(), request.getAggregates())); + result.setQuery(request.getQuery()); + result.setGroupBy(request.getGroupBy()); + result.setHeight(request.getHeight()); + result.setWidth(request.getWidth()); + result.setLimit(request.getLimit()); + result.setLimitBy(request.getLimitBy()); + result.setDateRange(request.getDateRange()); + result.setYAxisScale(request.getAxisScale()); + result.setKeyOutside(request.isKeyOutside()); + result.setThumbnailMaxWidth(request.getThumbnailMaxWidth()); + result.setThumbnailMaxHeight(request.getThumbnailMaxHeight()); + result.setGenerateThumbnail(request.isGenerateThumbnail()); + result.setYRangeMin(request.getyRangeMin()); + result.setYRangeMax(request.getyRangeMax()); + result.setYRangeUnit(toTimeRangeUnitInternal(request.getyRangeUnit())); + result.setAggregates( + 
toAggregateInternal(result.getYRangeUnit(), result.getYAxisScale(), request.getAggregates())); - return result; - } - - private static TimeRangeUnitInternal toTimeRangeUnitInternal(final TimeRangeUnit yRangeUnit) { - switch (yRangeUnit) { - case AUTOMATIC: - return TimeRangeUnitInternal.AUTOMATIC; - case MILLISECONDS: - return TimeRangeUnitInternal.MILLISECONDS; - case SECONDS: - return TimeRangeUnitInternal.SECONDS; - case MINUTES: - return TimeRangeUnitInternal.MINUTES; - case HOURS: - return TimeRangeUnitInternal.HOURS; - case DAYS: - return TimeRangeUnitInternal.DAYS; + return result; } - throw new IllegalStateException("unhandled enum value: " + yRangeUnit); - } - static AggregateHandlerCollection toAggregateInternal(TimeRangeUnitInternal yRangeUnit, AxisScale yAxisScale, - final Iterable aggregates) { - final AggregateHandlerCollection aggregateHandlerCollection = new AggregateHandlerCollection(); - - for (Aggregate aggregate : aggregates) { - - switch (aggregate) { - case CUM_DISTRIBUTION: - aggregateHandlerCollection.add(new CumulativeDistributionHandler()); - break; - case PARALLEL: - aggregateHandlerCollection.add(new ParallelRequestsAggregate()); - break; - case SCATTER: - if (yRangeUnit == TimeRangeUnitInternal.AUTOMATIC && yAxisScale == AxisScale.LINEAR) { - // TODO need a second ScatterAggregateHandler for YRangeUnit() == TimeRangeUnitInternal.AUTOMATIC - throw new UnsupportedOperationException("linear axis with automatic y range does not work, use logarthmic y-axis, or define a y-axis range"); - }else { - aggregateHandlerCollection.add(new ScatterAggregateHandler()); + private static TimeRangeUnitInternal toTimeRangeUnitInternal(final TimeRangeUnit yRangeUnit) { + switch (yRangeUnit) { + case AUTOMATIC: + return TimeRangeUnitInternal.AUTOMATIC; + case MILLISECONDS: + return TimeRangeUnitInternal.MILLISECONDS; + case SECONDS: + return TimeRangeUnitInternal.SECONDS; + case MINUTES: + return TimeRangeUnitInternal.MINUTES; + case HOURS: + return 
TimeRangeUnitInternal.HOURS; + case DAYS: + return TimeRangeUnitInternal.DAYS; } - break; - default: - throw new IllegalStateException("unhandled enum: " + aggregate); - } + throw new IllegalStateException("unhandled enum value: " + yRangeUnit); } - aggregateHandlerCollection.updateAxisForHandlers(); - - return aggregateHandlerCollection; - } + static AggregateHandlerCollection toAggregateInternal(TimeRangeUnitInternal yRangeUnit, AxisScale yAxisScale, + final Iterable aggregates) { + final AggregateHandlerCollection aggregateHandlerCollection = new AggregateHandlerCollection(); + + for (Aggregate aggregate : aggregates) { + + switch (aggregate) { + case CUM_DISTRIBUTION: + aggregateHandlerCollection.add(new CumulativeDistributionHandler()); + break; + case PARALLEL: + aggregateHandlerCollection.add(new ParallelRequestsAggregate()); + break; + case SCATTER: + if (yRangeUnit == TimeRangeUnitInternal.AUTOMATIC && yAxisScale == AxisScale.LINEAR) { + // TODO need a second ScatterAggregateHandler for YRangeUnit() == + // TimeRangeUnitInternal.AUTOMATIC + throw new UnsupportedOperationException( + "linear axis with automatic y range does not work, use logarthmic y-axis, or define a y-axis range"); + } else { + aggregateHandlerCollection.add(new ScatterAggregateHandler()); + } + break; + default: + throw new IllegalStateException("unhandled enum: " + aggregate); + } + } + + aggregateHandlerCollection.updateAxisForHandlers(); + + return aggregateHandlerCollection; + } } diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/PlotterBeanFactory.java b/pdb-ui/src/main/java/org/lucares/pdbui/PlotterBeanFactory.java index 3f2dd1f..bd02615 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/PlotterBeanFactory.java +++ b/pdb-ui/src/main/java/org/lucares/pdbui/PlotterBeanFactory.java @@ -14,29 +14,29 @@ import org.springframework.stereotype.Component; @Component public class PlotterBeanFactory extends AbstractFactoryBean implements PropertyKeys { - private final PerformanceDb db; - 
private final Path tmpDir; - private final Path outputDir; + private final PerformanceDb db; + private final Path tmpDir; + private final Path outputDir; - @Autowired - public PlotterBeanFactory(final PerformanceDb db, @Value("${" + TMP_DIR + "}") final String tmpDir, - @Value("${" + PATH_GENERATED_IMAGES + "}") final String outputDir) { - this.db = db; - this.tmpDir = Paths.get(tmpDir); - this.outputDir = Paths.get(outputDir); - } + @Autowired + public PlotterBeanFactory(final PerformanceDb db, @Value("${" + TMP_DIR + "}") final String tmpDir, + @Value("${" + PATH_GENERATED_IMAGES + "}") final String outputDir) { + this.db = db; + this.tmpDir = Paths.get(tmpDir); + this.outputDir = Paths.get(outputDir); + } - @Override - public Class getObjectType() { - return Plotter.class; - } + @Override + public Class getObjectType() { + return Plotter.class; + } - @Override - protected Plotter createInstance() throws Exception { - Files.createDirectories(tmpDir); - Files.createDirectories(outputDir); + @Override + protected Plotter createInstance() throws Exception { + Files.createDirectories(tmpDir); + Files.createDirectories(outputDir); - return new Plotter(db, tmpDir, outputDir); - } + return new Plotter(db, tmpDir, outputDir); + } } diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/PropertyKeys.java b/pdb-ui/src/main/java/org/lucares/pdbui/PropertyKeys.java index 986ec1d..c0c7929 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/PropertyKeys.java +++ b/pdb-ui/src/main/java/org/lucares/pdbui/PropertyKeys.java @@ -2,25 +2,25 @@ package org.lucares.pdbui; public interface PropertyKeys { - /** - * The path for generated images - */ - String PATH_GENERATED_IMAGES = "path.output"; + /** + * The path for generated images + */ + String PATH_GENERATED_IMAGES = "path.output"; - /** - * Path for temporary files - */ - String TMP_DIR = "path.tmp"; + /** + * Path for temporary files + */ + String TMP_DIR = "path.tmp"; - /** - * Indicates whether or not this instance is running in 
production. This - * property is used to switch Vue.js into production or development mode. - */ - String PRODUCTION_MODE = "mode.production"; - - String DEFAULTS_QUERY_EXAMPLES = "defaults.query.examples"; - - String DEFAULTS_GROUP_BY = "defaults.groupBy"; - - String DEFAULTS_SPLIT_BY = "defaults.splitBy"; + /** + * Indicates whether or not this instance is running in production. This + * property is used to switch Vue.js into production or development mode. + */ + String PRODUCTION_MODE = "mode.production"; + + String DEFAULTS_QUERY_EXAMPLES = "defaults.query.examples"; + + String DEFAULTS_GROUP_BY = "defaults.groupBy"; + + String DEFAULTS_SPLIT_BY = "defaults.splitBy"; } diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/ServiceUnavailableException.java b/pdb-ui/src/main/java/org/lucares/pdbui/ServiceUnavailableException.java index d8d67f0..7f1adde 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/ServiceUnavailableException.java +++ b/pdb-ui/src/main/java/org/lucares/pdbui/ServiceUnavailableException.java @@ -6,9 +6,9 @@ import org.springframework.web.bind.annotation.ResponseStatus; @ResponseStatus(value = HttpStatus.SERVICE_UNAVAILABLE, reason = "Service Unavailable") public class ServiceUnavailableException extends RuntimeException { - private static final long serialVersionUID = -4512668277873760500L; + private static final long serialVersionUID = -4512668277873760500L; - public ServiceUnavailableException(String message) { - super(message); - } + public ServiceUnavailableException(String message) { + super(message); + } } diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/TcpIngestor.java b/pdb-ui/src/main/java/org/lucares/pdbui/TcpIngestor.java index d98dbe1..184b682 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/TcpIngestor.java +++ b/pdb-ui/src/main/java/org/lucares/pdbui/TcpIngestor.java @@ -44,391 +44,391 @@ import com.fasterxml.jackson.core.JsonParseException; @Component public class TcpIngestor implements Ingestor, AutoCloseable, 
DisposableBean { - private static final Logger LOGGER = LoggerFactory.getLogger(TcpIngestor.class); + private static final Logger LOGGER = LoggerFactory.getLogger(TcpIngestor.class); - public static final int PORT = 17347; + public static final int PORT = 17347; - private final AtomicBoolean acceptNewConnections = new AtomicBoolean(true); + private final AtomicBoolean acceptNewConnections = new AtomicBoolean(true); - private final ExecutorService serverThreadPool = Executors.newFixedThreadPool(2); - - private final ExecutorService workerThreadPool = Executors.newCachedThreadPool(); - - private final PerformanceDb db; - - public final static class Handler implements Callable { + private final ExecutorService serverThreadPool = Executors.newFixedThreadPool(2); + + private final ExecutorService workerThreadPool = Executors.newCachedThreadPool(); + + private final PerformanceDb db; + + public final static class Handler implements Callable { - /** - * Column header names starting with "-" will be ignored. 
- */ - static final String COLUM_IGNORE_PREFIX = "-"; - private static final int IGNORE_COLUMN = 0; - final Socket clientSocket; - private final ArrayBlockingQueue queue; - - public Handler(final Socket clientSocket, final ArrayBlockingQueue queue) { - this.clientSocket = clientSocket; - this.queue = queue; - } - - @Override - public Void call() throws Exception { - final SocketAddress clientAddress = clientSocket.getRemoteSocketAddress(); - Thread.currentThread().setName("worker-" + clientAddress); - LOGGER.debug("opening streams to client"); - try (PrintWriter out = new PrintWriter(clientSocket.getOutputStream(), true); - InputStream in = new BufferedInputStream(clientSocket.getInputStream());) { - - LOGGER.debug("reading from stream"); - redirectInputStream(in); - - LOGGER.debug("connection closed: " + clientAddress); - } catch (final Throwable e) { - LOGGER.warn("Stream handling failed", e); - throw e; - } - - return null; - } - - private void redirectInputStream(final InputStream in) throws IOException, InterruptedException { - in.mark(1); - final byte firstByte = (byte) in.read(); - if (firstByte == '{') { - readJSON(in); - } else if (firstByte == PdbExport.MAGIC_BYTE) { - - readCustomExportFormat(in); - } else if (isGZIP(firstByte)) { - in.reset(); - final GZIPInputStream gzip = new GZIPInputStream(in); - - redirectInputStream(gzip); - } else { - readCSV(in, firstByte); - } - } - - private boolean isGZIP(final byte firstByte) { - // GZIP starts with 0x1f, 0x8b, see https://www.ietf.org/rfc/rfc1952.txt section - // 2.3.1 - // I am cheap and only check the first byte - return firstByte == 0x1f; - } - - private void readCustomExportFormat(final InputStream in) throws IOException { - - final CustomExportFormatToEntryTransformer transformer = new CustomExportFormatToEntryTransformer(); - - final BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8)); - transformer.read(reader, queue); - - } - - private void readCSV(final 
InputStream in, final byte firstByte) throws IOException, InterruptedException { - final int chunksize = 1000; - Entries entries = new Entries(chunksize); - - final byte newline = '\n'; - final byte[] line = new byte[4096]; // max line length - line[0] = firstByte; - int offsetInLine = 1; // because the first byte is already set - int offsetInBuffer = 0; - final IntList separatorPositions = new IntList(); - - int read = 0; - int bytesInLine = 0; - - int[] columns = null; - final byte[] buffer = new byte[4096 * 16]; - final int keyTimestamp = Tags.STRING_COMPRESSOR.put("@timestamp"); - final int keyDuration = Tags.STRING_COMPRESSOR.put("duration"); - final FastISODateParser dateParser = new FastISODateParser(); - - while ((read = in.read(buffer)) >= 0) { - offsetInBuffer = 0; - - for (int i = 0; i < read; i++) { - if (buffer[i] == newline) { - final int length = i - offsetInBuffer; - System.arraycopy(buffer, offsetInBuffer, line, offsetInLine, length); - bytesInLine = offsetInLine + length; - separatorPositions.add(offsetInLine + i - offsetInBuffer); - - if (columns != null) { - - final Entry entry = handleCsvLine(columns, line, bytesInLine, separatorPositions, - keyTimestamp, keyDuration, dateParser); - if (entry != null) { - entries.add(entry); - } - if (entries.size() >= chunksize) { - queue.put(entries); - entries = new Entries(chunksize); - } - } else { - columns = handleCsvHeaderLine(line, bytesInLine, separatorPositions); - } - - offsetInBuffer = i + 1; - offsetInLine = 0; - bytesInLine = 0; - separatorPositions.clear(); - } else if (buffer[i] == ',') { - separatorPositions.add(offsetInLine + i - offsetInBuffer); - } - } - if (offsetInBuffer < read) { - final int length = read - offsetInBuffer; - System.arraycopy(buffer, offsetInBuffer, line, offsetInLine, length); - bytesInLine = offsetInLine + length; - offsetInLine += length; - offsetInBuffer = 0; - - } - } - final Entry entry = handleCsvLine(columns, line, bytesInLine, separatorPositions, keyTimestamp, 
keyDuration, - dateParser); - if (entry != null) { - entries.add(entry); - } - queue.put(entries); - } - - private int[] handleCsvHeaderLine(final byte[] line, final int bytesInLine, final IntList separatorPositions) { - - final int[] columns = new int[separatorPositions.size()]; - - int lastSeparatorPosition = -1; - final int size = separatorPositions.size(); - for (int i = 0; i < size; i++) { - final int separatorPosition = separatorPositions.get(i); - - final int compressedString = Tags.STRING_COMPRESSOR.put(line, lastSeparatorPosition + 1, - separatorPosition); - final String columnName = Tags.STRING_COMPRESSOR.get(compressedString); - - columns[i] = ignoreColum(columnName) ? IGNORE_COLUMN : compressedString; - - lastSeparatorPosition = separatorPosition; - } - return columns; - } - - private boolean ignoreColum(final String columnName) { - return columnName.startsWith(COLUM_IGNORE_PREFIX); - } - - private static Entry handleCsvLine(final int[] columns, final byte[] line, final int bytesInLine, - final IntList separatorPositions, final int keyTimestamp, final int keyDuration, - final FastISODateParser dateParser) { - try { - if (separatorPositions.size() != columns.length) { - return null; - } - final TagsBuilder tagsBuilder = new TagsBuilder(); - int lastSeparatorPosition = -1; - final int size = separatorPositions.size(); - long epochMilli = -1; - long duration = -1; - for (int i = 0; i < size; i++) { - final int separatorPosition = separatorPositions.get(i); - final int key = columns[i]; - - if (key == IGNORE_COLUMN) { - // this column's value will not be ingested - } else if (key == keyTimestamp) { - epochMilli = dateParser.parseAsEpochMilli(line, lastSeparatorPosition + 1); - } else if (key == keyDuration) { - duration = parseLong(line, lastSeparatorPosition + 1, separatorPosition); - } else if (lastSeparatorPosition + 1 < separatorPosition) { // value is not empty - final int value = Tags.STRING_COMPRESSOR.put(line, lastSeparatorPosition + 1, - 
separatorPosition); - - tagsBuilder.add(key, value); - } - lastSeparatorPosition = separatorPosition; - } - final Tags tags = tagsBuilder.build(); - return new Entry(epochMilli, duration, tags); - } catch (final RuntimeException e) { - LOGGER.debug("ignoring invalid line '" + new String(line, 0, bytesInLine, StandardCharsets.UTF_8) + "'", - e); - } - return null; - } - - private static long parseLong(final byte[] bytes, final int start, int endExclusive) { - long result = 0; - int i = start; - int c = bytes[i]; - int sign = 1; - if (c == '-') { - sign = -1; - i++; - } - while (i < endExclusive && (c = bytes[i]) >= 48 && c <= 57) { - result = result * 10 + (c - 48); - i++; - } - return sign * result; - } - - private void readJSON(final InputStream in) throws IOException, InterruptedException { - final int chunksize = 100; - Entries entries = new Entries(chunksize); - - final BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8)); - - String line = "{" + reader.readLine(); - - final JsonToEntryTransformer transformer = new JsonToEntryTransformer(); - final Optional firstEntry = transformer.toEntry(line); - if (firstEntry.isPresent()) { - LOGGER.debug("adding entry to queue: {}", firstEntry); - entries.add(firstEntry.get()); - } - - while ((line = reader.readLine()) != null) { - - try { - final Optional entry = transformer.toEntry(line); - - if (entry.isPresent()) { - LOGGER.debug("adding entry to queue: {}", entry); - entries.add(entry.get()); - } - } catch (final JsonParseException e) { - LOGGER.info("json parse error in line '" + line + "'", e); - } - - if (entries.size() == chunksize) { - queue.put(entries); - entries = new Entries(chunksize); - } - } - queue.put(entries); - - } - } - - public TcpIngestor(final Path dataDirectory) throws IOException { - LOGGER.info("opening performance db: " + dataDirectory); - db = new PerformanceDb(dataDirectory); - LOGGER.debug("performance db open"); - } - - @Autowired - public 
TcpIngestor(final PerformanceDb db) { - this.db = db; - } - - public PerformanceDb getDb() { - return db; - } - - @Async - @Override - public void start() throws Exception { - - final ArrayBlockingQueue queue = new ArrayBlockingQueue<>(10); - - serverThreadPool.submit(() -> { - Thread.currentThread().setName("db-ingestion"); - - boolean finished = false; - while (!finished) { - try { - db.putEntries(new BlockingQueueIterator<>(queue, Entries.POISON)); - finished = true; - } catch (final Exception e) { - LOGGER.warn("Write to database failed. Will retry with the next element.", e); - } - } - return null; - }); - - serverThreadPool.submit(() -> listen(queue)); - } - - private Void listen(final ArrayBlockingQueue queue) throws IOException { - Thread.currentThread().setName("socket-listener"); - try (ServerSocket serverSocket = new ServerSocket(PORT);) { - LOGGER.info("listening on port " + PORT); - - serverSocket.setSoTimeout((int) TimeUnit.MILLISECONDS.toMillis(2)); - - while (acceptNewConnections.get()) { - try { - final Socket clientSocket = serverSocket.accept(); - LOGGER.debug("accepted connection: " + clientSocket.getRemoteSocketAddress()); - - workerThreadPool.submit(new Handler(clientSocket, queue)); - LOGGER.debug("handler submitted"); - } catch (final SocketTimeoutException e) { - // expected every 100ms - // needed to be able to stop the server - } catch (final Exception e) { - LOGGER.warn("Exception caught while waiting for a new connection. " - + "We'll ignore this error and keep going.", e); - } - } - LOGGER.info("not accepting new connections. "); - - LOGGER.info("stopping worker pool"); - workerThreadPool.shutdown(); - try { - workerThreadPool.awaitTermination(10, TimeUnit.MINUTES); - LOGGER.debug("workers stopped"); - } catch (final InterruptedException e) { - Thread.interrupted(); - } - LOGGER.debug("adding poison"); - queue.put(Entries.POISON); - } catch (final InterruptedException e) { - LOGGER.info("Listener thread interrupted. 
Likely while adding the poison. " - + "That would mean that the db-ingestion thread will not terminate. "); - Thread.interrupted(); - } catch (final Exception e) { - LOGGER.error("", e); - throw e; - } - return null; - } - - @Override - @PreDestroy - public void destroy() { - close(); - } - - @Override - public void close() { - LOGGER.debug("stopping accept thread"); - acceptNewConnections.set(false); - serverThreadPool.shutdown(); - try { - serverThreadPool.awaitTermination(10, TimeUnit.MINUTES); - } catch (final InterruptedException e) { - Thread.interrupted(); - } - LOGGER.debug("closing database"); - db.close(); - LOGGER.info("destroyed"); - } - - public static void main(final String[] args) throws Exception { - - Runtime.getRuntime().addShutdownHook(new Thread() { - @Override - public void run() { - LOGGER.info("shutdown hook"); - } - }); - - try (final TcpIngestor ingestor = new TcpIngestor(Config.DATA_DIR)) { - ingestor.start(); - TimeUnit.MILLISECONDS.sleep(Long.MAX_VALUE); - } - } + /** + * Column header names starting with "-" will be ignored. 
+ */ + static final String COLUM_IGNORE_PREFIX = "-"; + private static final int IGNORE_COLUMN = 0; + final Socket clientSocket; + private final ArrayBlockingQueue queue; + + public Handler(final Socket clientSocket, final ArrayBlockingQueue queue) { + this.clientSocket = clientSocket; + this.queue = queue; + } + + @Override + public Void call() throws Exception { + final SocketAddress clientAddress = clientSocket.getRemoteSocketAddress(); + Thread.currentThread().setName("worker-" + clientAddress); + LOGGER.debug("opening streams to client"); + try (PrintWriter out = new PrintWriter(clientSocket.getOutputStream(), true); + InputStream in = new BufferedInputStream(clientSocket.getInputStream());) { + + LOGGER.debug("reading from stream"); + redirectInputStream(in); + + LOGGER.debug("connection closed: " + clientAddress); + } catch (final Throwable e) { + LOGGER.warn("Stream handling failed", e); + throw e; + } + + return null; + } + + private void redirectInputStream(final InputStream in) throws IOException, InterruptedException { + in.mark(1); + final byte firstByte = (byte) in.read(); + if (firstByte == '{') { + readJSON(in); + } else if (firstByte == PdbExport.MAGIC_BYTE) { + + readCustomExportFormat(in); + } else if (isGZIP(firstByte)) { + in.reset(); + final GZIPInputStream gzip = new GZIPInputStream(in); + + redirectInputStream(gzip); + } else { + readCSV(in, firstByte); + } + } + + private boolean isGZIP(final byte firstByte) { + // GZIP starts with 0x1f, 0x8b, see https://www.ietf.org/rfc/rfc1952.txt section + // 2.3.1 + // I am cheap and only check the first byte + return firstByte == 0x1f; + } + + private void readCustomExportFormat(final InputStream in) throws IOException { + + final CustomExportFormatToEntryTransformer transformer = new CustomExportFormatToEntryTransformer(); + + final BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8)); + transformer.read(reader, queue); + + } + + private void readCSV(final 
InputStream in, final byte firstByte) throws IOException, InterruptedException { + final int chunksize = 1000; + Entries entries = new Entries(chunksize); + + final byte newline = '\n'; + final byte[] line = new byte[4096]; // max line length + line[0] = firstByte; + int offsetInLine = 1; // because the first byte is already set + int offsetInBuffer = 0; + final IntList separatorPositions = new IntList(); + + int read = 0; + int bytesInLine = 0; + + int[] columns = null; + final byte[] buffer = new byte[4096 * 16]; + final int keyTimestamp = Tags.STRING_COMPRESSOR.put("@timestamp"); + final int keyDuration = Tags.STRING_COMPRESSOR.put("duration"); + final FastISODateParser dateParser = new FastISODateParser(); + + while ((read = in.read(buffer)) >= 0) { + offsetInBuffer = 0; + + for (int i = 0; i < read; i++) { + if (buffer[i] == newline) { + final int length = i - offsetInBuffer; + System.arraycopy(buffer, offsetInBuffer, line, offsetInLine, length); + bytesInLine = offsetInLine + length; + separatorPositions.add(offsetInLine + i - offsetInBuffer); + + if (columns != null) { + + final Entry entry = handleCsvLine(columns, line, bytesInLine, separatorPositions, + keyTimestamp, keyDuration, dateParser); + if (entry != null) { + entries.add(entry); + } + if (entries.size() >= chunksize) { + queue.put(entries); + entries = new Entries(chunksize); + } + } else { + columns = handleCsvHeaderLine(line, bytesInLine, separatorPositions); + } + + offsetInBuffer = i + 1; + offsetInLine = 0; + bytesInLine = 0; + separatorPositions.clear(); + } else if (buffer[i] == ',') { + separatorPositions.add(offsetInLine + i - offsetInBuffer); + } + } + if (offsetInBuffer < read) { + final int length = read - offsetInBuffer; + System.arraycopy(buffer, offsetInBuffer, line, offsetInLine, length); + bytesInLine = offsetInLine + length; + offsetInLine += length; + offsetInBuffer = 0; + + } + } + final Entry entry = handleCsvLine(columns, line, bytesInLine, separatorPositions, keyTimestamp, 
keyDuration, + dateParser); + if (entry != null) { + entries.add(entry); + } + queue.put(entries); + } + + private int[] handleCsvHeaderLine(final byte[] line, final int bytesInLine, final IntList separatorPositions) { + + final int[] columns = new int[separatorPositions.size()]; + + int lastSeparatorPosition = -1; + final int size = separatorPositions.size(); + for (int i = 0; i < size; i++) { + final int separatorPosition = separatorPositions.get(i); + + final int compressedString = Tags.STRING_COMPRESSOR.put(line, lastSeparatorPosition + 1, + separatorPosition); + final String columnName = Tags.STRING_COMPRESSOR.get(compressedString); + + columns[i] = ignoreColum(columnName) ? IGNORE_COLUMN : compressedString; + + lastSeparatorPosition = separatorPosition; + } + return columns; + } + + private boolean ignoreColum(final String columnName) { + return columnName.startsWith(COLUM_IGNORE_PREFIX); + } + + private static Entry handleCsvLine(final int[] columns, final byte[] line, final int bytesInLine, + final IntList separatorPositions, final int keyTimestamp, final int keyDuration, + final FastISODateParser dateParser) { + try { + if (separatorPositions.size() != columns.length) { + return null; + } + final TagsBuilder tagsBuilder = new TagsBuilder(); + int lastSeparatorPosition = -1; + final int size = separatorPositions.size(); + long epochMilli = -1; + long duration = -1; + for (int i = 0; i < size; i++) { + final int separatorPosition = separatorPositions.get(i); + final int key = columns[i]; + + if (key == IGNORE_COLUMN) { + // this column's value will not be ingested + } else if (key == keyTimestamp) { + epochMilli = dateParser.parseAsEpochMilli(line, lastSeparatorPosition + 1); + } else if (key == keyDuration) { + duration = parseLong(line, lastSeparatorPosition + 1, separatorPosition); + } else if (lastSeparatorPosition + 1 < separatorPosition) { // value is not empty + final int value = Tags.STRING_COMPRESSOR.put(line, lastSeparatorPosition + 1, + 
separatorPosition); + + tagsBuilder.add(key, value); + } + lastSeparatorPosition = separatorPosition; + } + final Tags tags = tagsBuilder.build(); + return new Entry(epochMilli, duration, tags); + } catch (final RuntimeException e) { + LOGGER.debug("ignoring invalid line '" + new String(line, 0, bytesInLine, StandardCharsets.UTF_8) + "'", + e); + } + return null; + } + + private static long parseLong(final byte[] bytes, final int start, int endExclusive) { + long result = 0; + int i = start; + int c = bytes[i]; + int sign = 1; + if (c == '-') { + sign = -1; + i++; + } + while (i < endExclusive && (c = bytes[i]) >= 48 && c <= 57) { + result = result * 10 + (c - 48); + i++; + } + return sign * result; + } + + private void readJSON(final InputStream in) throws IOException, InterruptedException { + final int chunksize = 100; + Entries entries = new Entries(chunksize); + + final BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8)); + + String line = "{" + reader.readLine(); + + final JsonToEntryTransformer transformer = new JsonToEntryTransformer(); + final Optional firstEntry = transformer.toEntry(line); + if (firstEntry.isPresent()) { + LOGGER.debug("adding entry to queue: {}", firstEntry); + entries.add(firstEntry.get()); + } + + while ((line = reader.readLine()) != null) { + + try { + final Optional entry = transformer.toEntry(line); + + if (entry.isPresent()) { + LOGGER.debug("adding entry to queue: {}", entry); + entries.add(entry.get()); + } + } catch (final JsonParseException e) { + LOGGER.info("json parse error in line '" + line + "'", e); + } + + if (entries.size() == chunksize) { + queue.put(entries); + entries = new Entries(chunksize); + } + } + queue.put(entries); + + } + } + + public TcpIngestor(final Path dataDirectory) throws IOException { + LOGGER.info("opening performance db: " + dataDirectory); + db = new PerformanceDb(dataDirectory); + LOGGER.debug("performance db open"); + } + + @Autowired + public 
TcpIngestor(final PerformanceDb db) { + this.db = db; + } + + public PerformanceDb getDb() { + return db; + } + + @Async + @Override + public void start() throws Exception { + + final ArrayBlockingQueue queue = new ArrayBlockingQueue<>(10); + + serverThreadPool.submit(() -> { + Thread.currentThread().setName("db-ingestion"); + + boolean finished = false; + while (!finished) { + try { + db.putEntries(new BlockingQueueIterator<>(queue, Entries.POISON)); + finished = true; + } catch (final Exception e) { + LOGGER.warn("Write to database failed. Will retry with the next element.", e); + } + } + return null; + }); + + serverThreadPool.submit(() -> listen(queue)); + } + + private Void listen(final ArrayBlockingQueue queue) throws IOException { + Thread.currentThread().setName("socket-listener"); + try (ServerSocket serverSocket = new ServerSocket(PORT);) { + LOGGER.info("listening on port " + PORT); + + serverSocket.setSoTimeout((int) TimeUnit.MILLISECONDS.toMillis(2)); + + while (acceptNewConnections.get()) { + try { + final Socket clientSocket = serverSocket.accept(); + LOGGER.debug("accepted connection: " + clientSocket.getRemoteSocketAddress()); + + workerThreadPool.submit(new Handler(clientSocket, queue)); + LOGGER.debug("handler submitted"); + } catch (final SocketTimeoutException e) { + // expected every 100ms + // needed to be able to stop the server + } catch (final Exception e) { + LOGGER.warn("Exception caught while waiting for a new connection. " + + "We'll ignore this error and keep going.", e); + } + } + LOGGER.info("not accepting new connections. "); + + LOGGER.info("stopping worker pool"); + workerThreadPool.shutdown(); + try { + workerThreadPool.awaitTermination(10, TimeUnit.MINUTES); + LOGGER.debug("workers stopped"); + } catch (final InterruptedException e) { + Thread.interrupted(); + } + LOGGER.debug("adding poison"); + queue.put(Entries.POISON); + } catch (final InterruptedException e) { + LOGGER.info("Listener thread interrupted. 
Likely while adding the poison. " + + "That would mean that the db-ingestion thread will not terminate. "); + Thread.interrupted(); + } catch (final Exception e) { + LOGGER.error("", e); + throw e; + } + return null; + } + + @Override + @PreDestroy + public void destroy() { + close(); + } + + @Override + public void close() { + LOGGER.debug("stopping accept thread"); + acceptNewConnections.set(false); + serverThreadPool.shutdown(); + try { + serverThreadPool.awaitTermination(10, TimeUnit.MINUTES); + } catch (final InterruptedException e) { + Thread.interrupted(); + } + LOGGER.debug("closing database"); + db.close(); + LOGGER.info("destroyed"); + } + + public static void main(final String[] args) throws Exception { + + Runtime.getRuntime().addShutdownHook(new Thread() { + @Override + public void run() { + LOGGER.info("shutdown hook"); + } + }); + + try (final TcpIngestor ingestor = new TcpIngestor(Config.DATA_DIR)) { + ingestor.start(); + TimeUnit.MILLISECONDS.sleep(Long.MAX_VALUE); + } + } } diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/WebConfiguration.java b/pdb-ui/src/main/java/org/lucares/pdbui/WebConfiguration.java index 89dabd2..75649e9 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/WebConfiguration.java +++ b/pdb-ui/src/main/java/org/lucares/pdbui/WebConfiguration.java @@ -10,18 +10,18 @@ import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; @Configuration public class WebConfiguration implements WebMvcConfigurer, HardcodedValues, PropertyKeys { - private final String outputDir; + private final String outputDir; - public WebConfiguration(@Value("${" + PATH_GENERATED_IMAGES + "}") final String outputDir) { - this.outputDir = outputDir; - } + public WebConfiguration(@Value("${" + PATH_GENERATED_IMAGES + "}") final String outputDir) { + this.outputDir = outputDir; + } - @Override - public void addResourceHandlers(final ResourceHandlerRegistry registry) { + @Override + public void addResourceHandlers(final ResourceHandlerRegistry 
registry) { - final String pathPattern = "/" + WEB_IMAGE_OUTPUT_PATH + "/**"; - final String resourceLocation = "file:" + Paths.get(outputDir).toAbsolutePath() + "/"; + final String pathPattern = "/" + WEB_IMAGE_OUTPUT_PATH + "/**"; + final String resourceLocation = "file:" + Paths.get(outputDir).toAbsolutePath() + "/"; - registry.addResourceHandler(pathPattern).addResourceLocations(resourceLocation); - } + registry.addResourceHandler(pathPattern).addResourceLocations(resourceLocation); + } } diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/date/FastISODateParser.java b/pdb-ui/src/main/java/org/lucares/pdbui/date/FastISODateParser.java index 91d6d7c..7418e25 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/date/FastISODateParser.java +++ b/pdb-ui/src/main/java/org/lucares/pdbui/date/FastISODateParser.java @@ -14,271 +14,271 @@ import java.util.concurrent.ConcurrentHashMap; */ public class FastISODateParser { - private final static ConcurrentHashMap EPOCH_MILLI_MONTH_OFFSETS = new ConcurrentHashMap<>(); + private final static ConcurrentHashMap EPOCH_MILLI_MONTH_OFFSETS = new ConcurrentHashMap<>(); - private int cached_epochMilliMonthOffsetKey = 0; - private long cached_epochMilliMonthOffset = 0; + private int cached_epochMilliMonthOffsetKey = 0; + private long cached_epochMilliMonthOffset = 0; - /** - * Parsing ISO-8601 like dates, e.g. 2011-12-03T10:15:30.123Z or - * 2011-12-03T10:15:30+01:00. - * - * @param date in ISO-8601 format - * @return {@link OffsetDateTime} - */ - public OffsetDateTime parse(final String date) { - try { - final int year = Integer.parseInt(date, 0, 4, 10); - final int month = Integer.parseInt(date, 5, 7, 10); - final int dayOfMonth = Integer.parseInt(date, 8, 10, 10); - final int hour = Integer.parseInt(date, 11, 13, 10); - final int minute = Integer.parseInt(date, 14, 16, 10); - final int second = Integer.parseInt(date, 17, 19, 10); + /** + * Parsing ISO-8601 like dates, e.g. 2011-12-03T10:15:30.123Z or + * 2011-12-03T10:15:30+01:00. 
+ * + * @param date in ISO-8601 format + * @return {@link OffsetDateTime} + */ + public OffsetDateTime parse(final String date) { + try { + final int year = Integer.parseInt(date, 0, 4, 10); + final int month = Integer.parseInt(date, 5, 7, 10); + final int dayOfMonth = Integer.parseInt(date, 8, 10, 10); + final int hour = Integer.parseInt(date, 11, 13, 10); + final int minute = Integer.parseInt(date, 14, 16, 10); + final int second = Integer.parseInt(date, 17, 19, 10); - final int[] nanosAndCharsRead = parseMilliseconds(date, 19); - final int nanos = nanosAndCharsRead[0]; - final int offsetTimezone = 19 + nanosAndCharsRead[1]; + final int[] nanosAndCharsRead = parseMilliseconds(date, 19); + final int nanos = nanosAndCharsRead[0]; + final int offsetTimezone = 19 + nanosAndCharsRead[1]; - final ZoneOffset offset = date.charAt(offsetTimezone) == 'Z' ? ZoneOffset.UTC - : parseZone(date.subSequence(offsetTimezone, date.length())); - return OffsetDateTime.of(year, month, dayOfMonth, hour, minute, second, nanos, offset); - } catch (final RuntimeException e) { - throw new IllegalArgumentException("'" + date + "' is not an ISO-8601 that can be parsed with " - + FastISODateParser.class.getCanonicalName(), e); - } - } + final ZoneOffset offset = date.charAt(offsetTimezone) == 'Z' ? 
ZoneOffset.UTC + : parseZone(date.subSequence(offsetTimezone, date.length())); + return OffsetDateTime.of(year, month, dayOfMonth, hour, minute, second, nanos, offset); + } catch (final RuntimeException e) { + throw new IllegalArgumentException("'" + date + "' is not an ISO-8601 that can be parsed with " + + FastISODateParser.class.getCanonicalName(), e); + } + } - public long parseAsEpochMilli(final String date) { - try { - final long year = parseLong(date, 0, 4); - final long month = parseLong(date, 5, 7); - final long dayOfMonth = parseLong(date, 8, 10); - final long hour = parseLong(date, 11, 13); - final long minute = parseLong(date, 14, 16); - final long second = parseLong(date, 17, 19); + public long parseAsEpochMilli(final String date) { + try { + final long year = parseLong(date, 0, 4); + final long month = parseLong(date, 5, 7); + final long dayOfMonth = parseLong(date, 8, 10); + final long hour = parseLong(date, 11, 13); + final long minute = parseLong(date, 14, 16); + final long second = parseLong(date, 17, 19); - final int[] nanosAndCharsRead = parseMilliseconds(date, 19); - final long nanos = nanosAndCharsRead[0]; - final int offsetTimezone = 19 + nanosAndCharsRead[1]; + final int[] nanosAndCharsRead = parseMilliseconds(date, 19); + final long nanos = nanosAndCharsRead[0]; + final int offsetTimezone = 19 + nanosAndCharsRead[1]; - final long zoneOffsetMillis = date.charAt(offsetTimezone) == 'Z' ? 0 - : parseZoneToMillis(date.subSequence(offsetTimezone, date.length())); + final long zoneOffsetMillis = date.charAt(offsetTimezone) == 'Z' ? 
0 + : parseZoneToMillis(date.subSequence(offsetTimezone, date.length())); - final int epochMilliMonthOffsetKey = (int) (year * 12 + month - 1); - final long epochMilliMonthOffset; + final int epochMilliMonthOffsetKey = (int) (year * 12 + month - 1); + final long epochMilliMonthOffset; - if (cached_epochMilliMonthOffsetKey == epochMilliMonthOffsetKey) { - epochMilliMonthOffset = cached_epochMilliMonthOffset; - } else { - epochMilliMonthOffset = EPOCH_MILLI_MONTH_OFFSETS.computeIfAbsent(epochMilliMonthOffsetKey, - FastISODateParser::computeEpochMilliMonthOffset); - cached_epochMilliMonthOffsetKey = epochMilliMonthOffsetKey; - cached_epochMilliMonthOffset = epochMilliMonthOffset; - } + if (cached_epochMilliMonthOffsetKey == epochMilliMonthOffsetKey) { + epochMilliMonthOffset = cached_epochMilliMonthOffset; + } else { + epochMilliMonthOffset = EPOCH_MILLI_MONTH_OFFSETS.computeIfAbsent(epochMilliMonthOffsetKey, + FastISODateParser::computeEpochMilliMonthOffset); + cached_epochMilliMonthOffsetKey = epochMilliMonthOffsetKey; + cached_epochMilliMonthOffset = epochMilliMonthOffset; + } - final long epochMilli = epochMilliMonthOffset // - + (dayOfMonth - 1) * 86_400_000 // - + hour * 3_600_000 // - + minute * 60_000 // - + second * 1_000 // - + nanos / 1_000_000// - - zoneOffsetMillis; - return epochMilli; + final long epochMilli = epochMilliMonthOffset // + + (dayOfMonth - 1) * 86_400_000 // + + hour * 3_600_000 // + + minute * 60_000 // + + second * 1_000 // + + nanos / 1_000_000// + - zoneOffsetMillis; + return epochMilli; - } catch (final RuntimeException e) { - throw new IllegalArgumentException("'" + date + "' is not an ISO-8601 that can be parsed with " - + FastISODateParser.class.getCanonicalName(), e); - } - } + } catch (final RuntimeException e) { + throw new IllegalArgumentException("'" + date + "' is not an ISO-8601 that can be parsed with " + + FastISODateParser.class.getCanonicalName(), e); + } + } - private static Long computeEpochMilliMonthOffset(final int 
key) { + private static Long computeEpochMilliMonthOffset(final int key) { - final int year = key / 12; - final int month = key % 12 + 1; + final int year = key / 12; + final int month = key % 12 + 1; - final OffsetDateTime date = OffsetDateTime.of(year, month, 1, 0, 0, 0, 0, ZoneOffset.UTC); + final OffsetDateTime date = OffsetDateTime.of(year, month, 1, 0, 0, 0, 0, ZoneOffset.UTC); - return date.toInstant().toEpochMilli(); - } + return date.toInstant().toEpochMilli(); + } - private long parseLong(final String string, final int start, final int end) { - long result = 0; - for (int i = start; i < end; i++) { - // final int c = string.charAt(i); - final int c = string.codePointAt(i); - if (c < '0' || c > '9') { - throw new NumberFormatException(c + " is not a number at offset " + i); - } - result = result * 10 + (c - '0'); - } - return result; - } + private long parseLong(final String string, final int start, final int end) { + long result = 0; + for (int i = start; i < end; i++) { + // final int c = string.charAt(i); + final int c = string.codePointAt(i); + if (c < '0' || c > '9') { + throw new NumberFormatException(c + " is not a number at offset " + i); + } + result = result * 10 + (c - '0'); + } + return result; + } - private int[] parseMilliseconds(final String date, final int start) { - int result = 0; - int i = start; - while (i < date.length()) { - final char c = date.charAt(i); - i++; - if (c == '.') { - continue; - } - if (c < '0' || c > '9') { - break; - } - result = result * 10 + (c - '0'); - } - final int readChars = i - start - 1; + private int[] parseMilliseconds(final String date, final int start) { + int result = 0; + int i = start; + while (i < date.length()) { + final char c = date.charAt(i); + i++; + if (c == '.') { + continue; + } + if (c < '0' || c > '9') { + break; + } + result = result * 10 + (c - '0'); + } + final int readChars = i - start - 1; - while (i <= start + 10) { - result *= 10; - i++; - } + while (i <= start + 10) { + result *= 10; 
+ i++; + } - return new int[] { result, readChars }; - } + return new int[] { result, readChars }; + } - private ZoneOffset parseZone(final CharSequence zoneString) { + private ZoneOffset parseZone(final CharSequence zoneString) { - final int hours = Integer.parseInt(zoneString, 0, 3, 10); - int minutes = Integer.parseInt(zoneString, 4, 6, 10); + final int hours = Integer.parseInt(zoneString, 0, 3, 10); + int minutes = Integer.parseInt(zoneString, 4, 6, 10); - // if hours is negative,then minutes must be too - minutes = (hours < 0 ? -1 : 1) * minutes; - return ZoneOffset.ofHoursMinutes(hours, minutes); - } + // if hours is negative,then minutes must be too + minutes = (hours < 0 ? -1 : 1) * minutes; + return ZoneOffset.ofHoursMinutes(hours, minutes); + } - private long parseZoneToMillis(final CharSequence zoneString) { + private long parseZoneToMillis(final CharSequence zoneString) { - final int hours = Integer.parseInt(zoneString, 0, 3, 10); - int minutes = Integer.parseInt(zoneString, 4, 6, 10); + final int hours = Integer.parseInt(zoneString, 0, 3, 10); + int minutes = Integer.parseInt(zoneString, 4, 6, 10); - // if hours is negative,then minutes must be too - minutes = (hours < 0 ? -1 : 1) * minutes; - return hours * 3_600_000 + minutes * 60_000; - } + // if hours is negative,then minutes must be too + minutes = (hours < 0 ? 
-1 : 1) * minutes; + return hours * 3_600_000 + minutes * 60_000; + } - public long parseAsEpochMilli(final byte[] date) { - return parseAsEpochMilli(date, 0); - } + public long parseAsEpochMilli(final byte[] date) { + return parseAsEpochMilli(date, 0); + } - public long parseAsEpochMilli(final byte[] date, final int beginIndex) { - try { - final int yearBegin = beginIndex + 0; - final int yearEnd = yearBegin + 4; - final int monthBegin = yearEnd + 1; - final int dayBegin = monthBegin + 3; - final int hourBegin = dayBegin + 3; - final int minuteBegin = hourBegin + 3; - final int secondBegin = minuteBegin + 3; - final int secondEnd = secondBegin + 2; + public long parseAsEpochMilli(final byte[] date, final int beginIndex) { + try { + final int yearBegin = beginIndex + 0; + final int yearEnd = yearBegin + 4; + final int monthBegin = yearEnd + 1; + final int dayBegin = monthBegin + 3; + final int hourBegin = dayBegin + 3; + final int minuteBegin = hourBegin + 3; + final int secondBegin = minuteBegin + 3; + final int secondEnd = secondBegin + 2; - final long year = parseLong(date, yearBegin, yearEnd); - final long month = parse2ByteLong(date, monthBegin); - final long dayOfMonth = parse2ByteLong(date, dayBegin); - final long hour = parse2ByteLong(date, hourBegin); - final long minute = parse2ByteLong(date, minuteBegin); - final long second = parse2ByteLong(date, secondBegin); + final long year = parseLong(date, yearBegin, yearEnd); + final long month = parse2ByteLong(date, monthBegin); + final long dayOfMonth = parse2ByteLong(date, dayBegin); + final long hour = parse2ByteLong(date, hourBegin); + final long minute = parse2ByteLong(date, minuteBegin); + final long second = parse2ByteLong(date, secondBegin); - final int[] nanosAndCharsRead = parseMilliseconds(date, secondEnd); - final long nanos = nanosAndCharsRead[0]; - final int offsetTimezone = beginIndex + 19 + nanosAndCharsRead[1]; + final int[] nanosAndCharsRead = parseMilliseconds(date, secondEnd); + final long 
nanos = nanosAndCharsRead[0]; + final int offsetTimezone = beginIndex + 19 + nanosAndCharsRead[1]; - final long zoneOffsetMillis = date[offsetTimezone] == 'Z' ? 0 : parseZoneToMillis(date, offsetTimezone); + final long zoneOffsetMillis = date[offsetTimezone] == 'Z' ? 0 : parseZoneToMillis(date, offsetTimezone); - final int epochMilliMonthOffsetKey = (int) (year * 12 + month - 1); - final long epochMilliMonthOffset; + final int epochMilliMonthOffsetKey = (int) (year * 12 + month - 1); + final long epochMilliMonthOffset; - if (cached_epochMilliMonthOffsetKey == epochMilliMonthOffsetKey) { - epochMilliMonthOffset = cached_epochMilliMonthOffset; - } else { - epochMilliMonthOffset = EPOCH_MILLI_MONTH_OFFSETS.computeIfAbsent(epochMilliMonthOffsetKey, - FastISODateParser::computeEpochMilliMonthOffset); - cached_epochMilliMonthOffsetKey = epochMilliMonthOffsetKey; - cached_epochMilliMonthOffset = epochMilliMonthOffset; - } + if (cached_epochMilliMonthOffsetKey == epochMilliMonthOffsetKey) { + epochMilliMonthOffset = cached_epochMilliMonthOffset; + } else { + epochMilliMonthOffset = EPOCH_MILLI_MONTH_OFFSETS.computeIfAbsent(epochMilliMonthOffsetKey, + FastISODateParser::computeEpochMilliMonthOffset); + cached_epochMilliMonthOffsetKey = epochMilliMonthOffsetKey; + cached_epochMilliMonthOffset = epochMilliMonthOffset; + } - final long epochMilli = epochMilliMonthOffset // - + (dayOfMonth - 1) * 86_400_000 // - + hour * 3_600_000 // - + minute * 60_000 // - + second * 1_000 // - + nanos / 1_000_000// - - zoneOffsetMillis; - return epochMilli; + final long epochMilli = epochMilliMonthOffset // + + (dayOfMonth - 1) * 86_400_000 // + + hour * 3_600_000 // + + minute * 60_000 // + + second * 1_000 // + + nanos / 1_000_000// + - zoneOffsetMillis; + return epochMilli; - } catch (final RuntimeException e) { - throw new IllegalArgumentException("'" - + new String(date, beginIndex, date.length - beginIndex, StandardCharsets.UTF_8) - + "' is not an ISO-8601 that can be parsed with " + 
FastISODateParser.class.getCanonicalName(), e); - } - } + } catch (final RuntimeException e) { + throw new IllegalArgumentException("'" + + new String(date, beginIndex, date.length - beginIndex, StandardCharsets.UTF_8) + + "' is not an ISO-8601 that can be parsed with " + FastISODateParser.class.getCanonicalName(), e); + } + } - private long parseLong(final byte[] bytes, final int start, final int end) { - long result = 0; - for (int i = start; i < end; i++) { - final int c = bytes[i]; - if (c < '0' || c > '9') // (byte)48 = '0' and (byte)57 = '9' - { - throw new NumberFormatException(c + " is not a number at offset " + i); - } - result = result * 10 + (c - '0'); - } - return result; - } + private long parseLong(final byte[] bytes, final int start, final int end) { + long result = 0; + for (int i = start; i < end; i++) { + final int c = bytes[i]; + if (c < '0' || c > '9') // (byte)48 = '0' and (byte)57 = '9' + { + throw new NumberFormatException(c + " is not a number at offset " + i); + } + result = result * 10 + (c - '0'); + } + return result; + } - private long parse2ByteLong(final byte[] bytes, final int start) { + private long parse2ByteLong(final byte[] bytes, final int start) { - final int c0 = bytes[start]; - if (c0 < 48 || c0 > 57) // (byte)48 = '0' and (byte)57 = '9' - { - throw new NumberFormatException(c0 + " is not a number at offset " + start); - // throw new NumberFormatException(); - } - long result = c0 - 48; + final int c0 = bytes[start]; + if (c0 < 48 || c0 > 57) // (byte)48 = '0' and (byte)57 = '9' + { + throw new NumberFormatException(c0 + " is not a number at offset " + start); + // throw new NumberFormatException(); + } + long result = c0 - 48; - final int c1 = bytes[start + 1]; - if (c1 < 48 || c1 > 57) { - throw new NumberFormatException(c1 + " is not a number at offset " + (start + 1)); - // throw new NumberFormatException(); - } - result = result * 10 + (c1 - 48); + final int c1 = bytes[start + 1]; + if (c1 < 48 || c1 > 57) { + throw new 
NumberFormatException(c1 + " is not a number at offset " + (start + 1)); + // throw new NumberFormatException(); + } + result = result * 10 + (c1 - 48); - return result; - } + return result; + } - private int[] parseMilliseconds(final byte[] date, final int start) { - int result = 0; - int i = start; - while (i < date.length) { - final byte c = date[i]; - i++; - if (c == '.') { - continue; - } - if (c < '0' || c > '9') { - break; - } - result = result * 10 + (c - '0'); - } - final int readChars = i - start - 1; + private int[] parseMilliseconds(final byte[] date, final int start) { + int result = 0; + int i = start; + while (i < date.length) { + final byte c = date[i]; + i++; + if (c == '.') { + continue; + } + if (c < '0' || c > '9') { + break; + } + result = result * 10 + (c - '0'); + } + final int readChars = i - start - 1; - while (i <= start + 10) { - result *= 10; - i++; - } + while (i <= start + 10) { + result *= 10; + i++; + } - return new int[] { result, readChars }; - } + return new int[] { result, readChars }; + } - private long parseZoneToMillis(final byte[] zoneBytes, final int beginIndex) { + private long parseZoneToMillis(final byte[] zoneBytes, final int beginIndex) { - final String zoneString = new String(zoneBytes, beginIndex, zoneBytes.length - beginIndex); - final int hours = Integer.parseInt(zoneString, 0, 3, 10); - int minutes = Integer.parseInt(zoneString, 4, 6, 10); - // if hours is negative,then minutes must be too - minutes = (hours < 0 ? -1 : 1) * minutes; - return hours * 3_600_000 + minutes * 60_000; - } + final String zoneString = new String(zoneBytes, beginIndex, zoneBytes.length - beginIndex); + final int hours = Integer.parseInt(zoneString, 0, 3, 10); + int minutes = Integer.parseInt(zoneString, 4, 6, 10); + // if hours is negative,then minutes must be too + minutes = (hours < 0 ? 
-1 : 1) * minutes; + return hours * 3_600_000 + minutes * 60_000; + } } diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/domain/AutocompleteProposal.java b/pdb-ui/src/main/java/org/lucares/pdbui/domain/AutocompleteProposal.java index f9f0ed6..5b87e94 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/domain/AutocompleteProposal.java +++ b/pdb-ui/src/main/java/org/lucares/pdbui/domain/AutocompleteProposal.java @@ -1,36 +1,36 @@ package org.lucares.pdbui.domain; public class AutocompleteProposal { - private String value; - private String newQuery; - private int newCaretPosition; + private String value; + private String newQuery; + private int newCaretPosition; - public String getValue() { - return value; - } + public String getValue() { + return value; + } - public void setValue(final String value) { - this.value = value; - } + public void setValue(final String value) { + this.value = value; + } - @Override - public String toString() { - return value; - } + @Override + public String toString() { + return value; + } - public int getNewCaretPosition() { - return newCaretPosition; - } + public int getNewCaretPosition() { + return newCaretPosition; + } - public void setNewCaretPosition(final int newCaretPosition) { - this.newCaretPosition = newCaretPosition; - } + public void setNewCaretPosition(final int newCaretPosition) { + this.newCaretPosition = newCaretPosition; + } - public String getNewQuery() { - return newQuery; - } + public String getNewQuery() { + return newQuery; + } - public void setNewQuery(final String newQuery) { - this.newQuery = newQuery; - } + public void setNewQuery(final String newQuery) { + this.newQuery = newQuery; + } } diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/domain/AutocompleteProposalByValue.java b/pdb-ui/src/main/java/org/lucares/pdbui/domain/AutocompleteProposalByValue.java index cd3d4f1..6df2286 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/domain/AutocompleteProposalByValue.java +++ 
b/pdb-ui/src/main/java/org/lucares/pdbui/domain/AutocompleteProposalByValue.java @@ -4,9 +4,9 @@ import java.util.Comparator; public class AutocompleteProposalByValue implements Comparator { - @Override - public int compare(final AutocompleteProposal o1, final AutocompleteProposal o2) { - return o1.getValue().compareToIgnoreCase(o2.getValue()); - } + @Override + public int compare(final AutocompleteProposal o1, final AutocompleteProposal o2) { + return o1.getValue().compareToIgnoreCase(o2.getValue()); + } } diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/domain/AutocompleteResponse.java b/pdb-ui/src/main/java/org/lucares/pdbui/domain/AutocompleteResponse.java index a55b66a..292c73c 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/domain/AutocompleteResponse.java +++ b/pdb-ui/src/main/java/org/lucares/pdbui/domain/AutocompleteResponse.java @@ -3,19 +3,19 @@ package org.lucares.pdbui.domain; import java.util.List; public class AutocompleteResponse { - private List proposals; + private List proposals; - public List getProposals() { - return proposals; - } + public List getProposals() { + return proposals; + } - public void setProposals(final List proposals) { - this.proposals = proposals; - } + public void setProposals(final List proposals) { + this.proposals = proposals; + } - @Override - public String toString() { - return String.valueOf(proposals); - } + @Override + public String toString() { + return String.valueOf(proposals); + } } diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/domain/DataSeriesStats.java b/pdb-ui/src/main/java/org/lucares/pdbui/domain/DataSeriesStats.java index 335ec64..69628e5 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/domain/DataSeriesStats.java +++ b/pdb-ui/src/main/java/org/lucares/pdbui/domain/DataSeriesStats.java @@ -3,62 +3,62 @@ package org.lucares.pdbui.domain; import java.util.Collection; public class DataSeriesStats { - private final int values; - private final long maxValue; - private final double average; - 
private final int plottedValues; + private final int values; + private final long maxValue; + private final double average; + private final int plottedValues; - public DataSeriesStats(final int values, final int plottedValues, final long maxValue, final double average) { - this.values = values; - this.plottedValues = plottedValues; - this.maxValue = maxValue; - this.average = average; - } + public DataSeriesStats(final int values, final int plottedValues, final long maxValue, final double average) { + this.values = values; + this.plottedValues = plottedValues; + this.maxValue = maxValue; + this.average = average; + } - /** - * The number of values in the date range, without applying the y-range. - * - * @return total number of values - */ - public int getValues() { - return values; - } + /** + * The number of values in the date range, without applying the y-range. + * + * @return total number of values + */ + public int getValues() { + return values; + } - /** - * The number of values in the date range and the y-range. - * - * @return number of plotted values - */ - public int getPlottedValues() { - return plottedValues; - } + /** + * The number of values in the date range and the y-range. 
+ * + * @return number of plotted values + */ + public int getPlottedValues() { + return plottedValues; + } - public long getMaxValue() { - return maxValue; - } + public long getMaxValue() { + return maxValue; + } - public double getAverage() { - return average; - } + public double getAverage() { + return average; + } - @Override - public String toString() { - return "[values=" + values + ", maxValue=" + maxValue + ", average=" + average + "]"; - } + @Override + public String toString() { + return "[values=" + values + ", maxValue=" + maxValue + ", average=" + average + "]"; + } - public static double average(final Collection stats) { - long n = 0; - double average = 0; + public static double average(final Collection stats) { + long n = 0; + double average = 0; - for (final DataSeriesStats stat : stats) { - final int newValues = stat.getValues(); - final double newAverage = stat.getAverage(); - if (newValues > 0) { - average = (average * n + newAverage * newValues) / (n + newValues); - n += newValues; - } - } + for (final DataSeriesStats stat : stats) { + final int newValues = stat.getValues(); + final double newAverage = stat.getAverage(); + if (newValues > 0) { + average = (average * n + newAverage * newValues) / (n + newValues); + n += newValues; + } + } - return average; - } + return average; + } } diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/domain/DateRange.java b/pdb-ui/src/main/java/org/lucares/pdbui/domain/DateRange.java index 04cb364..e250996 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/domain/DateRange.java +++ b/pdb-ui/src/main/java/org/lucares/pdbui/domain/DateRange.java @@ -9,68 +9,68 @@ import org.lucares.pdb.api.DateTimeRange; public class DateRange { - private static final DateTimeFormatter DATE_FORMAT = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"); + private static final DateTimeFormatter DATE_FORMAT = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"); - private String startDate; - private String endDate; + private String 
startDate; + private String endDate; - DateRange() { - super(); - } + DateRange() { + super(); + } - /** - * - * @param startDate date in format 'yyyy-MM-dd HH:mm:ss' - * @param endDate date in format 'yyyy-MM-dd HH:mm:ss' - */ - public DateRange(final String startDate, final String endDate) { - this.startDate = startDate; - this.endDate = endDate; - } + /** + * + * @param startDate date in format 'yyyy-MM-dd HH:mm:ss' + * @param endDate date in format 'yyyy-MM-dd HH:mm:ss' + */ + public DateRange(final String startDate, final String endDate) { + this.startDate = startDate; + this.endDate = endDate; + } - /** - * - * @return date in format 'yyyy-MM-dd HH:mm:ss' - */ - public String getStartDate() { - return startDate; - } + /** + * + * @return date in format 'yyyy-MM-dd HH:mm:ss' + */ + public String getStartDate() { + return startDate; + } - /** - * - * @param startDate date in format 'yyyy-MM-dd HH:mm:ss' - */ - public void setStartDate(final String startDate) { - this.startDate = startDate; - } + /** + * + * @param startDate date in format 'yyyy-MM-dd HH:mm:ss' + */ + public void setStartDate(final String startDate) { + this.startDate = startDate; + } - /** - * - * @return date in format 'yyyy-MM-dd HH:mm:ss' - */ - public String getEndDate() { - return endDate; - } + /** + * + * @return date in format 'yyyy-MM-dd HH:mm:ss' + */ + public String getEndDate() { + return endDate; + } - /** - * - * @param endDate date in format 'yyyy-MM-dd HH:mm:ss' - */ - public void setEndDate(final String endDate) { - this.endDate = endDate; - } + /** + * + * @param endDate date in format 'yyyy-MM-dd HH:mm:ss' + */ + public void setEndDate(final String endDate) { + this.endDate = endDate; + } - @Override - public String toString() { - return startDate + " - " + endDate; - } + @Override + public String toString() { + return startDate + " - " + endDate; + } - public DateTimeRange toDateTimeRange() { + public DateTimeRange toDateTimeRange() { - final OffsetDateTime start = 
LocalDateTime.parse(startDate, DATE_FORMAT).atOffset(ZoneOffset.UTC); - final OffsetDateTime end = LocalDateTime.parse(endDate, DATE_FORMAT).atOffset(ZoneOffset.UTC); + final OffsetDateTime start = LocalDateTime.parse(startDate, DATE_FORMAT).atOffset(ZoneOffset.UTC); + final OffsetDateTime end = LocalDateTime.parse(endDate, DATE_FORMAT).atOffset(ZoneOffset.UTC); - return new DateTimeRange(start, end); - } + return new DateTimeRange(start, end); + } } diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/domain/FilterDefaults.java b/pdb-ui/src/main/java/org/lucares/pdbui/domain/FilterDefaults.java index 9514b93..4aff896 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/domain/FilterDefaults.java +++ b/pdb-ui/src/main/java/org/lucares/pdbui/domain/FilterDefaults.java @@ -6,39 +6,39 @@ import java.util.List; import java.util.Set; public class FilterDefaults { - private Set groupBy; - - private List fields; + private Set groupBy; - private String splitBy; + private List fields; - public FilterDefaults(final List fields, final Set groupBy, final String splitBy) { - this.fields = new ArrayList(fields); - this.groupBy = new HashSet(groupBy); - this.splitBy = splitBy; - } + private String splitBy; - public Set getGroupBy() { - return new HashSet(groupBy); - } + public FilterDefaults(final List fields, final Set groupBy, final String splitBy) { + this.fields = new ArrayList(fields); + this.groupBy = new HashSet(groupBy); + this.splitBy = splitBy; + } - public void setGroupBy(Set groupBy) { - this.groupBy = new HashSet(groupBy); - } + public Set getGroupBy() { + return new HashSet(groupBy); + } - public List getFields() { - return new ArrayList(fields); - } + public void setGroupBy(Set groupBy) { + this.groupBy = new HashSet(groupBy); + } - public void setFields(List fields) { - this.fields = new ArrayList(fields); - } + public List getFields() { + return new ArrayList(fields); + } - public String getSplitBy() { - return splitBy; - } + public void setFields(List fields) { + 
this.fields = new ArrayList(fields); + } - public void setSplitBy(String splitBy) { - this.splitBy = splitBy; - } + public String getSplitBy() { + return splitBy; + } + + public void setSplitBy(String splitBy) { + this.splitBy = splitBy; + } } diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/domain/PlotRequest.java b/pdb-ui/src/main/java/org/lucares/pdbui/domain/PlotRequest.java index f9ce5b3..7ee1bb6 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/domain/PlotRequest.java +++ b/pdb-ui/src/main/java/org/lucares/pdbui/domain/PlotRequest.java @@ -8,166 +8,166 @@ import org.lucares.pdb.plot.api.AxisScale; import org.lucares.pdb.plot.api.Limit; public class PlotRequest { - private String query; + private String query; - private int height = 1000; + private int height = 1000; - private int width = 1000; + private int width = 1000; - private int thumbnailMaxWidth = 0; + private int thumbnailMaxWidth = 0; - private int thumbnailMaxHeight = 0; + private int thumbnailMaxHeight = 0; - private List groupBy; + private List groupBy; - private Limit limitBy = Limit.NO_LIMIT; + private Limit limitBy = Limit.NO_LIMIT; - private AxisScale yAxisScale = AxisScale.LINEAR; + private AxisScale yAxisScale = AxisScale.LINEAR; - private int limit = Integer.MAX_VALUE; + private int limit = Integer.MAX_VALUE; - private String dateRange; + private String dateRange; - private List aggregates = new ArrayList<>(); + private List aggregates = new ArrayList<>(); - private int yRangeMin; - private int yRangeMax; - private TimeRangeUnit yRangeUnit = TimeRangeUnit.AUTOMATIC; + private int yRangeMin; + private int yRangeMax; + private TimeRangeUnit yRangeUnit = TimeRangeUnit.AUTOMATIC; - private boolean keyOutside; + private boolean keyOutside; - private boolean generateThumbnail; + private boolean generateThumbnail; - public String getQuery() { - return query; - } + public String getQuery() { + return query; + } - public void setQuery(final String query) { - this.query = query; - } + public 
void setQuery(final String query) { + this.query = query; + } - public int getWidth() { - return width; - } + public int getWidth() { + return width; + } - public void setWidth(final int width) { - this.width = width; - } + public void setWidth(final int width) { + this.width = width; + } - public int getHeight() { - return height; - } + public int getHeight() { + return height; + } - public void setHeight(final int height) { - this.height = height; - } + public void setHeight(final int height) { + this.height = height; + } - public int getThumbnailMaxWidth() { - return thumbnailMaxWidth; - } + public int getThumbnailMaxWidth() { + return thumbnailMaxWidth; + } - public void setThumbnailMaxWidth(final int thumbnailMaxWidth) { - this.thumbnailMaxWidth = thumbnailMaxWidth; - } + public void setThumbnailMaxWidth(final int thumbnailMaxWidth) { + this.thumbnailMaxWidth = thumbnailMaxWidth; + } - public int getThumbnailMaxHeight() { - return thumbnailMaxHeight; - } + public int getThumbnailMaxHeight() { + return thumbnailMaxHeight; + } - public void setThumbnailMaxHeight(final int thumbnailMaxHeight) { - this.thumbnailMaxHeight = thumbnailMaxHeight; - } + public void setThumbnailMaxHeight(final int thumbnailMaxHeight) { + this.thumbnailMaxHeight = thumbnailMaxHeight; + } - @Override - public String toString() { - return query + ":" + height + "x" + width; - } + @Override + public String toString() { + return query + ":" + height + "x" + width; + } - public List getGroupBy() { - return groupBy; - } + public List getGroupBy() { + return groupBy; + } - public void setGroupBy(final List groupBy) { - this.groupBy = groupBy; - } + public void setGroupBy(final List groupBy) { + this.groupBy = groupBy; + } - public Limit getLimitBy() { - return limitBy; - } + public Limit getLimitBy() { + return limitBy; + } - public void setLimitBy(final Limit limitBy) { - this.limitBy = limitBy; - } + public void setLimitBy(final Limit limitBy) { + this.limitBy = limitBy; + } - public int 
getLimit() { - return limit; - } + public int getLimit() { + return limit; + } - public void setLimit(final int limit) { - this.limit = limit; - } + public void setLimit(final int limit) { + this.limit = limit; + } - public String getDateRange() { - return dateRange; - } + public String getDateRange() { + return dateRange; + } - public void setDateRange(final String dateRange) { - this.dateRange = dateRange; - } + public void setDateRange(final String dateRange) { + this.dateRange = dateRange; + } - public AxisScale getAxisScale() { - return yAxisScale; - } + public AxisScale getAxisScale() { + return yAxisScale; + } - public void setAxisScale(final AxisScale yAxis) { - this.yAxisScale = yAxis; - } + public void setAxisScale(final AxisScale yAxis) { + this.yAxisScale = yAxis; + } - public void setAggregate(final List aggregates) { - this.aggregates = aggregates; - } + public void setAggregate(final List aggregates) { + this.aggregates = aggregates; + } - public List getAggregates() { - return aggregates; - } + public List getAggregates() { + return aggregates; + } - public void setKeyOutside(final boolean keyOutside) { - this.keyOutside = keyOutside; - } + public void setKeyOutside(final boolean keyOutside) { + this.keyOutside = keyOutside; + } - public boolean isKeyOutside() { - return keyOutside; - } + public boolean isKeyOutside() { + return keyOutside; + } - public boolean isGenerateThumbnail() { - return generateThumbnail; - } + public boolean isGenerateThumbnail() { + return generateThumbnail; + } - public void setGenerateThumbnail(final boolean generateThumbnail) { - this.generateThumbnail = generateThumbnail; - } + public void setGenerateThumbnail(final boolean generateThumbnail) { + this.generateThumbnail = generateThumbnail; + } - public int getyRangeMin() { - return yRangeMin; - } + public int getyRangeMin() { + return yRangeMin; + } - public void setyRangeMin(final int yRangeMin) { - this.yRangeMin = yRangeMin; - } + public void setyRangeMin(final int 
yRangeMin) { + this.yRangeMin = yRangeMin; + } - public int getyRangeMax() { - return yRangeMax; - } + public int getyRangeMax() { + return yRangeMax; + } - public void setyRangeMax(final int yRangeMax) { - this.yRangeMax = yRangeMax; - } + public void setyRangeMax(final int yRangeMax) { + this.yRangeMax = yRangeMax; + } - public TimeRangeUnit getyRangeUnit() { - return yRangeUnit; - } + public TimeRangeUnit getyRangeUnit() { + return yRangeUnit; + } - public void setyRangeUnit(final TimeRangeUnit yRangeUnit) { - this.yRangeUnit = yRangeUnit; - } + public void setyRangeUnit(final TimeRangeUnit yRangeUnit) { + this.yRangeUnit = yRangeUnit; + } } diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/domain/PlotResponse.java b/pdb-ui/src/main/java/org/lucares/pdbui/domain/PlotResponse.java index d9a6de0..920218a 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/domain/PlotResponse.java +++ b/pdb-ui/src/main/java/org/lucares/pdbui/domain/PlotResponse.java @@ -1,43 +1,43 @@ package org.lucares.pdbui.domain; public class PlotResponse { - private String imageUrl = ""; - private PlotResponseStats stats; - private String thumbnailUrl; + private String imageUrl = ""; + private PlotResponseStats stats; + private String thumbnailUrl; - public PlotResponse(final PlotResponseStats stats, final String imageUrl, final String thumbnailUrl) { - this.stats = stats; - this.imageUrl = imageUrl; - this.thumbnailUrl = thumbnailUrl; - } + public PlotResponse(final PlotResponseStats stats, final String imageUrl, final String thumbnailUrl) { + this.stats = stats; + this.imageUrl = imageUrl; + this.thumbnailUrl = thumbnailUrl; + } - public String getImageUrl() { - return imageUrl; - } + public String getImageUrl() { + return imageUrl; + } - public void setImageUrl(final String imageUrl) { - this.imageUrl = imageUrl; - } + public void setImageUrl(final String imageUrl) { + this.imageUrl = imageUrl; + } - public String getThumbnailUrl() { - return thumbnailUrl; - } + public String 
getThumbnailUrl() { + return thumbnailUrl; + } - public PlotResponseStats getStats() { - return stats; - } + public PlotResponseStats getStats() { + return stats; + } - public void setStats(final PlotResponseStats stats) { - this.stats = stats; - } + public void setStats(final PlotResponseStats stats) { + this.stats = stats; + } - public void setThumbnailUrl(final String thumbnailUrl) { - this.thumbnailUrl = thumbnailUrl; - } + public void setThumbnailUrl(final String thumbnailUrl) { + this.thumbnailUrl = thumbnailUrl; + } - @Override - public String toString() { - return "PlotResponse [imageUrl=" + imageUrl + ", stats=" + stats + ", thumbnailUrl=" + thumbnailUrl + "]"; - } + @Override + public String toString() { + return "PlotResponse [imageUrl=" + imageUrl + ", stats=" + stats + ", thumbnailUrl=" + thumbnailUrl + "]"; + } } diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/domain/PlotResponseStats.java b/pdb-ui/src/main/java/org/lucares/pdbui/domain/PlotResponseStats.java index e6cd888..add718b 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/domain/PlotResponseStats.java +++ b/pdb-ui/src/main/java/org/lucares/pdbui/domain/PlotResponseStats.java @@ -6,94 +6,94 @@ import java.util.List; import org.lucares.recommind.logs.DataSeries; public class PlotResponseStats { - private long maxValue; + private long maxValue; - private int values; + private int values; - private double average; + private double average; - private int plottedValues; + private int plottedValues; - private List dataSeriesStats; + private List dataSeriesStats; - public PlotResponseStats() { - super(); - } + public PlotResponseStats() { + super(); + } - public PlotResponseStats(final long maxValue, final int values, final int plottedValues, final double average, - final List dataSeriesStats) { + public PlotResponseStats(final long maxValue, final int values, final int plottedValues, final double average, + final List dataSeriesStats) { - this.maxValue = maxValue; - this.values = values; - 
this.plottedValues = plottedValues; - this.average = average; - this.dataSeriesStats = dataSeriesStats; - } + this.maxValue = maxValue; + this.values = values; + this.plottedValues = plottedValues; + this.average = average; + this.dataSeriesStats = dataSeriesStats; + } - public long getMaxValue() { - return maxValue; - } + public long getMaxValue() { + return maxValue; + } - public void setMaxValue(final long maxValue) { - this.maxValue = maxValue; - } + public void setMaxValue(final long maxValue) { + this.maxValue = maxValue; + } - public int getValues() { - return values; - } + public int getValues() { + return values; + } - public void setValues(final int values) { - this.values = values; - } + public void setValues(final int values) { + this.values = values; + } - public int getPlottedValues() { - return plottedValues; - } + public int getPlottedValues() { + return plottedValues; + } - public void setPlottedValues(final int plottedValues) { - this.plottedValues = plottedValues; - } + public void setPlottedValues(final int plottedValues) { + this.plottedValues = plottedValues; + } - public double getAverage() { - return average; - } + public double getAverage() { + return average; + } - public void setAverage(final double average) { - this.average = average; - } + public void setAverage(final double average) { + this.average = average; + } - public List getDataSeriesStats() { - return dataSeriesStats; - } + public List getDataSeriesStats() { + return dataSeriesStats; + } - public void setDataSeriesStats(final List dataSeriesStats) { - this.dataSeriesStats = dataSeriesStats; - } + public void setDataSeriesStats(final List dataSeriesStats) { + this.dataSeriesStats = dataSeriesStats; + } - @Override - public String toString() { - return "PlotResponseStats [maxValue=" + maxValue + ", values=" + values + ", average=" + average - + ", plottedValues=" + plottedValues + ", dataSeriesStats=" + dataSeriesStats + "]"; - } + @Override + public String toString() { + return 
"PlotResponseStats [maxValue=" + maxValue + ", values=" + values + ", average=" + average + + ", plottedValues=" + plottedValues + ", dataSeriesStats=" + dataSeriesStats + "]"; + } - public static PlotResponseStats fromDataSeries(final List dataSeries) { + public static PlotResponseStats fromDataSeries(final List dataSeries) { - int values = 0; - int plottedValues = 0; - long maxValue = 0; - final List dataSeriesStats = new ArrayList<>(); + int values = 0; + int plottedValues = 0; + long maxValue = 0; + final List dataSeriesStats = new ArrayList<>(); - for (final DataSeries dataSerie : dataSeries) { - values += dataSerie.getValues(); - plottedValues += dataSerie.getPlottedValues(); - maxValue = Math.max(maxValue, dataSerie.getMaxValue()); + for (final DataSeries dataSerie : dataSeries) { + values += dataSerie.getValues(); + plottedValues += dataSerie.getPlottedValues(); + maxValue = Math.max(maxValue, dataSerie.getMaxValue()); - dataSeriesStats.add(new DataSeriesStats(dataSerie.getValues(), dataSerie.getPlottedValues(), - dataSerie.getMaxValue(), dataSerie.getAverage())); - } + dataSeriesStats.add(new DataSeriesStats(dataSerie.getValues(), dataSerie.getPlottedValues(), + dataSerie.getMaxValue(), dataSerie.getAverage())); + } - final double average = Math.round(DataSeriesStats.average(dataSeriesStats)); + final double average = Math.round(DataSeriesStats.average(dataSeriesStats)); - return new PlotResponseStats(maxValue, values, plottedValues, average, dataSeriesStats); - } + return new PlotResponseStats(maxValue, values, plottedValues, average, dataSeriesStats); + } } diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/domain/TimeRangeUnit.java b/pdb-ui/src/main/java/org/lucares/pdbui/domain/TimeRangeUnit.java index 406ab36..e9785ec 100644 --- a/pdb-ui/src/main/java/org/lucares/pdbui/domain/TimeRangeUnit.java +++ b/pdb-ui/src/main/java/org/lucares/pdbui/domain/TimeRangeUnit.java @@ -1,5 +1,5 @@ package org.lucares.pdbui.domain; public enum TimeRangeUnit { - 
AUTOMATIC, MILLISECONDS, SECONDS, MINUTES, HOURS, DAYS + AUTOMATIC, MILLISECONDS, SECONDS, MINUTES, HOURS, DAYS } diff --git a/pdb-ui/src/test/java/org/lucares/pdbui/CsvToEntryTransformerPerformanceTest.java b/pdb-ui/src/test/java/org/lucares/pdbui/CsvToEntryTransformerPerformanceTest.java index c081ab7..51aff23 100644 --- a/pdb-ui/src/test/java/org/lucares/pdbui/CsvToEntryTransformerPerformanceTest.java +++ b/pdb-ui/src/test/java/org/lucares/pdbui/CsvToEntryTransformerPerformanceTest.java @@ -14,88 +14,88 @@ import java.util.List; public class CsvToEntryTransformerPerformanceTest { - private static final byte NEWLINE = '\n'; + private static final byte NEWLINE = '\n'; - @SuppressWarnings("unused") - public static void main(final String[] args) throws Exception { - // final Path csvFile = - // Paths.get("/home/andi/ws/performanceDb/data/production/1k.csv"); - final Path csvFile = Paths.get("/home/andi/ws/performanceDb/data/production/logs_2018-09-05_2018-09-05.csv"); + @SuppressWarnings("unused") + public static void main(final String[] args) throws Exception { + // final Path csvFile = + // Paths.get("/home/andi/ws/performanceDb/data/production/1k.csv"); + final Path csvFile = Paths.get("/home/andi/ws/performanceDb/data/production/logs_2018-09-05_2018-09-05.csv"); - final int skip = 0; + final int skip = 0; - final List times = new ArrayList<>(); - for (int i = 0; i < 105; i++) { - final long start = System.nanoTime(); - runtest(csvFile); - final double duration = (System.nanoTime() - start) / 1_000_000.0; - times.add(duration); - // System.out.println("duration: " + duration + "ms"); - if (i >= skip) { - System.out.println((int) Math.round(duration * 1000)); - } - } + final List times = new ArrayList<>(); + for (int i = 0; i < 105; i++) { + final long start = System.nanoTime(); + runtest(csvFile); + final double duration = (System.nanoTime() - start) / 1_000_000.0; + times.add(duration); + // System.out.println("duration: " + duration + "ms"); + if (i >= skip) { 
+ System.out.println((int) Math.round(duration * 1000)); + } + } - final DoubleSummaryStatistics summaryStatisticsPut = times.stream().skip(skip).mapToDouble(d -> (double) d) - .summaryStatistics(); - // System.out.println("summary: " + summaryStatisticsPut); - } + final DoubleSummaryStatistics summaryStatisticsPut = times.stream().skip(skip).mapToDouble(d -> (double) d) + .summaryStatistics(); + // System.out.println("summary: " + summaryStatisticsPut); + } - @SuppressWarnings("unused") - private static void runtest(final Path csvFile) throws IOException, FileNotFoundException { - final byte newline = NEWLINE; + @SuppressWarnings("unused") + private static void runtest(final Path csvFile) throws IOException, FileNotFoundException { + final byte newline = NEWLINE; - byte[] line = new byte[4096]; // max line length - int offsetInLine = 0; - int offsetInBuffer = 0; - int linecount = 0; + byte[] line = new byte[4096]; // max line length + int offsetInLine = 0; + int offsetInBuffer = 0; + int linecount = 0; - try (final FileChannel channel = FileChannel.open(csvFile, StandardOpenOption.READ)) { - int read = 0; - int bytesInLine = 0; + try (final FileChannel channel = FileChannel.open(csvFile, StandardOpenOption.READ)) { + int read = 0; + int bytesInLine = 0; - final ByteBuffer buffer = ByteBuffer.allocate(4096 * 4); - while ((read = channel.read(buffer)) >= 0) { - offsetInBuffer = 0; + final ByteBuffer buffer = ByteBuffer.allocate(4096 * 4); + while ((read = channel.read(buffer)) >= 0) { + offsetInBuffer = 0; - final byte[] b = buffer.array(); + final byte[] b = buffer.array(); - for (int i = 0; i < read; i++) { - if (b[i] == newline) { - final int length = i - offsetInBuffer; - System.arraycopy(b, offsetInBuffer, line, offsetInLine, length); - bytesInLine = offsetInLine + length; + for (int i = 0; i < read; i++) { + if (b[i] == newline) { + final int length = i - offsetInBuffer; + System.arraycopy(b, offsetInBuffer, line, offsetInLine, length); + bytesInLine = 
offsetInLine + length; - linecount++; - handleLine(line, bytesInLine); - line = new byte[4096]; + linecount++; + handleLine(line, bytesInLine); + line = new byte[4096]; - offsetInBuffer = i + 1; - offsetInLine = 0; - bytesInLine = 0; - } - } - if (offsetInBuffer < read) { - final int length = read - offsetInBuffer; - System.arraycopy(b, offsetInBuffer, line, offsetInLine, length); - bytesInLine = offsetInLine + length; - offsetInLine += length; - offsetInBuffer = 0; + offsetInBuffer = i + 1; + offsetInLine = 0; + bytesInLine = 0; + } + } + if (offsetInBuffer < read) { + final int length = read - offsetInBuffer; + System.arraycopy(b, offsetInBuffer, line, offsetInLine, length); + bytesInLine = offsetInLine + length; + offsetInLine += length; + offsetInBuffer = 0; - } - buffer.rewind(); - } + } + buffer.rewind(); + } - linecount++; - handleLine(line, bytesInLine); - } - // System.out.println("lines: " + linecount); - } + linecount++; + handleLine(line, bytesInLine); + } + // System.out.println("lines: " + linecount); + } - private static void handleLine(final byte[] line, final int bytesInLine) { + private static void handleLine(final byte[] line, final int bytesInLine) { - @SuppressWarnings("unused") - final String x = new String(line, 0, bytesInLine, StandardCharsets.UTF_8); - // System.out.println(">" + x + "<"); - } + @SuppressWarnings("unused") + final String x = new String(line, 0, bytesInLine, StandardCharsets.UTF_8); + // System.out.println(">" + x + "<"); + } } diff --git a/pdb-ui/src/test/java/org/lucares/pdbui/LongPair.java b/pdb-ui/src/test/java/org/lucares/pdbui/LongPair.java index 941a37b..7664378 100644 --- a/pdb-ui/src/test/java/org/lucares/pdbui/LongPair.java +++ b/pdb-ui/src/test/java/org/lucares/pdbui/LongPair.java @@ -8,65 +8,65 @@ import java.util.List; import org.lucares.collections.LongList; final class LongPair implements Comparable { - private final long a, b; + private final long a, b; - public LongPair(final long a, final long b) { - 
super(); - this.a = a; - this.b = b; - } + public LongPair(final long a, final long b) { + super(); + this.a = a; + this.b = b; + } - public static List fromLongList(final LongList longList) { - final List result = new ArrayList<>(); - for (int i = 0; i < longList.size(); i += 2) { + public static List fromLongList(final LongList longList) { + final List result = new ArrayList<>(); + for (int i = 0; i < longList.size(); i += 2) { - result.add(new LongPair(longList.get(i), longList.get(i + 1))); + result.add(new LongPair(longList.get(i), longList.get(i + 1))); - } - Collections.sort(result); - return result; - } + } + Collections.sort(result); + return result; + } - public long getA() { - return a; - } + public long getA() { + return a; + } - public long getB() { - return b; - } + public long getB() { + return b; + } - @Override - public String toString() { - return a + "," + b; - } + @Override + public String toString() { + return a + "," + b; + } - @Override - public int compareTo(final LongPair o) { - return Comparator.comparing(LongPair::getA).thenComparing(LongPair::getB).compare(this, o); - } + @Override + public int compareTo(final LongPair o) { + return Comparator.comparing(LongPair::getA).thenComparing(LongPair::getB).compare(this, o); + } - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + (int) (a ^ (a >>> 32)); - result = prime * result + (int) (b ^ (b >>> 32)); - return result; - } + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + (int) (a ^ (a >>> 32)); + result = prime * result + (int) (b ^ (b >>> 32)); + return result; + } - @Override - public boolean equals(final Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - final LongPair other = (LongPair) obj; - if (a != other.a) - return false; - if (b != other.b) - return false; - return true; - } + @Override 
+ public boolean equals(final Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + final LongPair other = (LongPair) obj; + if (a != other.a) + return false; + if (b != other.b) + return false; + return true; + } } \ No newline at end of file diff --git a/pdb-ui/src/test/java/org/lucares/pdbui/PdbTestUtil.java b/pdb-ui/src/test/java/org/lucares/pdbui/PdbTestUtil.java index 22eb401..f80457f 100644 --- a/pdb-ui/src/test/java/org/lucares/pdbui/PdbTestUtil.java +++ b/pdb-ui/src/test/java/org/lucares/pdbui/PdbTestUtil.java @@ -28,154 +28,153 @@ import org.slf4j.LoggerFactory; import com.fasterxml.jackson.databind.ObjectMapper; public class PdbTestUtil { - private static final Logger LOGGER = LoggerFactory.getLogger(PdbTestUtil.class); + private static final Logger LOGGER = LoggerFactory.getLogger(PdbTestUtil.class); - static final Map POISON = new HashMap<>(); + static final Map POISON = new HashMap<>(); - public static final void send(final String format, final Collection> entries) - throws IOException, InterruptedException { - switch (format) { - case "csv": - sendAsCsv(entries); - break; - case "json": - sendAsJson(entries); - break; - default: - throw new IllegalStateException("unhandled format: " + format); - } - } - - @SafeVarargs - public static final void sendAsCsv(final Map... 
entries) throws IOException, InterruptedException { - sendAsCsv(Arrays.asList(entries)); - } - - public static final void sendAsCsv(final Collection> entries) - throws IOException, InterruptedException { - - final Set keys = entries.stream().map(Map::keySet).flatMap(Set::stream).collect(Collectors.toSet()); - - sendAsCsv(keys, entries); - } - - public static final void sendAsCsv(Collection keys, final Collection> entries) - throws IOException, InterruptedException { - - - final StringBuilder csv = new StringBuilder(); - - csv.append(String.join(",", keys)); - csv.append("\n"); - - for (final Map entry : entries) { - final List line = new ArrayList<>(); - for (final String key : keys) { - final String value = String.valueOf(entry.getOrDefault(key, "")); - line.add(value); - } - csv.append(String.join(",", line)); - csv.append("\n"); - } - System.out.println("sending: " + csv); - send(csv.toString()); - } - - @SafeVarargs - public static final void sendAsJson(final Map... entries) throws IOException, InterruptedException { - - sendAsJson(Arrays.asList(entries)); - } - - public static final void sendAsJson(final Collection> entries) - throws IOException, InterruptedException { - final LinkedBlockingDeque> queue = new LinkedBlockingDeque<>(entries); - queue.put(POISON); - sendAsJson(queue); - } - - public static final void sendAsJson(final BlockingQueue> aEntriesSupplier) throws IOException { - - final ObjectMapper mapper = new ObjectMapper(); - final SocketChannel channel = connect(); - - Map entry; - while ((entry = aEntriesSupplier.poll()) != POISON) { - - final StringBuilder streamData = new StringBuilder(); - streamData.append(mapper.writeValueAsString(entry)); - streamData.append("\n"); - - final ByteBuffer src = ByteBuffer.wrap(streamData.toString().getBytes(StandardCharsets.UTF_8)); - channel.write(src); + public static final void send(final String format, final Collection> entries) + throws IOException, InterruptedException { + switch (format) { + case "csv": 
+ sendAsCsv(entries); + break; + case "json": + sendAsJson(entries); + break; + default: + throw new IllegalStateException("unhandled format: " + format); + } } - try { - // ugly workaround: the channel was closed too early and not all - // data was received - TimeUnit.MILLISECONDS.sleep(10); - } catch (final InterruptedException e) { - throw new IllegalStateException(e); - } - channel.close(); - LOGGER.trace("closed sender connection"); - } - - public static final void send(final String data) throws IOException { - - final SocketChannel channel = connect(); - - final StringBuilder streamData = new StringBuilder(); - streamData.append(data); - - final ByteBuffer src = ByteBuffer.wrap(streamData.toString().getBytes(StandardCharsets.UTF_8)); - channel.write(src); - - try { - // ugly workaround: the channel was closed too early and not all - // data was received - TimeUnit.MILLISECONDS.sleep(10); - } catch (final InterruptedException e) { - throw new IllegalStateException(e); - } - channel.close(); - LOGGER.trace("closed sender connection"); - } - - public static void send(final Path file) throws IOException { - final SocketChannel outputChannel = connect(); - - try (final FileChannel inputChannel = FileChannel.open(file, StandardOpenOption.READ)) { - inputChannel.transferTo(0, Long.MAX_VALUE, outputChannel); + @SafeVarargs + public static final void sendAsCsv(final Map... 
entries) throws IOException, InterruptedException { + sendAsCsv(Arrays.asList(entries)); } - try { - // ugly workaround: the channel was closed too early and not all - // data was received - TimeUnit.MILLISECONDS.sleep(10); - } catch (final InterruptedException e) { - throw new IllegalStateException(e); - } - outputChannel.close(); - LOGGER.trace("closed sender connection"); - } + public static final void sendAsCsv(final Collection> entries) + throws IOException, InterruptedException { - private static SocketChannel connect() throws IOException { + final Set keys = entries.stream().map(Map::keySet).flatMap(Set::stream).collect(Collectors.toSet()); - SocketChannel result = null; - - while (true) { - try { - result = SocketChannel.open(); - result.configureBlocking(true); - result.connect(new InetSocketAddress("127.0.0.1", TcpIngestor.PORT)); - break; - } catch (final ConnectException e) { - // server socket not yet ready, it should be ready any time soon - } + sendAsCsv(keys, entries); } - return result; - } + public static final void sendAsCsv(Collection keys, final Collection> entries) + throws IOException, InterruptedException { + + final StringBuilder csv = new StringBuilder(); + + csv.append(String.join(",", keys)); + csv.append("\n"); + + for (final Map entry : entries) { + final List line = new ArrayList<>(); + for (final String key : keys) { + final String value = String.valueOf(entry.getOrDefault(key, "")); + line.add(value); + } + csv.append(String.join(",", line)); + csv.append("\n"); + } + System.out.println("sending: " + csv); + send(csv.toString()); + } + + @SafeVarargs + public static final void sendAsJson(final Map... 
entries) throws IOException, InterruptedException { + + sendAsJson(Arrays.asList(entries)); + } + + public static final void sendAsJson(final Collection> entries) + throws IOException, InterruptedException { + final LinkedBlockingDeque> queue = new LinkedBlockingDeque<>(entries); + queue.put(POISON); + sendAsJson(queue); + } + + public static final void sendAsJson(final BlockingQueue> aEntriesSupplier) throws IOException { + + final ObjectMapper mapper = new ObjectMapper(); + final SocketChannel channel = connect(); + + Map entry; + while ((entry = aEntriesSupplier.poll()) != POISON) { + + final StringBuilder streamData = new StringBuilder(); + streamData.append(mapper.writeValueAsString(entry)); + streamData.append("\n"); + + final ByteBuffer src = ByteBuffer.wrap(streamData.toString().getBytes(StandardCharsets.UTF_8)); + channel.write(src); + } + + try { + // ugly workaround: the channel was closed too early and not all + // data was received + TimeUnit.MILLISECONDS.sleep(10); + } catch (final InterruptedException e) { + throw new IllegalStateException(e); + } + channel.close(); + LOGGER.trace("closed sender connection"); + } + + public static final void send(final String data) throws IOException { + + final SocketChannel channel = connect(); + + final StringBuilder streamData = new StringBuilder(); + streamData.append(data); + + final ByteBuffer src = ByteBuffer.wrap(streamData.toString().getBytes(StandardCharsets.UTF_8)); + channel.write(src); + + try { + // ugly workaround: the channel was closed too early and not all + // data was received + TimeUnit.MILLISECONDS.sleep(10); + } catch (final InterruptedException e) { + throw new IllegalStateException(e); + } + channel.close(); + LOGGER.trace("closed sender connection"); + } + + public static void send(final Path file) throws IOException { + final SocketChannel outputChannel = connect(); + + try (final FileChannel inputChannel = FileChannel.open(file, StandardOpenOption.READ)) { + inputChannel.transferTo(0, 
Long.MAX_VALUE, outputChannel); + } + + try { + // ugly workaround: the channel was closed too early and not all + // data was received + TimeUnit.MILLISECONDS.sleep(10); + } catch (final InterruptedException e) { + throw new IllegalStateException(e); + } + outputChannel.close(); + LOGGER.trace("closed sender connection"); + } + + private static SocketChannel connect() throws IOException { + + SocketChannel result = null; + + while (true) { + try { + result = SocketChannel.open(); + result.configureBlocking(true); + result.connect(new InetSocketAddress("127.0.0.1", TcpIngestor.PORT)); + break; + } catch (final ConnectException e) { + // server socket not yet ready, it should be ready any time soon + } + } + + return result; + } } diff --git a/pdb-ui/src/test/java/org/lucares/pdbui/TcpIngestorTest.java b/pdb-ui/src/test/java/org/lucares/pdbui/TcpIngestorTest.java index 5b949e3..45c1f01 100644 --- a/pdb-ui/src/test/java/org/lucares/pdbui/TcpIngestorTest.java +++ b/pdb-ui/src/test/java/org/lucares/pdbui/TcpIngestorTest.java @@ -37,285 +37,284 @@ import com.fasterxml.jackson.databind.ObjectMapper; @Test public class TcpIngestorTest { - private static final Logger LOGGER = LoggerFactory.getLogger(TcpIngestorTest.class); + private static final Logger LOGGER = LoggerFactory.getLogger(TcpIngestorTest.class); - private Path dataDirectory; + private Path dataDirectory; - @BeforeMethod - public void beforeMethod() throws IOException { - dataDirectory = Files.createTempDirectory("pdb"); - } + @BeforeMethod + public void beforeMethod() throws IOException { + dataDirectory = Files.createTempDirectory("pdb"); + } - @AfterMethod - public void afterMethod() throws IOException { - FileUtils.delete(dataDirectory); - } + @AfterMethod + public void afterMethod() throws IOException { + FileUtils.delete(dataDirectory); + } - public void testIngestDataViaTcpStream() throws Exception { + public void testIngestDataViaTcpStream() throws Exception { - final OffsetDateTime dateA = 
OffsetDateTime.now(); - final OffsetDateTime dateB = OffsetDateTime.now(); - final String host = "someHost"; + final OffsetDateTime dateA = OffsetDateTime.now(); + final OffsetDateTime dateB = OffsetDateTime.now(); + final String host = "someHost"; - try (TcpIngestor ingestor = new TcpIngestor(dataDirectory)) { + try (TcpIngestor ingestor = new TcpIngestor(dataDirectory)) { - ingestor.start(); + ingestor.start(); - final Map entryA = new HashMap<>(); - entryA.put("duration", 1); - entryA.put("@timestamp", dateA.format(DateTimeFormatter.ISO_ZONED_DATE_TIME)); - entryA.put("host", host); - entryA.put("tags", Collections.emptyList()); + final Map entryA = new HashMap<>(); + entryA.put("duration", 1); + entryA.put("@timestamp", dateA.format(DateTimeFormatter.ISO_ZONED_DATE_TIME)); + entryA.put("host", host); + entryA.put("tags", Collections.emptyList()); - final Map entryB = new HashMap<>(); - entryB.put("duration", 2); - entryB.put("@timestamp", dateB.format(DateTimeFormatter.ISO_ZONED_DATE_TIME)); - entryB.put("host", host); - entryB.put("tags", Collections.emptyList()); + final Map entryB = new HashMap<>(); + entryB.put("duration", 2); + entryB.put("@timestamp", dateB.format(DateTimeFormatter.ISO_ZONED_DATE_TIME)); + entryB.put("host", host); + entryB.put("tags", Collections.emptyList()); - PdbTestUtil.sendAsJson(entryA, entryB); - } catch (final Exception e) { - LOGGER.error("", e); - throw e; - } + PdbTestUtil.sendAsJson(entryA, entryB); + } catch (final Exception e) { + LOGGER.error("", e); + throw e; + } - try (PerformanceDb db = new PerformanceDb(dataDirectory)) { - final LongList result = db.get(new Query("host=" + host, DateTimeRange.ofDay(dateA))).singleGroup() - .flatMap(); - Assert.assertEquals(result.size(), 4); + try (PerformanceDb db = new PerformanceDb(dataDirectory)) { + final LongList result = db.get(new Query("host=" + host, DateTimeRange.ofDay(dateA))).singleGroup() + .flatMap(); + Assert.assertEquals(result.size(), 4); - 
Assert.assertEquals(result.get(0), dateA.toInstant().toEpochMilli()); - Assert.assertEquals(result.get(1), 1); + Assert.assertEquals(result.get(0), dateA.toInstant().toEpochMilli()); + Assert.assertEquals(result.get(1), 1); - Assert.assertEquals(result.get(2), dateB.toInstant().truncatedTo(ChronoUnit.MILLIS).toEpochMilli()); - Assert.assertEquals(result.get(3), 2); - } - } + Assert.assertEquals(result.get(2), dateB.toInstant().truncatedTo(ChronoUnit.MILLIS).toEpochMilli()); + Assert.assertEquals(result.get(3), 2); + } + } - public void testIngestDataViaTcpStream_CustomFormat() throws Exception { + public void testIngestDataViaTcpStream_CustomFormat() throws Exception { - final long dateA = Instant.now().toEpochMilli(); - final long dateB = Instant.now().toEpochMilli() + 1; - final long dateC = Instant.now().toEpochMilli() - 1; - final DateTimeRange dateRange = DateTimeRange.relativeMinutes(1); - final String host = "someHost"; + final long dateA = Instant.now().toEpochMilli(); + final long dateB = Instant.now().toEpochMilli() + 1; + final long dateC = Instant.now().toEpochMilli() - 1; + final DateTimeRange dateRange = DateTimeRange.relativeMinutes(1); + final String host = "someHost"; - // 1. insert some data - try (TcpIngestor ingestor = new TcpIngestor(dataDirectory)) { + // 1. 
insert some data + try (TcpIngestor ingestor = new TcpIngestor(dataDirectory)) { - ingestor.start(); + ingestor.start(); - final long deltaEpochMilliB = dateB - dateA; - final long deltaEpochMilliC = dateC - dateB; + final long deltaEpochMilliB = dateB - dateA; + final long deltaEpochMilliC = dateC - dateB; - final String data = "#$0:host=someHost,pod=somePod\n"// - + dateA + ",1,0\n"// previous date is 0, therefore the delta is dateA / using tags with id 0 - + "$1:host=someHost,pod=otherPod\n" // - + deltaEpochMilliB + ",2,1\n" // dates are the delta the the previous date / using tags with id 1 - + deltaEpochMilliC + ",3,0"; // dates are the delta the the previous date / using tags with id 0 + final String data = "#$0:host=someHost,pod=somePod\n"// + + dateA + ",1,0\n"// previous date is 0, therefore the delta is dateA / using tags with id 0 + + "$1:host=someHost,pod=otherPod\n" // + + deltaEpochMilliB + ",2,1\n" // dates are the delta to the previous date / using tags with id 1 + + deltaEpochMilliC + ",3,0"; // dates are the delta to the previous date / using tags with id 0 - PdbTestUtil.send(data); - } catch (final Exception e) { - LOGGER.error("", e); - throw e; - } + PdbTestUtil.send(data); + } catch (final Exception e) { + LOGGER.error("", e); + throw e; + } - // 2. export the data - final List exportFiles = PdbExport.export(dataDirectory, dataDirectory.resolve("export")); + // 2. export the data + final List exportFiles = PdbExport.export(dataDirectory, dataDirectory.resolve("export")); - // 3. delete database - FileUtils.delete(dataDirectory.resolve(DataStore.SUBDIR_STORAGE)); + // 3. delete database + FileUtils.delete(dataDirectory.resolve(DataStore.SUBDIR_STORAGE)); - // 4. 
create a new database + try (TcpIngestor ingestor = new TcpIngestor(dataDirectory)) { + ingestor.start(); + for (final Path exportFile : exportFiles) { + PdbTestUtil.send(exportFile); + } + } - // 5. check that the data is correctly inserted - try (PerformanceDb db = new PerformanceDb(dataDirectory)) { - final LongList result = db.get(new Query("host=" + host, dateRange)).singleGroup().flatMap(); - Assert.assertEquals(result.size(), 6); + // 5. check that the data is correctly inserted + try (PerformanceDb db = new PerformanceDb(dataDirectory)) { + final LongList result = db.get(new Query("host=" + host, dateRange)).singleGroup().flatMap(); + Assert.assertEquals(result.size(), 6); - Assert.assertEquals(result.get(0), dateA); - Assert.assertEquals(result.get(1), 1); + Assert.assertEquals(result.get(0), dateA); + Assert.assertEquals(result.get(1), 1); - Assert.assertEquals(result.get(2), dateC); - Assert.assertEquals(result.get(3), 3); + Assert.assertEquals(result.get(2), dateC); + Assert.assertEquals(result.get(3), 3); - Assert.assertEquals(result.get(4), dateB); - Assert.assertEquals(result.get(5), 2); - } - } + Assert.assertEquals(result.get(4), dateB); + Assert.assertEquals(result.get(5), 2); + } + } - @Test - public void testIngestionThreadDoesNotDieOnErrors() throws Exception { - final OffsetDateTime dateA = OffsetDateTime.ofInstant(Instant.ofEpochMilli(-1), ZoneOffset.UTC); - final OffsetDateTime dateB = OffsetDateTime.now(); - final DateTimeRange dateRange = new DateTimeRange(dateA, dateB); - final String host = "someHost"; + @Test + public void testIngestionThreadDoesNotDieOnErrors() throws Exception { + final OffsetDateTime dateA = OffsetDateTime.ofInstant(Instant.ofEpochMilli(-1), ZoneOffset.UTC); + final OffsetDateTime dateB = OffsetDateTime.now(); + final DateTimeRange dateRange = new DateTimeRange(dateA, dateB); + final String host = "someHost"; - try (TcpIngestor tcpIngestor = new TcpIngestor(dataDirectory)) { - tcpIngestor.start(); + try (TcpIngestor 
tcpIngestor = new TcpIngestor(dataDirectory)) { + tcpIngestor.start(); - // has a negative epoch time milli and negative value - final Map entryA = new HashMap<>(); - entryA.put("duration", -1); - entryA.put("@timestamp", dateA.format(DateTimeFormatter.ISO_ZONED_DATE_TIME)); - entryA.put("host", host); - entryA.put("tags", Collections.emptyList()); + // has a negative epoch time milli and negative value + final Map entryA = new HashMap<>(); + entryA.put("duration", -1); + entryA.put("@timestamp", dateA.format(DateTimeFormatter.ISO_ZONED_DATE_TIME)); + entryA.put("host", host); + entryA.put("tags", Collections.emptyList()); - // skipped, because it is not valid json - final String corrupEntry = "{\"corrupt..."; + // skipped, because it is not valid json + final String corrupEntry = "{\"corrupt..."; - // valid entry - final Map entryB = new HashMap<>(); - entryB.put("duration", 2); - entryB.put("@timestamp", dateB.format(DateTimeFormatter.ISO_ZONED_DATE_TIME)); - entryB.put("host", host); - entryB.put("tags", Collections.emptyList()); + // valid entry + final Map entryB = new HashMap<>(); + entryB.put("duration", 2); + entryB.put("@timestamp", dateB.format(DateTimeFormatter.ISO_ZONED_DATE_TIME)); + entryB.put("host", host); + entryB.put("tags", Collections.emptyList()); - final ObjectMapper objectMapper = new ObjectMapper(); - final String data = String.join("\n", // - objectMapper.writeValueAsString(entryA), // - corrupEntry, // - objectMapper.writeValueAsString(entryB)// - )// - + "\n"; + final ObjectMapper objectMapper = new ObjectMapper(); + final String data = String.join("\n", // + objectMapper.writeValueAsString(entryA), // + corrupEntry, // + objectMapper.writeValueAsString(entryB)// + )// + + "\n"; - PdbTestUtil.send(data); - } + PdbTestUtil.send(data); + } - try (PerformanceDb db = new PerformanceDb(dataDirectory)) { - final LongList result = db.get(new Query("host=" + host, dateRange)).singleGroup().flatMap(); - Assert.assertEquals(result.size(), 4); + try 
(PerformanceDb db = new PerformanceDb(dataDirectory)) { + final LongList result = db.get(new Query("host=" + host, dateRange)).singleGroup().flatMap(); + Assert.assertEquals(result.size(), 4); - Assert.assertEquals(result.get(0), dateA.toInstant().truncatedTo(ChronoUnit.MILLIS).toEpochMilli()); - Assert.assertEquals(result.get(1), -1); + Assert.assertEquals(result.get(0), dateA.toInstant().truncatedTo(ChronoUnit.MILLIS).toEpochMilli()); + Assert.assertEquals(result.get(1), -1); - Assert.assertEquals(result.get(2), dateB.toInstant().truncatedTo(ChronoUnit.MILLIS).toEpochMilli()); - Assert.assertEquals(result.get(3), 2); - } - } + Assert.assertEquals(result.get(2), dateB.toInstant().truncatedTo(ChronoUnit.MILLIS).toEpochMilli()); + Assert.assertEquals(result.get(3), 2); + } + } - @DataProvider - public Object[][] providerSendingFormats() { - final List data = new ArrayList<>(); + @DataProvider + public Object[][] providerSendingFormats() { + final List data = new ArrayList<>(); - data.add(new Object[] { "csv" }); - data.add(new Object[] { "json" }); + data.add(new Object[] { "csv" }); + data.add(new Object[] { "json" }); - return data.toArray(Object[][]::new); - } + return data.toArray(Object[][]::new); + } - @Test(dataProvider = "providerSendingFormats") - public void testRandomOrder(final String format) throws Exception { + @Test(dataProvider = "providerSendingFormats") + public void testRandomOrder(final String format) throws Exception { - final ThreadLocalRandom rnd = ThreadLocalRandom.current(); - final String host = "someHost"; - final List additionalTagValues = Arrays.asList("foo", "bar", "baz"); - final DateTimeRange dateRange = new DateTimeRange(Instant.ofEpochMilli(-100000L).atOffset(ZoneOffset.UTC), - Instant.ofEpochMilli(10000000L).atOffset(ZoneOffset.UTC)); + final ThreadLocalRandom rnd = ThreadLocalRandom.current(); + final String host = "someHost"; + final List additionalTagValues = Arrays.asList("foo", "bar", "baz"); + final DateTimeRange dateRange = 
new DateTimeRange(Instant.ofEpochMilli(-100000L).atOffset(ZoneOffset.UTC), + Instant.ofEpochMilli(10000000L).atOffset(ZoneOffset.UTC)); - final LongList expected = new LongList(); + final LongList expected = new LongList(); - try (TcpIngestor ingestor = new TcpIngestor(dataDirectory)) { + try (TcpIngestor ingestor = new TcpIngestor(dataDirectory)) { - ingestor.start(); + ingestor.start(); - final LinkedBlockingDeque> queue = new LinkedBlockingDeque<>(); + final LinkedBlockingDeque> queue = new LinkedBlockingDeque<>(); - for (int i = 0; i < 103; i++) // use number of rows that is not a multiple of a page size - { + for (int i = 0; i < 103; i++) // use number of rows that is not a multiple of a page size + { - final long duration = rnd.nextLong(-100000L, 100000L); - final long timestamp = rnd.nextLong(-100000L, 10000000L); + final long duration = rnd.nextLong(-100000L, 100000L); + final long timestamp = rnd.nextLong(-100000L, 10000000L); - final Map entry = new HashMap<>(); - entry.put("@timestamp", Instant.ofEpochMilli(timestamp).atOffset(ZoneOffset.UTC) - .format(DateTimeFormatter.ISO_ZONED_DATE_TIME)); - entry.put("duration", duration); - entry.put("host", host); - entry.put("additionalKey", additionalTagValues.get(rnd.nextInt(additionalTagValues.size()))); + final Map entry = new HashMap<>(); + entry.put("@timestamp", Instant.ofEpochMilli(timestamp).atOffset(ZoneOffset.UTC) + .format(DateTimeFormatter.ISO_ZONED_DATE_TIME)); + entry.put("duration", duration); + entry.put("host", host); + entry.put("additionalKey", additionalTagValues.get(rnd.nextInt(additionalTagValues.size()))); - queue.put(entry); - expected.addAll(timestamp, duration); - } + queue.put(entry); + expected.addAll(timestamp, duration); + } - PdbTestUtil.send(format, queue); - } catch (final Exception e) { - LOGGER.error("", e); - throw e; - } + PdbTestUtil.send(format, queue); + } catch (final Exception e) { + LOGGER.error("", e); + throw e; + } - try (PerformanceDb db = new 
PerformanceDb(dataDirectory)) { - final LongList result = db.get(new Query("host=" + host, dateRange)).singleGroup().flatMap(); - Assert.assertEquals(LongPair.fromLongList(result), LongPair.fromLongList(expected)); - } - } + try (PerformanceDb db = new PerformanceDb(dataDirectory)) { + final LongList result = db.get(new Query("host=" + host, dateRange)).singleGroup().flatMap(); + Assert.assertEquals(LongPair.fromLongList(result), LongPair.fromLongList(expected)); + } + } - public void testCsvIngestorIgnoresColumns() throws Exception { + public void testCsvIngestorIgnoresColumns() throws Exception { - try (TcpIngestor ingestor = new TcpIngestor(dataDirectory)) { + try (TcpIngestor ingestor = new TcpIngestor(dataDirectory)) { - ingestor.start(); + ingestor.start(); - final Map entry = new HashMap<>(); - entry.put("@timestamp", - Instant.ofEpochMilli(1).atOffset(ZoneOffset.UTC).format(DateTimeFormatter.ISO_ZONED_DATE_TIME)); - entry.put("duration", 1); - entry.put("host", "someHost"); - entry.put(TcpIngestor.Handler.COLUM_IGNORE_PREFIX + "ignored", "ignoredValue"); + final Map entry = new HashMap<>(); + entry.put("@timestamp", + Instant.ofEpochMilli(1).atOffset(ZoneOffset.UTC).format(DateTimeFormatter.ISO_ZONED_DATE_TIME)); + entry.put("duration", 1); + entry.put("host", "someHost"); + entry.put(TcpIngestor.Handler.COLUM_IGNORE_PREFIX + "ignored", "ignoredValue"); - PdbTestUtil.sendAsCsv(entry); - } catch (final Exception e) { - LOGGER.error("", e); - throw e; - } + PdbTestUtil.sendAsCsv(entry); + } catch (final Exception e) { + LOGGER.error("", e); + throw e; + } - try (PerformanceDb db = new PerformanceDb(dataDirectory)) { - final List availableFields = db.getFields(DateTimeRange.max()); - Assert.assertEquals(availableFields.toString(), List.of("host").toString(), - "the ignored field is not returned"); - } - } - - - public void testCsvIngestorHandlesDurationAtEnd() throws Exception { + try (PerformanceDb db = new PerformanceDb(dataDirectory)) { + final List 
availableFields = db.getFields(DateTimeRange.max()); + Assert.assertEquals(availableFields.toString(), List.of("host").toString(), + "the ignored field is not returned"); + } + } - String host = "someHost"; - long value1 = 222; - long value2= 1; - try (TcpIngestor ingestor = new TcpIngestor(dataDirectory)) { + public void testCsvIngestorHandlesDurationAtEnd() throws Exception { - ingestor.start(); + String host = "someHost"; + long value1 = 222; + long value2 = 1; + try (TcpIngestor ingestor = new TcpIngestor(dataDirectory)) { - final Map entry1 = new HashMap<>(); - entry1.put("@timestamp", - Instant.ofEpochMilli(1).atOffset(ZoneOffset.UTC).format(DateTimeFormatter.ISO_ZONED_DATE_TIME)); - entry1.put("host", host); - entry1.put("duration", value1); - - final Map entry2 = new HashMap<>(); - entry2.put("@timestamp", - Instant.ofEpochMilli(2).atOffset(ZoneOffset.UTC).format(DateTimeFormatter.ISO_ZONED_DATE_TIME)); - entry2.put("host", host); - entry2.put("duration", value2); + ingestor.start(); - PdbTestUtil.sendAsCsv(List.of("@timestamp","host","duration"), List.of(entry1, entry2)); - } catch (final Exception e) { - LOGGER.error("", e); - throw e; - } + final Map entry1 = new HashMap<>(); + entry1.put("@timestamp", + Instant.ofEpochMilli(1).atOffset(ZoneOffset.UTC).format(DateTimeFormatter.ISO_ZONED_DATE_TIME)); + entry1.put("host", host); + entry1.put("duration", value1); - try (PerformanceDb db = new PerformanceDb(dataDirectory)) { - final LongList result = db.get(new Query("host=" + host, DateTimeRange.max())).singleGroup().flatMap(); - Assert.assertEquals(result.size(), 4); + final Map entry2 = new HashMap<>(); + entry2.put("@timestamp", + Instant.ofEpochMilli(2).atOffset(ZoneOffset.UTC).format(DateTimeFormatter.ISO_ZONED_DATE_TIME)); + entry2.put("host", host); + entry2.put("duration", value2); - Assert.assertEquals(result.get(1), value1); - Assert.assertEquals(result.get(3), value2); - } - } + PdbTestUtil.sendAsCsv(List.of("@timestamp", "host", "duration"), 
List.of(entry1, entry2)); + } catch (final Exception e) { + LOGGER.error("", e); + throw e; + } + + try (PerformanceDb db = new PerformanceDb(dataDirectory)) { + final LongList result = db.get(new Query("host=" + host, DateTimeRange.max())).singleGroup().flatMap(); + Assert.assertEquals(result.size(), 4); + + Assert.assertEquals(result.get(1), value1); + Assert.assertEquals(result.get(3), value2); + } + } } diff --git a/pdb-ui/src/test/java/org/lucares/pdbui/date/FastISODateParserTest.java b/pdb-ui/src/test/java/org/lucares/pdbui/date/FastISODateParserTest.java index 007753d..520e5fb 100644 --- a/pdb-ui/src/test/java/org/lucares/pdbui/date/FastISODateParserTest.java +++ b/pdb-ui/src/test/java/org/lucares/pdbui/date/FastISODateParserTest.java @@ -20,195 +20,195 @@ import org.testng.annotations.Test; @Test public class FastISODateParserTest { - @DataProvider(name = "providerValidDate") - public Object[][] providerValidDate() { - return new Object[][] { // - { "2018-11-18T14:42:49.123456789Z" }, // - { "2018-11-18T14:42:49.123456789+12:34" }, // - { "2018-11-18T14:42:49.12345678Z" }, // - { "2018-11-18T14:42:49.12345678+12:34" }, // - { "2018-11-18T14:42:49.1234567Z" }, // - { "2018-11-18T14:42:49.1234567+12:34" }, // - { "2018-11-18T14:42:49.123456Z" }, // - { "2018-11-18T14:42:49.123456+12:34" }, // - { "2018-11-18T14:42:49.33256Z" }, // - { "2018-11-18T14:42:49.33256+12:34" }, // - { "2018-11-18T14:42:49.3325Z" }, // - { "2018-11-18T14:42:49.3325+12:34" }, // - { "2018-11-18T14:42:49.332Z" }, // - { "2018-11-18T14:42:49.332+00:00" }, // - { "2018-11-18T14:42:49.332+12:34" }, // - { "2018-11-18T14:42:49.332-01:23" }, // - { "2018-11-18T14:55:49.44Z" }, // - { "2018-11-18T14:55:49.55-01:23" }, // - { "2018-11-18T14:55:49.4Z" }, // - { "2018-11-18T14:55:49.5-01:23" }, // - { "2018-11-18T14:55:49.Z" }, // - { "2018-11-18T14:55:49.-01:23" }, // - { "2018-11-18T14:55:49Z" }, // - { "2018-11-18T14:55:49-01:23" },// - }; - } + @DataProvider(name = "providerValidDate") + 
public Object[][] providerValidDate() { + return new Object[][] { // + { "2018-11-18T14:42:49.123456789Z" }, // + { "2018-11-18T14:42:49.123456789+12:34" }, // + { "2018-11-18T14:42:49.12345678Z" }, // + { "2018-11-18T14:42:49.12345678+12:34" }, // + { "2018-11-18T14:42:49.1234567Z" }, // + { "2018-11-18T14:42:49.1234567+12:34" }, // + { "2018-11-18T14:42:49.123456Z" }, // + { "2018-11-18T14:42:49.123456+12:34" }, // + { "2018-11-18T14:42:49.33256Z" }, // + { "2018-11-18T14:42:49.33256+12:34" }, // + { "2018-11-18T14:42:49.3325Z" }, // + { "2018-11-18T14:42:49.3325+12:34" }, // + { "2018-11-18T14:42:49.332Z" }, // + { "2018-11-18T14:42:49.332+00:00" }, // + { "2018-11-18T14:42:49.332+12:34" }, // + { "2018-11-18T14:42:49.332-01:23" }, // + { "2018-11-18T14:55:49.44Z" }, // + { "2018-11-18T14:55:49.55-01:23" }, // + { "2018-11-18T14:55:49.4Z" }, // + { "2018-11-18T14:55:49.5-01:23" }, // + { "2018-11-18T14:55:49.Z" }, // + { "2018-11-18T14:55:49.-01:23" }, // + { "2018-11-18T14:55:49Z" }, // + { "2018-11-18T14:55:49-01:23" },// + }; + } - @Test(dataProvider = "providerValidDate") - public void testParseValidDate(final String date) { + @Test(dataProvider = "providerValidDate") + public void testParseValidDate(final String date) { - final OffsetDateTime actualDate = new FastISODateParser().parse(date); + final OffsetDateTime actualDate = new FastISODateParser().parse(date); - final OffsetDateTime expectedDate = OffsetDateTime.from(DateTimeFormatter.ISO_DATE_TIME.parse(date)); - Assert.assertEquals(actualDate, expectedDate); - } + final OffsetDateTime expectedDate = OffsetDateTime.from(DateTimeFormatter.ISO_DATE_TIME.parse(date)); + Assert.assertEquals(actualDate, expectedDate); + } - @Test(dataProvider = "providerValidDate") - public void testParseValidDateAsEpochMilli(final String date) { + @Test(dataProvider = "providerValidDate") + public void testParseValidDateAsEpochMilli(final String date) { - final long actualDate = new 
FastISODateParser().parseAsEpochMilli(date); + final long actualDate = new FastISODateParser().parseAsEpochMilli(date); - final OffsetDateTime expectedDate = OffsetDateTime.from(DateTimeFormatter.ISO_DATE_TIME.parse(date)); - Assert.assertEquals(actualDate, expectedDate.toInstant().toEpochMilli()); - } + final OffsetDateTime expectedDate = OffsetDateTime.from(DateTimeFormatter.ISO_DATE_TIME.parse(date)); + Assert.assertEquals(actualDate, expectedDate.toInstant().toEpochMilli()); + } - @DataProvider(name = "providerParseInvalidDate") - public Object[][] providerParseInvalidDate() { - return new Object[][] { // - { "a2018-11-18T14:42:49.332Z" }, // - { "a018-11-18T14:42:49.332Z" }, // - { "2a18-11-18T14:42:49.332Z" }, // - { "20a8-11-18T14:42:49.332Z" }, // - { "201a-11-18T14:42:49.332Z" }, // - { "2018-a1-18T14:42:49.332Z" }, // - { "2018-1a-18T14:42:49.332Z" }, // - { "2018-11-a8T14:42:49.332Z" }, // - { "2018-11-1aT14:42:49.332Z" }, // - { "2018-11-18Ta4:42:49.332Z" }, // - { "2018-11-18T1a:42:49.332Z" }, // - { "2018-11-18T14:a2:49.332Z" }, // - { "2018-11-18T14:4a:49.332Z" }, // - { "2018-11-18T14:42:a9.332Z" }, // - { "2018-11-18T14:42:4a.332Z" }, // - { "2018-11-18T14:42:49.a32Z" }, // - { "2018-11-18T14:42:49.3a2Z" }, // - { "2018-11-18T14:42:49.33aZ" }, // - { "2018-11-18T14:42:49.332a" }, // - { "2018-11-18T14:42:49.332a00:00" }, // - { "2018-11-18T14:42:49.332+a0:00" }, // - { "2018-11-18T14:42:49.332+0a:00" }, // - { "2018-11-18T14:42:49.332+00:a0" }, // - { "2018-11-18T14:42:49.332+00:0a" }// - }; - } + @DataProvider(name = "providerParseInvalidDate") + public Object[][] providerParseInvalidDate() { + return new Object[][] { // + { "a2018-11-18T14:42:49.332Z" }, // + { "a018-11-18T14:42:49.332Z" }, // + { "2a18-11-18T14:42:49.332Z" }, // + { "20a8-11-18T14:42:49.332Z" }, // + { "201a-11-18T14:42:49.332Z" }, // + { "2018-a1-18T14:42:49.332Z" }, // + { "2018-1a-18T14:42:49.332Z" }, // + { "2018-11-a8T14:42:49.332Z" }, // + { "2018-11-1aT14:42:49.332Z" }, 
// + { "2018-11-18Ta4:42:49.332Z" }, // + { "2018-11-18T1a:42:49.332Z" }, // + { "2018-11-18T14:a2:49.332Z" }, // + { "2018-11-18T14:4a:49.332Z" }, // + { "2018-11-18T14:42:a9.332Z" }, // + { "2018-11-18T14:42:4a.332Z" }, // + { "2018-11-18T14:42:49.a32Z" }, // + { "2018-11-18T14:42:49.3a2Z" }, // + { "2018-11-18T14:42:49.33aZ" }, // + { "2018-11-18T14:42:49.332a" }, // + { "2018-11-18T14:42:49.332a00:00" }, // + { "2018-11-18T14:42:49.332+a0:00" }, // + { "2018-11-18T14:42:49.332+0a:00" }, // + { "2018-11-18T14:42:49.332+00:a0" }, // + { "2018-11-18T14:42:49.332+00:0a" }// + }; + } - @Test(expectedExceptions = IllegalArgumentException.class, dataProvider = "providerParseInvalidDate") - public void testParseInvalidDate(final String invalidDate) { - new FastISODateParser().parse(invalidDate); - } + @Test(expectedExceptions = IllegalArgumentException.class, dataProvider = "providerParseInvalidDate") + public void testParseInvalidDate(final String invalidDate) { + new FastISODateParser().parse(invalidDate); + } - @DataProvider(name = "providerDateToTimestamp") - public Object[][] providerDateToTimestamp() { - return new Object[][] { // - { "2018-11-18T14:42:49.123Z" }, // - // There are no leap seconds in java-time: - // In reality, UTC has a leap second 2016-12-31T23:59:60Z, but java handles - // this differently. This makes it a little bit easier for us, because we do not - // have to handle this. - { "2016-12-31T23:59:59.999Z" }, // before leap second - { "2017-01-01T00:00:00.000Z" }, // after leap second + @DataProvider(name = "providerDateToTimestamp") + public Object[][] providerDateToTimestamp() { + return new Object[][] { // + { "2018-11-18T14:42:49.123Z" }, // + // There are no leap seconds in java-time: + // In reality, UTC has a leap second 2016-12-31T23:59:60Z, but java handles + // this differently. This makes it a little bit easier for us, because we do not + // have to handle this. 
+ { "2016-12-31T23:59:59.999Z" }, // before leap second + { "2017-01-01T00:00:00.000Z" }, // after leap second - // normal leap days exist - { "2016-02-28T23:59:59.999Z" }, // before leap day - { "2016-02-29T00:00:00.000Z" }, // leap day - { "2016-02-29T23:59:59.999Z" }, // leap day - { "2016-03-01T00:00:00.000Z" }, // after leap day + // normal leap days exist + { "2016-02-28T23:59:59.999Z" }, // before leap day + { "2016-02-29T00:00:00.000Z" }, // leap day + { "2016-02-29T23:59:59.999Z" }, // leap day + { "2016-03-01T00:00:00.000Z" }, // after leap day - // dates with non-UTC timezones - { "2018-11-18T14:42:49.123+12:34" }, // - { "2018-11-18T02:34:56.123+12:34" }, // + // dates with non-UTC timezones + { "2018-11-18T14:42:49.123+12:34" }, // + { "2018-11-18T02:34:56.123+12:34" }, // - // dates with non-UTC timezones and leap days - { "2016-02-29T00:59:59.999+01:00" }, // before leap day - { "2016-02-29T01:00:00.000+01:00" }, // leap day - { "2016-03-01T00:59:59.999+01:00" }, // leap day - { "2016-03-01T01:00:00.000+01:00" }, // after leap day - }; - } + // dates with non-UTC timezones and leap days + { "2016-02-29T00:59:59.999+01:00" }, // before leap day + { "2016-02-29T01:00:00.000+01:00" }, // leap day + { "2016-03-01T00:59:59.999+01:00" }, // leap day + { "2016-03-01T01:00:00.000+01:00" }, // after leap day + }; + } - @Test(dataProvider = "providerDateToTimestamp") - public void testDateToTimestamp(final String date) { + @Test(dataProvider = "providerDateToTimestamp") + public void testDateToTimestamp(final String date) { - final long actualEpochMilli = new FastISODateParser().parseAsEpochMilli(date); + final long actualEpochMilli = new FastISODateParser().parseAsEpochMilli(date); - final OffsetDateTime expectedDate = OffsetDateTime.from(DateTimeFormatter.ISO_DATE_TIME.parse(date)); - final long expectedEpochMilli = expectedDate.toInstant().toEpochMilli(); - Assert.assertEquals(actualEpochMilli, expectedEpochMilli); - } + final OffsetDateTime expectedDate = 
OffsetDateTime.from(DateTimeFormatter.ISO_DATE_TIME.parse(date)); + final long expectedEpochMilli = expectedDate.toInstant().toEpochMilli(); + Assert.assertEquals(actualEpochMilli, expectedEpochMilli); + } - @Test(dataProvider = "providerDateToTimestamp") - public void testDateToTimestampWithBytes(final String date) { + @Test(dataProvider = "providerDateToTimestamp") + public void testDateToTimestampWithBytes(final String date) { - final byte[] dateAsBytes = date.getBytes(StandardCharsets.UTF_8); - final long actualEpochMilli = new FastISODateParser().parseAsEpochMilli(dateAsBytes, 0); + final byte[] dateAsBytes = date.getBytes(StandardCharsets.UTF_8); + final long actualEpochMilli = new FastISODateParser().parseAsEpochMilli(dateAsBytes, 0); - final OffsetDateTime expectedDate = OffsetDateTime.from(DateTimeFormatter.ISO_DATE_TIME.parse(date)); - final long expectedEpochMilli = expectedDate.toInstant().toEpochMilli(); - Assert.assertEquals(actualEpochMilli, expectedEpochMilli); - } + final OffsetDateTime expectedDate = OffsetDateTime.from(DateTimeFormatter.ISO_DATE_TIME.parse(date)); + final long expectedEpochMilli = expectedDate.toInstant().toEpochMilli(); + Assert.assertEquals(actualEpochMilli, expectedEpochMilli); + } - @Test(enabled = false) - public void test() { + @Test(enabled = false) + public void test() { - final OffsetDateTime expectedDate = OffsetDateTime - .from(DateTimeFormatter.ISO_DATE_TIME.parse("2016-12-31T23:00:00.000Z")); + final OffsetDateTime expectedDate = OffsetDateTime + .from(DateTimeFormatter.ISO_DATE_TIME.parse("2016-12-31T23:00:00.000Z")); - final long epochMilli = expectedDate.toInstant().toEpochMilli(); + final long epochMilli = expectedDate.toInstant().toEpochMilli(); - for (int i = 0; i < 1000; i++) { + for (int i = 0; i < 1000; i++) { - final long timestamp = epochMilli + i * 10000; - final OffsetDateTime date = Instant.ofEpochMilli(timestamp).atOffset(ZoneOffset.UTC); + final long timestamp = epochMilli + i * 10000; + final 
OffsetDateTime date = Instant.ofEpochMilli(timestamp).atOffset(ZoneOffset.UTC); - System.out.println(timestamp + " " + date.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)); - } - } + System.out.println(timestamp + " " + date.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)); + } + } - public static void main(final String[] args) throws IOException, InterruptedException { - final Path path = Path.of("/home/andi/ws/performanceDb/data/production/dates.csv"); + public static void main(final String[] args) throws IOException, InterruptedException { + final Path path = Path.of("/home/andi/ws/performanceDb/data/production/dates.csv"); - final List dates = new ArrayList<>(); + final List dates = new ArrayList<>(); - try (final BufferedReader reader = new BufferedReader(new FileReader(path.toFile(), StandardCharsets.UTF_8))) { - String line; - while ((line = reader.readLine()) != null) { - dates.add(line.getBytes()); - } - } + try (final BufferedReader reader = new BufferedReader(new FileReader(path.toFile(), StandardCharsets.UTF_8))) { + String line; + while ((line = reader.readLine()) != null) { + dates.add(line.getBytes()); + } + } - for (int i = 0; i < 20; i++) { + for (int i = 0; i < 20; i++) { - System.gc(); - TimeUnit.MILLISECONDS.sleep(100); - System.gc(); - TimeUnit.MILLISECONDS.sleep(100); - System.gc(); - TimeUnit.MILLISECONDS.sleep(100); - System.gc(); + System.gc(); + TimeUnit.MILLISECONDS.sleep(100); + System.gc(); + TimeUnit.MILLISECONDS.sleep(100); + System.gc(); + TimeUnit.MILLISECONDS.sleep(100); + System.gc(); - TimeUnit.SECONDS.sleep(1); + TimeUnit.SECONDS.sleep(1); - final long start = System.nanoTime(); - final FastISODateParser fastISODateParser = new FastISODateParser(); + final long start = System.nanoTime(); + final FastISODateParser fastISODateParser = new FastISODateParser(); - for (final byte[] date : dates) { - fastISODateParser.parseAsEpochMilli(date, 0); - // final long timestamp = - // fastISODateParser.parse(date).toInstant().toEpochMilli(); 
+ for (final byte[] date : dates) { + fastISODateParser.parseAsEpochMilli(date, 0); + // final long timestamp = + // fastISODateParser.parse(date).toInstant().toEpochMilli(); // final long timestamp = OffsetDateTime.parse(date, DateTimeFormatter.ISO_OFFSET_DATE_TIME) // .toInstant().toEpochMilli(); - // sum += timestamp; - } + // sum += timestamp; + } - final double millis = (System.nanoTime() - start) / 1_000_000.0; - final long datesPerSecond = (long) (dates.size() / (millis / 1000.0)); - System.out.println("duration: " + millis + "ms ; speed: " + datesPerSecond + " dates/s"); - } - } + final double millis = (System.nanoTime() - start) / 1_000_000.0; + final long datesPerSecond = (long) (dates.size() / (millis / 1000.0)); + System.out.println("duration: " + millis + "ms ; speed: " + datesPerSecond + " dates/s"); + } + } } diff --git a/pdb-ui/src/test/java/org/lucares/pdbui/domain/DataSeriesStatsTest.java b/pdb-ui/src/test/java/org/lucares/pdbui/domain/DataSeriesStatsTest.java index f90ac32..d8a7bf9 100644 --- a/pdb-ui/src/test/java/org/lucares/pdbui/domain/DataSeriesStatsTest.java +++ b/pdb-ui/src/test/java/org/lucares/pdbui/domain/DataSeriesStatsTest.java @@ -11,50 +11,50 @@ import org.testng.annotations.Test; @Test public class DataSeriesStatsTest { - @DataProvider - public Object[][] providerAverage() { - final List result = new ArrayList<>(); + @DataProvider + public Object[][] providerAverage() { + final List result = new ArrayList<>(); - { - final List stats = Arrays.asList(// - new DataSeriesStats(10, 0, 0, 5.0)// - ); - final double expected = 5.0; - result.add(new Object[] { stats, expected }); - } - { - final List stats = Arrays.asList(// - new DataSeriesStats(0, 0, 0, 5.0)// - ); - final double expected = 0.0; // no values - result.add(new Object[] { stats, expected }); - } + { + final List stats = Arrays.asList(// + new DataSeriesStats(10, 0, 0, 5.0)// + ); + final double expected = 5.0; + result.add(new Object[] { stats, expected }); + } + { + final 
List stats = Arrays.asList(// + new DataSeriesStats(0, 0, 0, 5.0)// + ); + final double expected = 0.0; // no values + result.add(new Object[] { stats, expected }); + } - { - final List stats = Arrays.asList(// - new DataSeriesStats(10, 0, 0, 5.0), // - new DataSeriesStats(40, 0, 0, 1.0)// - ); - final double expected = 1.8; // 90 / 50 - result.add(new Object[] { stats, expected }); - } - { - final List stats = Arrays.asList(// - new DataSeriesStats(5, 0, 0, 7.0), // - new DataSeriesStats(0, 0, 0, 5.0), // // no values - new DataSeriesStats(20, 0, 0, 2.0)// - ); - final double expected = 3.0; // (35+40) / 25 - result.add(new Object[] { stats, expected }); - } + { + final List stats = Arrays.asList(// + new DataSeriesStats(10, 0, 0, 5.0), // + new DataSeriesStats(40, 0, 0, 1.0)// + ); + final double expected = 1.8; // 90 / 50 + result.add(new Object[] { stats, expected }); + } + { + final List stats = Arrays.asList(// + new DataSeriesStats(5, 0, 0, 7.0), // + new DataSeriesStats(0, 0, 0, 5.0), // // no values + new DataSeriesStats(20, 0, 0, 2.0)// + ); + final double expected = 3.0; // (35+40) / 25 + result.add(new Object[] { stats, expected }); + } - return result.toArray(new Object[0][]); - } + return result.toArray(new Object[0][]); + } - @Test(dataProvider = "providerAverage") - public void testAverage(final Collection stats, final double expected) { + @Test(dataProvider = "providerAverage") + public void testAverage(final Collection stats, final double expected) { - final double actual = DataSeriesStats.average(stats); - Assert.assertEquals(actual, expected, 0.01); - } + final double actual = DataSeriesStats.average(stats); + Assert.assertEquals(actual, expected, 0.01); + } } diff --git a/pdb-utils/src/main/java/org/lucares/utils/CollectionUtils.java b/pdb-utils/src/main/java/org/lucares/utils/CollectionUtils.java index d396cb8..33f0bfe 100644 --- a/pdb-utils/src/main/java/org/lucares/utils/CollectionUtils.java +++ 
b/pdb-utils/src/main/java/org/lucares/utils/CollectionUtils.java @@ -15,144 +15,143 @@ import java.util.stream.Collectors; import java.util.stream.Stream; public class CollectionUtils { - - public interface Compare { - public boolean test(T valueA); + public interface Compare { - public static Compare compare(Function keyExtractor, V value) { - Objects.requireNonNull(keyExtractor); - return t -> Objects.equals(keyExtractor.apply(t), value); - } + public boolean test(T valueA); - default Compare thenCompare(Compare other) { - Objects.requireNonNull(other); - return t -> { - final boolean res = test(t); - return res ? other.test(t) : false; - }; - } - - default Compare thenCompare(Function keyExtractor, V value) { - return thenCompare(compare(keyExtractor, value)); - } - } - - - public static List copySort(Collection collection, Comparator comparator){ - final List result = new ArrayList(collection); - Collections.sort(result, comparator); - return result; - } - - public static void mapInPlace(final List list, final Function mapper) { - for (int i = 0; i < list.size(); i++) { - final T value = list.get(i); - final T newValue = mapper.apply(value); - list.set(i, newValue); - } - } - - public static List map(final Collection list, final Function mapper) { - final List result = new ArrayList<>(list.size()); - - for (final T t : list) { - result.add(mapper.apply(t)); - } - - return result; - } - - public static List map(final T[] input, final Function mapper) { - return Stream.of(input).map(mapper).collect(Collectors.toList()); - } - - public static , T, R> O map(final Collection input, final O result, - final Function mapper) { - - for (final T t : input) { - final R e = mapper.apply(t); - result.add(e); - } - - return result; - } - - public static Map createMapFromValues(final Iterable iterable, final Function keyMapper) { - final Map result = new HashMap<>(); - - for (final V value : iterable) { - final T key = keyMapper.apply(value); - - result.put(key, value); - } - 
- return result; - } - - public static Map createMapFromKeys(final Iterable iterable, - final Function valueMapper) { - final Map result = new HashMap<>(); - - for (final KEY key : iterable) { - final VALUE value = valueMapper.apply(key); - - result.put(key, value); - } - - return result; - } - - public static List filter(final Collection collection, final Predicate predicate) { - return collection.stream().filter(predicate).collect(Collectors.toList()); - } - - public static int indexOf(final List list, final Predicate predicate) { - for (int i = 0; i < list.size(); i++) { - if (predicate.test(list.get(i))) { - return i; - } - } - return -1; - } - - public static boolean contains(Collection collection, final Compare compare) { - for (T t : collection) { - boolean found = compare.test(t); - if (found ) { - return true; + public static Compare compare(Function keyExtractor, V value) { + Objects.requireNonNull(keyExtractor); + return t -> Objects.equals(keyExtractor.apply(t), value); } - } - return false; - } - - public static long count(Collection collection, final Compare compare) { - long count = 0; - for (T t : collection) { - boolean found = compare.test(t); - if (found ) { - count++; + + default Compare thenCompare(Compare other) { + Objects.requireNonNull(other); + return t -> { + final boolean res = test(t); + return res ? 
other.test(t) : false; + }; + } + + default Compare thenCompare(Function keyExtractor, V value) { + return thenCompare(compare(keyExtractor, value)); } - } - return count; } - public static > T removeAll(final T collection, final T remove, - final Supplier generator) { + public static List copySort(Collection collection, Comparator comparator) { + final List result = new ArrayList(collection); + Collections.sort(result, comparator); + return result; + } - final T result = generator.get(); - result.addAll(collection); - result.removeAll(remove); - return result; - } + public static void mapInPlace(final List list, final Function mapper) { + for (int i = 0; i < list.size(); i++) { + final T value = list.get(i); + final T newValue = mapper.apply(value); + list.set(i, newValue); + } + } - public static > T retainAll(final T collection, final T retain, - final Supplier generator) { + public static List map(final Collection list, final Function mapper) { + final List result = new ArrayList<>(list.size()); - final T result = generator.get(); - result.addAll(collection); - result.retainAll(retain); - return result; - } + for (final T t : list) { + result.add(mapper.apply(t)); + } + + return result; + } + + public static List map(final T[] input, final Function mapper) { + return Stream.of(input).map(mapper).collect(Collectors.toList()); + } + + public static , T, R> O map(final Collection input, final O result, + final Function mapper) { + + for (final T t : input) { + final R e = mapper.apply(t); + result.add(e); + } + + return result; + } + + public static Map createMapFromValues(final Iterable iterable, final Function keyMapper) { + final Map result = new HashMap<>(); + + for (final V value : iterable) { + final T key = keyMapper.apply(value); + + result.put(key, value); + } + + return result; + } + + public static Map createMapFromKeys(final Iterable iterable, + final Function valueMapper) { + final Map result = new HashMap<>(); + + for (final KEY key : iterable) { + 
final VALUE value = valueMapper.apply(key); + + result.put(key, value); + } + + return result; + } + + public static List filter(final Collection collection, final Predicate predicate) { + return collection.stream().filter(predicate).collect(Collectors.toList()); + } + + public static int indexOf(final List list, final Predicate predicate) { + for (int i = 0; i < list.size(); i++) { + if (predicate.test(list.get(i))) { + return i; + } + } + return -1; + } + + public static boolean contains(Collection collection, final Compare compare) { + for (T t : collection) { + boolean found = compare.test(t); + if (found) { + return true; + } + } + return false; + } + + public static long count(Collection collection, final Compare compare) { + long count = 0; + for (T t : collection) { + boolean found = compare.test(t); + if (found) { + count++; + } + } + return count; + } + + public static > T removeAll(final T collection, final T remove, + final Supplier generator) { + + final T result = generator.get(); + result.addAll(collection); + result.removeAll(remove); + return result; + } + + public static > T retainAll(final T collection, final T retain, + final Supplier generator) { + + final T result = generator.get(); + result.addAll(collection); + result.retainAll(retain); + return result; + } } diff --git a/pdb-utils/src/main/java/org/lucares/utils/DateUtils.java b/pdb-utils/src/main/java/org/lucares/utils/DateUtils.java index 6da633d..ebb3cfa 100644 --- a/pdb-utils/src/main/java/org/lucares/utils/DateUtils.java +++ b/pdb-utils/src/main/java/org/lucares/utils/DateUtils.java @@ -5,14 +5,14 @@ import java.time.ZoneOffset; public class DateUtils { - public static OffsetDateTime getDate(final int year, final int month, final int day, final int hour, - final int minute, final int second) { + public static OffsetDateTime getDate(final int year, final int month, final int day, final int hour, + final int minute, final int second) { - final OffsetDateTime result = 
OffsetDateTime.of(year, month, day, hour, minute, second, 0, ZoneOffset.UTC); - return result; - } + final OffsetDateTime result = OffsetDateTime.of(year, month, day, hour, minute, second, 0, ZoneOffset.UTC); + return result; + } - public static OffsetDateTime nowInUtc() { - return OffsetDateTime.now(ZoneOffset.UTC); - } + public static OffsetDateTime nowInUtc() { + return OffsetDateTime.now(ZoneOffset.UTC); + } } diff --git a/pdb-utils/src/main/java/org/lucares/utils/Preconditions.java b/pdb-utils/src/main/java/org/lucares/utils/Preconditions.java index 45b59e5..7ce29e5 100644 --- a/pdb-utils/src/main/java/org/lucares/utils/Preconditions.java +++ b/pdb-utils/src/main/java/org/lucares/utils/Preconditions.java @@ -4,98 +4,98 @@ import java.text.MessageFormat; import java.util.Objects; public class Preconditions { - public static void checkEven(final long value, final String message) { - if (value % 2 != 0) { - throw new IllegalStateException(message + ". Was: " + value); - } - } + public static void checkEven(final long value, final String message) { + if (value % 2 != 0) { + throw new IllegalStateException(message + ". Was: " + value); + } + } - /** - * - * @param a - * @param b - * @param message formatted with {@link MessageFormat} - * @param args - */ - public static void checkGreater(final long a, final long b, final String message, final Object... args) { + /** + * + * @param a + * @param b + * @param message formatted with {@link MessageFormat} + * @param args + */ + public static void checkGreater(final long a, final long b, final String message, final Object... 
args) { - if (a <= b) { - throw new IllegalStateException(MessageFormat.format(message, args) + " Expected: " + a + " > " + b); - } - } + if (a <= b) { + throw new IllegalStateException(MessageFormat.format(message, args) + " Expected: " + a + " > " + b); + } + } - /** - * - * @param a - * @param b - * @param message formatted with {@link MessageFormat} - * @param args - * @throws IllegalStateException if {@code a} is not greater or equal to - * {@code b} - */ - public static void checkGreaterOrEqual(final long a, final long b, final String message, final Object... args) { - if (a < b) { - throw new IllegalStateException(MessageFormat.format(message, args) + " Expected: " + a + " >= " + b); - } - } + /** + * + * @param a + * @param b + * @param message formatted with {@link MessageFormat} + * @param args + * @throws IllegalStateException if {@code a} is not greater or equal to + * {@code b} + */ + public static void checkGreaterOrEqual(final long a, final long b, final String message, final Object... args) { + if (a < b) { + throw new IllegalStateException(MessageFormat.format(message, args) + " Expected: " + a + " >= " + b); + } + } - public static void checkSmaller(final long a, final long b, final String message, final Object... args) { - if (a >= b) { - throw new IllegalStateException(MessageFormat.format(message, args) + " Expected: " + a + " < " + b); - } - } + public static void checkSmaller(final long a, final long b, final String message, final Object... 
args) { + if (a >= b) { + throw new IllegalStateException(MessageFormat.format(message, args) + " Expected: " + a + " < " + b); + } + } - public static void checkEqual(final Object actual, final Object expected) { - checkEqual(actual, expected, "expected {0} is equal to {1}", actual, expected); - } + public static void checkEqual(final Object actual, final Object expected) { + checkEqual(actual, expected, "expected {0} is equal to {1}", actual, expected); + } - /** - * Check that the given values are equal. The check is done with - * {@link Objects#equals(Object, Object)} - * - * @param actual the actual value - * @param expected the expected value - * @param message formatted with {@link MessageFormat} - * @param args arguments for the message - * @throws IllegalStateException if {@code actual} is not equal to - * {@code expected} - */ - public static void checkEqual(final Object actual, final Object expected, final String message, - final Object... args) { - if (!Objects.equals(actual, expected)) { - throw new IllegalStateException( - MessageFormat.format(message, args) + " Expected: " + actual + " equals " + expected); - } - } + /** + * Check that the given values are equal. The check is done with + * {@link Objects#equals(Object, Object)} + * + * @param actual the actual value + * @param expected the expected value + * @param message formatted with {@link MessageFormat} + * @param args arguments for the message + * @throws IllegalStateException if {@code actual} is not equal to + * {@code expected} + */ + public static void checkEqual(final Object actual, final Object expected, final String message, + final Object... args) { + if (!Objects.equals(actual, expected)) { + throw new IllegalStateException( + MessageFormat.format(message, args) + " Expected: " + actual + " equals " + expected); + } + } - /** - * Check that the given value is true. 
- * - * @param actual must be true - * @param message formatted with {@link MessageFormat} - * @param args arguments for the message - * @throws IllegalStateException if {@code actual} is not true - */ - public static void checkTrue(final boolean actual, final String message, final Object... args) { - checkEqual(actual, true, message, args); - } + /** + * Check that the given value is true. + * + * @param actual must be true + * @param message formatted with {@link MessageFormat} + * @param args arguments for the message + * @throws IllegalStateException if {@code actual} is not true + */ + public static void checkTrue(final boolean actual, final String message, final Object... args) { + checkEqual(actual, true, message, args); + } - /** - * Check that the given value is false. - * - * @param actual must be false - * @param message formatted with {@link MessageFormat} - * @param args arguments for the message - * @throws IllegalStateException if {@code actual} is not false - */ - public static void checkFalse(final boolean actual, final String message, final Object... args) { - checkEqual(actual, false, message, args); - } + /** + * Check that the given value is false. + * + * @param actual must be false + * @param message formatted with {@link MessageFormat} + * @param args arguments for the message + * @throws IllegalStateException if {@code actual} is not false + */ + public static void checkFalse(final boolean actual, final String message, final Object... args) { + checkEqual(actual, false, message, args); + } - public static void checkNull(final Object actual, final String message, final Object... args) { - if (actual != null) { - throw new IllegalStateException(MessageFormat.format(message, args)); - } - } + public static void checkNull(final Object actual, final String message, final Object... 
args) { + if (actual != null) { + throw new IllegalStateException(MessageFormat.format(message, args)); + } + } } diff --git a/pdb-utils/src/main/java/org/lucares/utils/cache/HotEntryCache.java b/pdb-utils/src/main/java/org/lucares/utils/cache/HotEntryCache.java index b4665b2..1ffbb61 100644 --- a/pdb-utils/src/main/java/org/lucares/utils/cache/HotEntryCache.java +++ b/pdb-utils/src/main/java/org/lucares/utils/cache/HotEntryCache.java @@ -43,521 +43,521 @@ import org.slf4j.LoggerFactory; */ public class HotEntryCache { - private static final Logger LOGGER = LoggerFactory.getLogger(HotEntryCache.class); - - public interface EventListener { - public void onRemove(K key, V value); - } - - public static class Event { - private final K key; - private final V value; - - public Event(final K key, final V value) { - super(); - this.key = key; - this.value = value; - } - - public K getKey() { - return key; - } - - public V getValue() { - return value; - } - - @Override - public String toString() { - return "Event [key=" + key + ", value=" + value + "]"; - } - } - - private final static class Entry { - private volatile long lastAccessed; - - private V value; - - public Entry(final V value, final long creationTime) { - this.value = value; - lastAccessed = creationTime; - } - - public V getValue() { - return value; - } - - public void setValue(final V value) { - this.value = value; - } - - public long getLastAccessed() { - return lastAccessed; - } - - public void touch(final long instant) { - lastAccessed = instant; - } - } - - private static final class TimeUpdaterThread extends Thread { - - private final WeakHashMap, Void> weakCaches = new WeakHashMap<>(); - private volatile long updateInterval = Duration.ofSeconds(1).toMillis(); - private final Object lock = new Object(); - - public TimeUpdaterThread() { - setDaemon(true); - setName("HotEntryCache-time"); - } - - public void addCache(final HotEntryCache cache) { - synchronized (lock) { - weakCaches.put(cache, null); - } - } 
- - public void setUpdateInterval(final long updateInterval) { - this.updateInterval = Math.max(updateInterval, 1); - interrupt(); - } - - @Override - public void run() { - while (true) { - try { - TimeUnit.MILLISECONDS.sleep(updateInterval); - } catch (final InterruptedException e) { - // interrupted: update the 'now' instants of all caches - } - try { - final Set> keySet = new HashSet<>(); - synchronized (lock) { - keySet.addAll(weakCaches.keySet()); - } - // LOGGER.trace("update time"); - for (final HotEntryCache cache : keySet) { - cache.updateTime(); - } - } catch (final ConcurrentModificationException e) { - // ignore: might happen if an entry in weakCaches is garbage collected - // while we are iterating - } - } - } - } - - private static final class EvictionThread extends Thread { - - private final WeakHashMap, Void> weakCaches = new WeakHashMap<>(); - private final AtomicReference> future = new AtomicReference<>(null); - private final Object lock = new Object(); - - private long minSleepPeriodInMs = Duration.ofSeconds(5).toMillis(); - private long maxSleepPeriodInMs = Duration.ofDays(1).toMillis(); - - public EvictionThread() { - setDaemon(true); - setName("HotEntryCache-eviction"); - } - - public void addCache(final HotEntryCache cache) { - synchronized (lock) { - weakCaches.put(cache, null); - } - } - - private long getMinSleepPeriod() { - return minSleepPeriodInMs; - } - - private long getMaxSleepPeriod() { - return maxSleepPeriodInMs; - } - - @Override - public void run() { - long timeToNextEviction = maxSleepPeriodInMs; - - while (true) { - sleepToNextEviction(timeToNextEviction); - - final CompletableFuture future = this.future.getAcquire(); - - try { - final long minNextEvictionTime = evictStaleEntries(); - - timeToNextEviction = normalizeDurationToNextEviction(minNextEvictionTime); - - if (future != null) { - this.future.set(null); - future.complete(null); - } - } catch (final ConcurrentModificationException e) { - // ignore: might happen if an 
entry in weakCaches is garbage collected - // while we are iterating - } - } - } - - private long normalizeDurationToNextEviction(final long minNextEvictionTime) { - long timeToNextEviction; - if (minNextEvictionTime != Long.MAX_VALUE) { - timeToNextEviction = minSleepPeriodInMs; - } else { - final long now = System.currentTimeMillis(); - timeToNextEviction = minNextEvictionTime - now; - } - return timeToNextEviction; - } - - private long evictStaleEntries() { - long minNextEvictionTime = Long.MAX_VALUE; - final Set> caches = new HashSet<>(); - synchronized (lock) { - caches.addAll(weakCaches.keySet()); - } - for (final HotEntryCache cache : caches) { - final long nextEvictionTime = cache.evict(); - minNextEvictionTime = Math.min(minNextEvictionTime, nextEvictionTime); - } - return minNextEvictionTime; - } - - private void sleepToNextEviction(final long timeToNextEviction) { - try { - final long timeToSleep = Math.min(timeToNextEviction, maxSleepPeriodInMs); - final long timeToSleepMS = Math.max(timeToSleep, minSleepPeriodInMs); - LOGGER.trace("sleeping {}ms", timeToSleepMS); - TimeUnit.MILLISECONDS.sleep(timeToSleepMS); - } catch (final InterruptedException e) { - // interrupted: evict stale elements from all caches and compute the delay until - // the next check - } - } - - public void nextEvictionChanged() { - interrupt(); - } - - Future nextEvictionChangedWithFuture() { - final CompletableFuture result = new CompletableFuture<>(); - final boolean hasBeenSet = this.future.compareAndSet(null, result); - if (!hasBeenSet) { - throw new IllegalStateException( - "Future was already set. 
This method is expected to be called only in tests and only one at a time."); - } - - interrupt(); - - return result; - } - - public void setMinSleepPeriod(final long minSleepPeriodInMs) { - this.minSleepPeriodInMs = minSleepPeriodInMs; - } - - public void setMaxSleepPeriod(final long maxSleepPeriodInMs) { - this.maxSleepPeriodInMs = maxSleepPeriodInMs; - } - } - - private static final EvictionThread EVICTER = new EvictionThread(); - - private static final TimeUpdaterThread TIME_UPDATER = new TimeUpdaterThread(); - - static { - EVICTER.start(); - TIME_UPDATER.start(); - } - - /** - * Mapping of the key to the value. - *

- * The value is stored together with the last access time. - */ - private final ConcurrentHashMap> cache = new ConcurrentHashMap<>(); - - private final CopyOnWriteArrayList> listeners = new CopyOnWriteArrayList<>(); - - private final long timeToLive; - - private volatile long now = 0; - - private Clock clock; - - private final String name; - - private int maxSize; - - private long nextProbableEvictionTime; - - HotEntryCache(final Duration timeToLive, final int maxSize, final Clock clock, final String name) { - this.timeToLive = timeToLive.toMillis(); - this.maxSize = maxSize; - this.clock = clock; - this.name = name; - now = clock.millis(); - - EVICTER.addCache(this); - TIME_UPDATER.addCache(this); - } - - HotEntryCache(final Duration timeToLive, final int maxSize, final Clock clock) { - this(timeToLive, maxSize, clock, UUID.randomUUID().toString()); - } - - public HotEntryCache(final Duration timeToLive, final int maxSize, final String name) { - this(timeToLive, maxSize, Clock.systemDefaultZone(), name); - } - - public HotEntryCache(final Duration timeToLive, final int maxSize) { - this(timeToLive, maxSize, Clock.systemDefaultZone(), UUID.randomUUID().toString()); - } - - public int size() { - return cache.size(); - } - - public String getName() { - return name; - } - - public void addListener(final EventListener listener) { - listeners.add(listener); - } - - static void setMinSleepPeriod(final long minSleepPeriodInMs) { - EVICTER.setMinSleepPeriod(minSleepPeriodInMs); - TIME_UPDATER.setUpdateInterval(minSleepPeriodInMs / 2); - } - - static void setMaxSleepPeriod(final long maxSleepPeriodInMs) { - EVICTER.setMaxSleepPeriod(maxSleepPeriodInMs); - TIME_UPDATER.setUpdateInterval(EVICTER.getMinSleepPeriod()); - } - - static long getMinSleepPeriod() { - return EVICTER.getMinSleepPeriod(); - } - - static long getMaxSleepPeriod() { - return EVICTER.getMaxSleepPeriod(); - } - - public V get(final K key) { - final Entry entry = cache.computeIfPresent(key, (k, e) -> { - 
touch(key, e); - return e; - }); - return entry != null ? entry.getValue() : null; - } - - public V put(final K key, final V value) { - - removeEldestIfCacheTooBig(); - - final boolean wasEmptyBefore = cache.isEmpty(); - final AtomicReference oldValueAtomicReference = new AtomicReference<>(); - cache.compute(key, (k, oldEntry) -> { - final V oldValue = oldEntry != null ? oldEntry.getValue() : null; - oldValueAtomicReference.set(oldValue); - - final Entry entry; - if (oldEntry != null) { - oldEntry.setValue(value); - entry = oldEntry; - } else { - final long creationTime = now(); - entry = new Entry<>(value, creationTime); - nextProbableEvictionTime = Math.min(nextProbableEvictionTime, now + timeToLive); - } - touch(k, entry); - return entry; - }); - - if (wasEmptyBefore) { - // The eviction thread sleeps very long if there are no elements. - // We have to wake it, so that it can compute a new time to sleep. - EVICTER.nextEvictionChanged(); - } - return oldValueAtomicReference.get(); - } - - /** - * Puts the value supplied by the mappingFunction, if the key does not already - * exist in the map. The operation is done atomically, that is the function is - * executed at most once. This method is blocking while other threads are - * computing the mapping function. Therefore the computation should be short and - * simple. 
- * - * @param key key of the value - * @param mappingFunction a function that returns the value that should be - * inserted - * @return the newly inserted or existing value, or null if - * {@code mappingFunction} returned {@code null} - */ - public V putIfAbsent(final K key, final Function mappingFunction) { - - removeEldestIfCacheTooBig(); - - final boolean wasEmptyBefore = cache.isEmpty(); - final Entry entry = cache.computeIfAbsent(key, (k) -> { - final V value = mappingFunction.apply(k); - final long creationTime = now; - final Entry e = new Entry<>(value, creationTime); - nextProbableEvictionTime = Math.min(nextProbableEvictionTime, now + timeToLive); - touch(key, e); - return e; - }); - if (wasEmptyBefore) { - // The eviction thread sleeps very long if there are no elements. - // We have to wake it, so that it can compute a new time to sleep. - EVICTER.nextEvictionChanged(); - } - - return entry != null ? entry.getValue() : null; - } - - public V remove(final K key) { - - final AtomicReference> oldValue = new AtomicReference<>(); - cache.computeIfPresent(key, (k, e) -> { - oldValue.set(e); - handleEvent(k, e.getValue()); - return null; - }); - return oldValue.get() != null ? 
oldValue.get().getValue() : null; - } - - public void clear() { - for (final K key : cache.keySet()) { - remove(key); - } - } - - public void forEach(final Consumer consumer) { - - cache.forEachEntry(Long.MAX_VALUE, entry -> { - touch(entry.getKey(), entry.getValue()); - consumer.accept(entry.getValue().getValue()); - }); - } - - private void removeEldestIfCacheTooBig() { - if (cache.size() >= maxSize) { - removeEldest(); - } - } - - private synchronized void removeEldest() { - - if (cache.size() >= maxSize) { - final LongList lastAccessTimes = new LongList(cache.size()); - for (final java.util.Map.Entry> mapEntry : cache.entrySet()) { - final Entry entry = mapEntry.getValue(); - final long lastAccessed = entry.getLastAccessed(); - lastAccessTimes.add(lastAccessed); - } - - lastAccessTimes.sort(); - - final int numEntriesToRemove = Math.max((int) (maxSize * 0.2), 1); - - final long oldestValuesToKeep = lastAccessTimes.get(numEntriesToRemove - 1) + 1; - nextProbableEvictionTime = evictInternal(oldestValuesToKeep, numEntriesToRemove); - } - } - - private long evict() { - final long now = now(); - - if (nextProbableEvictionTime <= now) { - - final long oldestValuesToKeep = now - timeToLive; - - nextProbableEvictionTime = evictInternal(oldestValuesToKeep, Integer.MAX_VALUE); - } else { - LOGGER.trace("{}: skip eviction - next eviction at {} (now: {})", name, nextProbableEvictionTime, now); - } - return nextProbableEvictionTime; - } - - private long evictInternal(final long oldestValuesToKeep, final int maxEntriesToRemove) { - - LOGGER.trace("{}: cache size before eviction {}", name, cache.size()); - - long oldestAccessTime = Long.MAX_VALUE; - final AtomicInteger removedEntries = new AtomicInteger(); - - for (final java.util.Map.Entry> mapEntry : cache.entrySet()) { - final Entry entry = mapEntry.getValue(); - final long lastAccessed = entry.getLastAccessed(); - oldestAccessTime = Math.min(oldestAccessTime, lastAccessed); - - if (removedEntries.get() >= 
maxEntriesToRemove) { - // finish iterating over all entries so that this method return the correct - // nextEvictionTime - continue; - } - - if (lastAccessed >= oldestValuesToKeep) { - continue; - } - - final K keyToBeRemoved = mapEntry.getKey(); - - cache.computeIfPresent(keyToBeRemoved, (k, e) -> { - - if (entry.getLastAccessed() < oldestValuesToKeep) { - removedEntries.incrementAndGet(); - handleEvent(k, e.getValue()); - return null; - } - return e; - }); - - } - LOGGER.trace("{}: cache size after eviction {}", name, cache.size()); - - final long nextEvictionTime = oldestAccessTime == Long.MAX_VALUE ? Long.MAX_VALUE - : oldestAccessTime + timeToLive; - return nextEvictionTime; - } - - private long now() { - return now; - } - - // visible for test - void updateTime() { - now = clock.millis(); - } + private static final Logger LOGGER = LoggerFactory.getLogger(HotEntryCache.class); + + public interface EventListener { + public void onRemove(K key, V value); + } + + public static class Event { + private final K key; + private final V value; + + public Event(final K key, final V value) { + super(); + this.key = key; + this.value = value; + } + + public K getKey() { + return key; + } + + public V getValue() { + return value; + } + + @Override + public String toString() { + return "Event [key=" + key + ", value=" + value + "]"; + } + } + + private final static class Entry { + private volatile long lastAccessed; + + private V value; + + public Entry(final V value, final long creationTime) { + this.value = value; + lastAccessed = creationTime; + } + + public V getValue() { + return value; + } + + public void setValue(final V value) { + this.value = value; + } + + public long getLastAccessed() { + return lastAccessed; + } + + public void touch(final long instant) { + lastAccessed = instant; + } + } + + private static final class TimeUpdaterThread extends Thread { + + private final WeakHashMap, Void> weakCaches = new WeakHashMap<>(); + private volatile long updateInterval 
= Duration.ofSeconds(1).toMillis(); + private final Object lock = new Object(); + + public TimeUpdaterThread() { + setDaemon(true); + setName("HotEntryCache-time"); + } + + public void addCache(final HotEntryCache cache) { + synchronized (lock) { + weakCaches.put(cache, null); + } + } + + public void setUpdateInterval(final long updateInterval) { + this.updateInterval = Math.max(updateInterval, 1); + interrupt(); + } + + @Override + public void run() { + while (true) { + try { + TimeUnit.MILLISECONDS.sleep(updateInterval); + } catch (final InterruptedException e) { + // interrupted: update the 'now' instants of all caches + } + try { + final Set> keySet = new HashSet<>(); + synchronized (lock) { + keySet.addAll(weakCaches.keySet()); + } + // LOGGER.trace("update time"); + for (final HotEntryCache cache : keySet) { + cache.updateTime(); + } + } catch (final ConcurrentModificationException e) { + // ignore: might happen if an entry in weakCaches is garbage collected + // while we are iterating + } + } + } + } + + private static final class EvictionThread extends Thread { + + private final WeakHashMap, Void> weakCaches = new WeakHashMap<>(); + private final AtomicReference> future = new AtomicReference<>(null); + private final Object lock = new Object(); + + private long minSleepPeriodInMs = Duration.ofSeconds(5).toMillis(); + private long maxSleepPeriodInMs = Duration.ofDays(1).toMillis(); + + public EvictionThread() { + setDaemon(true); + setName("HotEntryCache-eviction"); + } + + public void addCache(final HotEntryCache cache) { + synchronized (lock) { + weakCaches.put(cache, null); + } + } + + private long getMinSleepPeriod() { + return minSleepPeriodInMs; + } + + private long getMaxSleepPeriod() { + return maxSleepPeriodInMs; + } + + @Override + public void run() { + long timeToNextEviction = maxSleepPeriodInMs; + + while (true) { + sleepToNextEviction(timeToNextEviction); + + final CompletableFuture future = this.future.getAcquire(); + + try { + final long 
minNextEvictionTime = evictStaleEntries(); + + timeToNextEviction = normalizeDurationToNextEviction(minNextEvictionTime); + + if (future != null) { + this.future.set(null); + future.complete(null); + } + } catch (final ConcurrentModificationException e) { + // ignore: might happen if an entry in weakCaches is garbage collected + // while we are iterating + } + } + } + + private long normalizeDurationToNextEviction(final long minNextEvictionTime) { + long timeToNextEviction; + if (minNextEvictionTime != Long.MAX_VALUE) { + timeToNextEviction = minSleepPeriodInMs; + } else { + final long now = System.currentTimeMillis(); + timeToNextEviction = minNextEvictionTime - now; + } + return timeToNextEviction; + } + + private long evictStaleEntries() { + long minNextEvictionTime = Long.MAX_VALUE; + final Set> caches = new HashSet<>(); + synchronized (lock) { + caches.addAll(weakCaches.keySet()); + } + for (final HotEntryCache cache : caches) { + final long nextEvictionTime = cache.evict(); + minNextEvictionTime = Math.min(minNextEvictionTime, nextEvictionTime); + } + return minNextEvictionTime; + } + + private void sleepToNextEviction(final long timeToNextEviction) { + try { + final long timeToSleep = Math.min(timeToNextEviction, maxSleepPeriodInMs); + final long timeToSleepMS = Math.max(timeToSleep, minSleepPeriodInMs); + LOGGER.trace("sleeping {}ms", timeToSleepMS); + TimeUnit.MILLISECONDS.sleep(timeToSleepMS); + } catch (final InterruptedException e) { + // interrupted: evict stale elements from all caches and compute the delay until + // the next check + } + } + + public void nextEvictionChanged() { + interrupt(); + } + + Future nextEvictionChangedWithFuture() { + final CompletableFuture result = new CompletableFuture<>(); + final boolean hasBeenSet = this.future.compareAndSet(null, result); + if (!hasBeenSet) { + throw new IllegalStateException( + "Future was already set. 
This method is expected to be called only in tests and only one at a time."); + } + + interrupt(); + + return result; + } + + public void setMinSleepPeriod(final long minSleepPeriodInMs) { + this.minSleepPeriodInMs = minSleepPeriodInMs; + } + + public void setMaxSleepPeriod(final long maxSleepPeriodInMs) { + this.maxSleepPeriodInMs = maxSleepPeriodInMs; + } + } + + private static final EvictionThread EVICTER = new EvictionThread(); + + private static final TimeUpdaterThread TIME_UPDATER = new TimeUpdaterThread(); + + static { + EVICTER.start(); + TIME_UPDATER.start(); + } + + /** + * Mapping of the key to the value. + *

+ * The value is stored together with the last access time. + */ + private final ConcurrentHashMap> cache = new ConcurrentHashMap<>(); + + private final CopyOnWriteArrayList> listeners = new CopyOnWriteArrayList<>(); + + private final long timeToLive; + + private volatile long now = 0; + + private Clock clock; + + private final String name; + + private int maxSize; + + private long nextProbableEvictionTime; + + HotEntryCache(final Duration timeToLive, final int maxSize, final Clock clock, final String name) { + this.timeToLive = timeToLive.toMillis(); + this.maxSize = maxSize; + this.clock = clock; + this.name = name; + now = clock.millis(); + + EVICTER.addCache(this); + TIME_UPDATER.addCache(this); + } + + HotEntryCache(final Duration timeToLive, final int maxSize, final Clock clock) { + this(timeToLive, maxSize, clock, UUID.randomUUID().toString()); + } + + public HotEntryCache(final Duration timeToLive, final int maxSize, final String name) { + this(timeToLive, maxSize, Clock.systemDefaultZone(), name); + } + + public HotEntryCache(final Duration timeToLive, final int maxSize) { + this(timeToLive, maxSize, Clock.systemDefaultZone(), UUID.randomUUID().toString()); + } + + public int size() { + return cache.size(); + } + + public String getName() { + return name; + } + + public void addListener(final EventListener listener) { + listeners.add(listener); + } + + static void setMinSleepPeriod(final long minSleepPeriodInMs) { + EVICTER.setMinSleepPeriod(minSleepPeriodInMs); + TIME_UPDATER.setUpdateInterval(minSleepPeriodInMs / 2); + } + + static void setMaxSleepPeriod(final long maxSleepPeriodInMs) { + EVICTER.setMaxSleepPeriod(maxSleepPeriodInMs); + TIME_UPDATER.setUpdateInterval(EVICTER.getMinSleepPeriod()); + } + + static long getMinSleepPeriod() { + return EVICTER.getMinSleepPeriod(); + } + + static long getMaxSleepPeriod() { + return EVICTER.getMaxSleepPeriod(); + } + + public V get(final K key) { + final Entry entry = cache.computeIfPresent(key, (k, e) -> { + 
touch(key, e); + return e; + }); + return entry != null ? entry.getValue() : null; + } + + public V put(final K key, final V value) { + + removeEldestIfCacheTooBig(); + + final boolean wasEmptyBefore = cache.isEmpty(); + final AtomicReference oldValueAtomicReference = new AtomicReference<>(); + cache.compute(key, (k, oldEntry) -> { + final V oldValue = oldEntry != null ? oldEntry.getValue() : null; + oldValueAtomicReference.set(oldValue); + + final Entry entry; + if (oldEntry != null) { + oldEntry.setValue(value); + entry = oldEntry; + } else { + final long creationTime = now(); + entry = new Entry<>(value, creationTime); + nextProbableEvictionTime = Math.min(nextProbableEvictionTime, now + timeToLive); + } + touch(k, entry); + return entry; + }); + + if (wasEmptyBefore) { + // The eviction thread sleeps very long if there are no elements. + // We have to wake it, so that it can compute a new time to sleep. + EVICTER.nextEvictionChanged(); + } + return oldValueAtomicReference.get(); + } + + /** + * Puts the value supplied by the mappingFunction, if the key does not already + * exist in the map. The operation is done atomically, that is the function is + * executed at most once. This method is blocking while other threads are + * computing the mapping function. Therefore the computation should be short and + * simple. 
+ * + * @param key key of the value + * @param mappingFunction a function that returns the value that should be + * inserted + * @return the newly inserted or existing value, or null if + * {@code mappingFunction} returned {@code null} + */ + public V putIfAbsent(final K key, final Function mappingFunction) { + + removeEldestIfCacheTooBig(); + + final boolean wasEmptyBefore = cache.isEmpty(); + final Entry entry = cache.computeIfAbsent(key, (k) -> { + final V value = mappingFunction.apply(k); + final long creationTime = now; + final Entry e = new Entry<>(value, creationTime); + nextProbableEvictionTime = Math.min(nextProbableEvictionTime, now + timeToLive); + touch(key, e); + return e; + }); + if (wasEmptyBefore) { + // The eviction thread sleeps very long if there are no elements. + // We have to wake it, so that it can compute a new time to sleep. + EVICTER.nextEvictionChanged(); + } + + return entry != null ? entry.getValue() : null; + } + + public V remove(final K key) { + + final AtomicReference> oldValue = new AtomicReference<>(); + cache.computeIfPresent(key, (k, e) -> { + oldValue.set(e); + handleEvent(k, e.getValue()); + return null; + }); + return oldValue.get() != null ? 
oldValue.get().getValue() : null; + } + + public void clear() { + for (final K key : cache.keySet()) { + remove(key); + } + } + + public void forEach(final Consumer consumer) { + + cache.forEachEntry(Long.MAX_VALUE, entry -> { + touch(entry.getKey(), entry.getValue()); + consumer.accept(entry.getValue().getValue()); + }); + } + + private void removeEldestIfCacheTooBig() { + if (cache.size() >= maxSize) { + removeEldest(); + } + } + + private synchronized void removeEldest() { + + if (cache.size() >= maxSize) { + final LongList lastAccessTimes = new LongList(cache.size()); + for (final java.util.Map.Entry> mapEntry : cache.entrySet()) { + final Entry entry = mapEntry.getValue(); + final long lastAccessed = entry.getLastAccessed(); + lastAccessTimes.add(lastAccessed); + } + + lastAccessTimes.sort(); + + final int numEntriesToRemove = Math.max((int) (maxSize * 0.2), 1); + + final long oldestValuesToKeep = lastAccessTimes.get(numEntriesToRemove - 1) + 1; + nextProbableEvictionTime = evictInternal(oldestValuesToKeep, numEntriesToRemove); + } + } + + private long evict() { + final long now = now(); + + if (nextProbableEvictionTime <= now) { + + final long oldestValuesToKeep = now - timeToLive; + + nextProbableEvictionTime = evictInternal(oldestValuesToKeep, Integer.MAX_VALUE); + } else { + LOGGER.trace("{}: skip eviction - next eviction at {} (now: {})", name, nextProbableEvictionTime, now); + } + return nextProbableEvictionTime; + } + + private long evictInternal(final long oldestValuesToKeep, final int maxEntriesToRemove) { + + LOGGER.trace("{}: cache size before eviction {}", name, cache.size()); + + long oldestAccessTime = Long.MAX_VALUE; + final AtomicInteger removedEntries = new AtomicInteger(); + + for (final java.util.Map.Entry> mapEntry : cache.entrySet()) { + final Entry entry = mapEntry.getValue(); + final long lastAccessed = entry.getLastAccessed(); + oldestAccessTime = Math.min(oldestAccessTime, lastAccessed); + + if (removedEntries.get() >= 
maxEntriesToRemove) { + // finish iterating over all entries so that this method return the correct + // nextEvictionTime + continue; + } + + if (lastAccessed >= oldestValuesToKeep) { + continue; + } + + final K keyToBeRemoved = mapEntry.getKey(); + + cache.computeIfPresent(keyToBeRemoved, (k, e) -> { + + if (entry.getLastAccessed() < oldestValuesToKeep) { + removedEntries.incrementAndGet(); + handleEvent(k, e.getValue()); + return null; + } + return e; + }); + + } + LOGGER.trace("{}: cache size after eviction {}", name, cache.size()); + + final long nextEvictionTime = oldestAccessTime == Long.MAX_VALUE ? Long.MAX_VALUE + : oldestAccessTime + timeToLive; + return nextEvictionTime; + } + + private long now() { + return now; + } + + // visible for test + void updateTime() { + now = clock.millis(); + } - private void touch(final K key, final Entry entry) { - if (entry != null) { - final long now = now(); - entry.touch(now); - - } - } + private void touch(final K key, final Entry entry) { + if (entry != null) { + final long now = now(); + entry.touch(now); + + } + } - private void handleEvent(final K key, final V value) { - for (final EventListener eventSubscribers : listeners) { - - eventSubscribers.onRemove(key, value); + private void handleEvent(final K key, final V value) { + for (final EventListener eventSubscribers : listeners) { + + eventSubscribers.onRemove(key, value); - } - } + } + } - // visible for test - void triggerEvictionAndWait() { - updateTime(); - final Future future = EVICTER.nextEvictionChangedWithFuture(); - try { - future.get(5, TimeUnit.MINUTES); - } catch (InterruptedException | ExecutionException | TimeoutException e) { - throw new IllegalStateException("Error while waiting for eviction thread to finish", e); - } - } + // visible for test + void triggerEvictionAndWait() { + updateTime(); + final Future future = EVICTER.nextEvictionChangedWithFuture(); + try { + future.get(5, TimeUnit.MINUTES); + } catch (InterruptedException | 
ExecutionException | TimeoutException e) { + throw new IllegalStateException("Error while waiting for eviction thread to finish", e); + } + } } diff --git a/pdb-utils/src/main/java/org/lucares/utils/cache/LRUCache.java b/pdb-utils/src/main/java/org/lucares/utils/cache/LRUCache.java index afee1db..24221bd 100644 --- a/pdb-utils/src/main/java/org/lucares/utils/cache/LRUCache.java +++ b/pdb-utils/src/main/java/org/lucares/utils/cache/LRUCache.java @@ -4,32 +4,32 @@ import java.util.LinkedHashMap; import java.util.Map; public class LRUCache { - private final LinkedHashMap cache; + private final LinkedHashMap cache; - public LRUCache(final int maxEntries) { - this.cache = new LinkedHashMap<>(16, 0.75f, true) { - private static final long serialVersionUID = 1L; + public LRUCache(final int maxEntries) { + this.cache = new LinkedHashMap<>(16, 0.75f, true) { + private static final long serialVersionUID = 1L; - protected boolean removeEldestEntry(final Map.Entry eldest) { - return size() > maxEntries; - } - }; - } + protected boolean removeEldestEntry(final Map.Entry eldest) { + return size() > maxEntries; + } + }; + } - public V put(final K key, final V value) { - return cache.put(key, value); - } + public V put(final K key, final V value) { + return cache.put(key, value); + } - public V get(final K key) { - return cache.get(key); - } + public V get(final K key) { + return cache.get(key); + } - public V remove(final K key) { - return cache.remove(key); - } + public V remove(final K key) { + return cache.remove(key); + } - public int size() { - return cache.size(); - } + public int size() { + return cache.size(); + } } diff --git a/pdb-utils/src/main/java/org/lucares/utils/cache/RuntimeExcecutionException.java b/pdb-utils/src/main/java/org/lucares/utils/cache/RuntimeExcecutionException.java index 5e0201a..c3088a0 100644 --- a/pdb-utils/src/main/java/org/lucares/utils/cache/RuntimeExcecutionException.java +++ 
b/pdb-utils/src/main/java/org/lucares/utils/cache/RuntimeExcecutionException.java @@ -4,10 +4,10 @@ import java.util.concurrent.ExecutionException; public class RuntimeExcecutionException extends RuntimeException { - private static final long serialVersionUID = -3626851728980513527L; + private static final long serialVersionUID = -3626851728980513527L; - public RuntimeExcecutionException(final ExecutionException e) { - super(e); - } + public RuntimeExcecutionException(final ExecutionException e) { + super(e); + } } diff --git a/pdb-utils/src/test/java/org/lucares/utils/cache/HotEntryCacheTest.java b/pdb-utils/src/test/java/org/lucares/utils/cache/HotEntryCacheTest.java index 862d310..f5d21c7 100644 --- a/pdb-utils/src/test/java/org/lucares/utils/cache/HotEntryCacheTest.java +++ b/pdb-utils/src/test/java/org/lucares/utils/cache/HotEntryCacheTest.java @@ -26,380 +26,380 @@ import org.testng.annotations.Test; @Test public class HotEntryCacheTest { - static { - Configurator.setRootLevel(Level.TRACE); - } + static { + Configurator.setRootLevel(Level.TRACE); + } - private static final Logger LOGGER = LoggerFactory.getLogger(HotEntryCacheTest.class); + private static final Logger LOGGER = LoggerFactory.getLogger(HotEntryCacheTest.class); - private int cacheId = 0; + private int cacheId = 0; - @Test(invocationCount = 1) - public void testRemovalListenerCalledOnExpire() throws InterruptedException { + @Test(invocationCount = 1) + public void testRemovalListenerCalledOnExpire() throws InterruptedException { - LOGGER.info(""); - LOGGER.info(""); - LOGGER.info("start: testRemovalListenerCalledOnExpire"); + LOGGER.info(""); + LOGGER.info(""); + LOGGER.info("start: testRemovalListenerCalledOnExpire"); - final long originalMinSleepPeriod = HotEntryCache.getMinSleepPeriod(); - final long originalMaxSleepPeriod = HotEntryCache.getMaxSleepPeriod(); - try { - final String key = "key"; - final String value = "value"; - final CountDownLatch latch = new CountDownLatch(1); + final long 
originalMinSleepPeriod = HotEntryCache.getMinSleepPeriod(); + final long originalMaxSleepPeriod = HotEntryCache.getMaxSleepPeriod(); + try { + final String key = "key"; + final String value = "value"; + final CountDownLatch latch = new CountDownLatch(1); - final HotEntryCache cache = new HotEntryCache<>(Duration.ofMillis(1), 10, - "cache-" + ++cacheId); - HotEntryCache.setMinSleepPeriod(1); - HotEntryCache.setMaxSleepPeriod(2); - cache.addListener((k, v) -> { - Assert.assertEquals(k, key); - Assert.assertEquals(v, value); - latch.countDown(); - }); + final HotEntryCache cache = new HotEntryCache<>(Duration.ofMillis(1), 10, + "cache-" + ++cacheId); + HotEntryCache.setMinSleepPeriod(1); + HotEntryCache.setMaxSleepPeriod(2); + cache.addListener((k, v) -> { + Assert.assertEquals(k, key); + Assert.assertEquals(v, value); + latch.countDown(); + }); - cache.put(key, value); - final boolean listenerCalled = latch.await(100, TimeUnit.MILLISECONDS); - Assert.assertTrue(listenerCalled, "removal listener called"); - } finally { - HotEntryCache.setMinSleepPeriod(originalMinSleepPeriod); - HotEntryCache.setMaxSleepPeriod(originalMaxSleepPeriod); - } - } + cache.put(key, value); + final boolean listenerCalled = latch.await(100, TimeUnit.MILLISECONDS); + Assert.assertTrue(listenerCalled, "removal listener called"); + } finally { + HotEntryCache.setMinSleepPeriod(originalMinSleepPeriod); + HotEntryCache.setMaxSleepPeriod(originalMaxSleepPeriod); + } + } - public void testPutAndGet() throws InterruptedException, ExecutionException, TimeoutException { - final HotEntryCache cache = new HotEntryCache<>(Duration.ofSeconds(10), 10); + public void testPutAndGet() throws InterruptedException, ExecutionException, TimeoutException { + final HotEntryCache cache = new HotEntryCache<>(Duration.ofSeconds(10), 10); - final String replacedNull = cache.put("key", "value1"); - Assert.assertEquals(replacedNull, null); + final String replacedNull = cache.put("key", "value1"); + 
Assert.assertEquals(replacedNull, null); - final String cachedValue1 = cache.get("key"); - Assert.assertEquals(cachedValue1, "value1"); + final String cachedValue1 = cache.get("key"); + Assert.assertEquals(cachedValue1, "value1"); - final String replacedValue1 = cache.put("key", "value2"); - Assert.assertEquals(replacedValue1, "value1"); + final String replacedValue1 = cache.put("key", "value2"); + Assert.assertEquals(replacedValue1, "value1"); - final String cachedValue2 = cache.get("key"); - Assert.assertEquals(cachedValue2, "value2"); - } + final String cachedValue2 = cache.get("key"); + Assert.assertEquals(cachedValue2, "value2"); + } - public void testPutTouches() throws InterruptedException, ExecutionException, TimeoutException { - final ModifiableFixedTimeClock clock = new ModifiableFixedTimeClock(); - final Duration timeToLive = Duration.ofSeconds(10); - final HotEntryCache cache = new HotEntryCache<>(timeToLive, 10, clock); + public void testPutTouches() throws InterruptedException, ExecutionException, TimeoutException { + final ModifiableFixedTimeClock clock = new ModifiableFixedTimeClock(); + final Duration timeToLive = Duration.ofSeconds(10); + final HotEntryCache cache = new HotEntryCache<>(timeToLive, 10, clock); - cache.put("key", "value1"); + cache.put("key", "value1"); - clock.plusSeconds(2); - cache.updateTime(); + clock.plusSeconds(2); + cache.updateTime(); - cache.put("key", "value2"); + cache.put("key", "value2"); - clock.plus(timeToLive.minusSeconds(1)); - cache.triggerEvictionAndWait(); - // at this point the entry would have been evicted it it was not touched by the - // second put. + clock.plus(timeToLive.minusSeconds(1)); + cache.triggerEvictionAndWait(); + // at this point the entry would have been evicted it it was not touched by the + // second put. 
- final String cachedValue2 = cache.get("key"); - Assert.assertEquals(cachedValue2, "value2"); + final String cachedValue2 = cache.get("key"); + Assert.assertEquals(cachedValue2, "value2"); - clock.plus(timeToLive.plusSeconds(1)); - // time elapsed since the last put: timeToLive +1s - cache.triggerEvictionAndWait(); + clock.plus(timeToLive.plusSeconds(1)); + // time elapsed since the last put: timeToLive +1s + cache.triggerEvictionAndWait(); - final String cachedValue1_evicted = cache.get("key"); - Assert.assertEquals(cachedValue1_evicted, null); - } + final String cachedValue1_evicted = cache.get("key"); + Assert.assertEquals(cachedValue1_evicted, null); + } - public void testGetTouches() throws Exception { - final ModifiableFixedTimeClock clock = new ModifiableFixedTimeClock(); - final Duration timeToLive = Duration.ofSeconds(10); - final HotEntryCache cache = new HotEntryCache<>(timeToLive, 10, clock); + public void testGetTouches() throws Exception { + final ModifiableFixedTimeClock clock = new ModifiableFixedTimeClock(); + final Duration timeToLive = Duration.ofSeconds(10); + final HotEntryCache cache = new HotEntryCache<>(timeToLive, 10, clock); - cache.put("key", "value1"); + cache.put("key", "value1"); - // skip forward in time, but do not yet trigger eviction - clock.plus(timeToLive.plusMillis(1)); - cache.updateTime(); + // skip forward in time, but do not yet trigger eviction + clock.plus(timeToLive.plusMillis(1)); + cache.updateTime(); - cache.get("key"); // will touch the entry + cache.get("key"); // will touch the entry - cache.triggerEvictionAndWait(); // if get didn't touch, then this will evict the entry + cache.triggerEvictionAndWait(); // if get didn't touch, then this will evict the entry - final String cachedValue1 = cache.get("key"); - Assert.assertEquals(cachedValue1, "value1"); - } + final String cachedValue1 = cache.get("key"); + Assert.assertEquals(cachedValue1, "value1"); + } - public void testEvictionByBackgroundThread() throws 
InterruptedException, ExecutionException, TimeoutException { - final ModifiableFixedTimeClock clock = new ModifiableFixedTimeClock(); - final Duration timeToLive = Duration.ofSeconds(10); - final HotEntryCache cache = new HotEntryCache<>(timeToLive, 10, clock); + public void testEvictionByBackgroundThread() throws InterruptedException, ExecutionException, TimeoutException { + final ModifiableFixedTimeClock clock = new ModifiableFixedTimeClock(); + final Duration timeToLive = Duration.ofSeconds(10); + final HotEntryCache cache = new HotEntryCache<>(timeToLive, 10, clock); - final CompletableFuture evictionEventFuture = new CompletableFuture<>(); - cache.addListener((key, value) -> { - evictionEventFuture.complete(value); - }); + final CompletableFuture evictionEventFuture = new CompletableFuture<>(); + cache.addListener((key, value) -> { + evictionEventFuture.complete(value); + }); - cache.put("key", "value1"); + cache.put("key", "value1"); - clock.plus(timeToLive.minusSeconds(1)); - cache.updateTime(); + clock.plus(timeToLive.minusSeconds(1)); + cache.updateTime(); - cache.put("key2", "value2"); - clock.plus(Duration.ofSeconds(1).plusMillis(1)); - cache.triggerEvictionAndWait(); + cache.put("key2", "value2"); + clock.plus(Duration.ofSeconds(1).plusMillis(1)); + cache.triggerEvictionAndWait(); - final String evictedValue1 = evictionEventFuture.get(5, TimeUnit.MINUTES); // enough time for debugging - Assert.assertEquals(evictedValue1, "value1"); - } + final String evictedValue1 = evictionEventFuture.get(5, TimeUnit.MINUTES); // enough time for debugging + Assert.assertEquals(evictedValue1, "value1"); + } - public void testRemove() throws InterruptedException, ExecutionException, TimeoutException { - LOGGER.info(""); - LOGGER.info(""); - LOGGER.info("start: testRemove"); + public void testRemove() throws InterruptedException, ExecutionException, TimeoutException { + LOGGER.info(""); + LOGGER.info(""); + LOGGER.info("start: testRemove"); - final HotEntryCache cache = 
new HotEntryCache<>(Duration.ofSeconds(10), 10); + final HotEntryCache cache = new HotEntryCache<>(Duration.ofSeconds(10), 10); - final List removedValues = new ArrayList<>(); - cache.addListener((key, value) -> removedValues.add(value)); + final List removedValues = new ArrayList<>(); + cache.addListener((key, value) -> removedValues.add(value)); - cache.put("key", "value1"); + cache.put("key", "value1"); - final String removedValue = cache.remove("key"); - Assert.assertEquals(removedValue, "value1"); + final String removedValue = cache.remove("key"); + Assert.assertEquals(removedValue, "value1"); - Assert.assertEquals(removedValues, Arrays.asList("value1")); + Assert.assertEquals(removedValues, Arrays.asList("value1")); - Assert.assertEquals(cache.get("key"), null); - } + Assert.assertEquals(cache.get("key"), null); + } - public void testClear() throws InterruptedException, ExecutionException, TimeoutException { - final HotEntryCache cache = new HotEntryCache<>(Duration.ofSeconds(10), 10); + public void testClear() throws InterruptedException, ExecutionException, TimeoutException { + final HotEntryCache cache = new HotEntryCache<>(Duration.ofSeconds(10), 10); - final List removedValues = new ArrayList<>(); - cache.addListener((key, value) -> removedValues.add(value)); + final List removedValues = new ArrayList<>(); + cache.addListener((key, value) -> removedValues.add(value)); - cache.put("key1", "value1"); - cache.put("key2", "value2"); + cache.put("key1", "value1"); + cache.put("key2", "value2"); - cache.clear(); + cache.clear(); - Assert.assertEquals(cache.get("key1"), null); - Assert.assertEquals(cache.get("key2"), null); + Assert.assertEquals(cache.get("key1"), null); + Assert.assertEquals(cache.get("key2"), null); - Assert.assertEquals(removedValues, Arrays.asList("value1", "value2")); - } + Assert.assertEquals(removedValues, Arrays.asList("value1", "value2")); + } - public void testForEachTouches() throws InterruptedException, ExecutionException, 
TimeoutException { - final ModifiableFixedTimeClock clock = new ModifiableFixedTimeClock(); - final Duration timeToLive = Duration.ofSeconds(10); - final HotEntryCache cache = new HotEntryCache<>(timeToLive, 10, clock); + public void testForEachTouches() throws InterruptedException, ExecutionException, TimeoutException { + final ModifiableFixedTimeClock clock = new ModifiableFixedTimeClock(); + final Duration timeToLive = Duration.ofSeconds(10); + final HotEntryCache cache = new HotEntryCache<>(timeToLive, 10, clock); - final CompletableFuture evictionEventFuture = new CompletableFuture<>(); - cache.addListener((key, value) -> { - evictionEventFuture.complete(value); - }); + final CompletableFuture evictionEventFuture = new CompletableFuture<>(); + cache.addListener((key, value) -> { + evictionEventFuture.complete(value); + }); - // add value - cache.put("key", "value1"); + // add value + cache.put("key", "value1"); - // seek, so that it is almost evicted - clock.plus(timeToLive.minusMillis(1)); - cache.updateTime(); + // seek, so that it is almost evicted + clock.plus(timeToLive.minusMillis(1)); + cache.updateTime(); - // the for each should touch the entries - cache.forEach(s -> { - /* no-op */}); + // the for each should touch the entries + cache.forEach(s -> { + /* no-op */}); - // seek again - clock.plus(timeToLive.minusMillis(1)); - cache.triggerEvictionAndWait(); + // seek again + clock.plus(timeToLive.minusMillis(1)); + cache.triggerEvictionAndWait(); - // if the touch didn't happen, then the value is now evicted - Assert.assertEquals(evictionEventFuture.isDone(), false); + // if the touch didn't happen, then the value is now evicted + Assert.assertEquals(evictionEventFuture.isDone(), false); - // seek again, so that the entry will get evicted - clock.plus(timeToLive.minusMillis(1)); - cache.triggerEvictionAndWait(); + // seek again, so that the entry will get evicted + clock.plus(timeToLive.minusMillis(1)); + cache.triggerEvictionAndWait(); - 
Assert.assertEquals(cache.get("key"), null); - } - - /** - * Checks that - * {@link HotEntryCache#putIfAbsent(Object, java.util.function.Function) - * putIfAbsent} is atomic by calling - * {@link HotEntryCache#putIfAbsent(Object, java.util.function.Function) - * putIfAbsent} in two threads and asserting that the supplier was only called - * once. - * - * @throws Exception - */ - public void testPutIfAbsentIsAtomic() throws Exception { - final HotEntryCache cache = new HotEntryCache<>(Duration.ofSeconds(10), 10); - - final ExecutorService pool = Executors.newCachedThreadPool(); - try { - final CountDownLatch latch = new CountDownLatch(1); - - final String key = "key"; - final String valueA = "A"; - final String valueB = "B"; - - pool.submit(() -> { - cache.putIfAbsent(key, k -> { - latch.countDown(); - sleep(TimeUnit.MILLISECONDS, 20); - return valueA; - }); - return null; - }); - pool.submit(() -> { - waitFor(latch); - cache.putIfAbsent(key, k -> valueB); - return null; - }); - - pool.shutdown(); - pool.awaitTermination(1, TimeUnit.MINUTES); - - final String actual = cache.get(key); - Assert.assertEquals(actual, valueA); - } finally { - pool.shutdownNow(); - } - } - - public void testPutIfAbsentReturnsExistingValue() throws Exception { - final HotEntryCache cache = new HotEntryCache<>(Duration.ofSeconds(10), 10); - - final String key = "key"; - final String valueA = "A"; - final String valueB = "B"; - - cache.put(key, valueA); - - final String returnedByPutIfAbsent = cache.putIfAbsent(key, k -> valueB); - Assert.assertEquals(returnedByPutIfAbsent, valueA); - - final String actualInCache = cache.get(key); - Assert.assertEquals(actualInCache, valueA); - } - - public void testPutIfAbsentDoesNotAddNull() throws Exception { - final HotEntryCache cache = new HotEntryCache<>(Duration.ofSeconds(10), 10); - - final String key = "key"; - final String returnedByPutIfAbsent = cache.putIfAbsent(key, k -> null); - Assert.assertNull(returnedByPutIfAbsent, null); - - final String 
actualInCache = cache.get(key); - Assert.assertEquals(actualInCache, null); - } - - public void testMaxSizeIsRespected() { - final int maxSize = 10; - final ModifiableFixedTimeClock clock = new ModifiableFixedTimeClock(); - final HotEntryCache cache = new HotEntryCache<>(Duration.ofHours(1), maxSize, clock); - final Set removedKeys = new LinkedHashSet<>(); - cache.addListener((key, value) -> removedKeys.add(key)); - - // fill the cache - int count = 0; - for (count = 0; count < maxSize; count++) { - - cache.put("key" + count, "value" + count); - clock.plus(2L, ChronoUnit.MILLIS); - cache.updateTime(); - } - Assert.assertEquals(cache.size(), maxSize, "cache is full"); - Assert.assertEquals(removedKeys, List.of(), "removed keys at point A"); - - // add an item to a full cache -> the oldest 20% of the entries will be evicted - // before the new entry is added - cache.put("key" + count, "value" + count); - clock.plus(2L, ChronoUnit.MILLIS); - cache.updateTime(); - count++; - - Assert.assertEquals(cache.size(), maxSize - 1, "cache was full, 20% (2 items) were removed and one added"); - Assert.assertEquals(removedKeys, Set.of("key0", "key1"), "removed keys at point B"); - } - - public void testEvictionDueToSizeLimitDoesNotRemoveMoreThan20Percent() { - final int maxSize = 10; - final ModifiableFixedTimeClock clock = new ModifiableFixedTimeClock(); - final HotEntryCache cache = new HotEntryCache<>(Duration.ofHours(1), maxSize, clock); - final Set removedKeys = new LinkedHashSet<>(); - cache.addListener((key, value) -> removedKeys.add(key)); - - // fill the cache - int count = 0; - for (count = 0; count < maxSize; count++) { - // all entries get the same eviction time due to the fixed clock - cache.put("key" + count, "value" + count); - } - Assert.assertEquals(cache.size(), maxSize, "cache is full"); - Assert.assertEquals(removedKeys, List.of(), "removed keys at point A"); - - // add an item to a full cache -> the oldest 20% of the entries will be evicted - // before the 
new entry is added - cache.put("key" + count, "value" + count); - count++; - - Assert.assertEquals(cache.size(), maxSize - 1, "cache was full, 20% (2 items) were removed and one added"); - Assert.assertEquals(removedKeys.size(), (int) (maxSize * 0.2), "number of removed keys at point B"); - } - - private void sleep(final TimeUnit timeUnit, final long timeout) { - try { - timeUnit.sleep(timeout); - } catch (final InterruptedException e) { - throw new IllegalStateException(e); - } - } - - private void waitFor(final CountDownLatch latch) { - try { - latch.await(1, TimeUnit.MINUTES); - } catch (final InterruptedException e) { - throw new IllegalStateException(e); - } - } - - public static void main(final String[] args) throws InterruptedException { - - Configurator.setRootLevel(Level.TRACE); - - final Duration timeToLive = Duration.ofSeconds(1); - final HotEntryCache cache = new HotEntryCache<>(timeToLive, 10); - - cache.addListener((key, value) -> { - System.out.println(Instant.now() + " evicting: " + key + " -> " + value); - }); - cache.put("key", "value that is touched"); - for (int i = 0; i < 20; i++) { - - System.out.println(Instant.now() + " putting value" + i); - cache.put("key" + i, "value" + i); - cache.put("key", "value that is touched" + i); - TimeUnit.MILLISECONDS.sleep(450); - } - - for (int i = 20; i < 23; i++) { - System.out.println(Instant.now() + " putting value" + i); - cache.put("key" + i, "value" + i); - TimeUnit.MILLISECONDS.sleep(Duration.ofSeconds(5).plusMillis(10).toMillis()); - } - - TimeUnit.MILLISECONDS.sleep(Duration.ofSeconds(5).plusMillis(10).toMillis()); - - for (int i = 23; i < 27; i++) { - System.out.println(Instant.now() + " putting value" + i); - cache.put("key" + i, "value" + i); - TimeUnit.MILLISECONDS.sleep(Duration.ofSeconds(5).plusMillis(10).toMillis()); - } - - TimeUnit.SECONDS.sleep(300); - } + Assert.assertEquals(cache.get("key"), null); + } + + /** + * Checks that + * {@link HotEntryCache#putIfAbsent(Object, 
java.util.function.Function) + * putIfAbsent} is atomic by calling + * {@link HotEntryCache#putIfAbsent(Object, java.util.function.Function) + * putIfAbsent} in two threads and asserting that the supplier was only called + * once. + * + * @throws Exception + */ + public void testPutIfAbsentIsAtomic() throws Exception { + final HotEntryCache cache = new HotEntryCache<>(Duration.ofSeconds(10), 10); + + final ExecutorService pool = Executors.newCachedThreadPool(); + try { + final CountDownLatch latch = new CountDownLatch(1); + + final String key = "key"; + final String valueA = "A"; + final String valueB = "B"; + + pool.submit(() -> { + cache.putIfAbsent(key, k -> { + latch.countDown(); + sleep(TimeUnit.MILLISECONDS, 20); + return valueA; + }); + return null; + }); + pool.submit(() -> { + waitFor(latch); + cache.putIfAbsent(key, k -> valueB); + return null; + }); + + pool.shutdown(); + pool.awaitTermination(1, TimeUnit.MINUTES); + + final String actual = cache.get(key); + Assert.assertEquals(actual, valueA); + } finally { + pool.shutdownNow(); + } + } + + public void testPutIfAbsentReturnsExistingValue() throws Exception { + final HotEntryCache cache = new HotEntryCache<>(Duration.ofSeconds(10), 10); + + final String key = "key"; + final String valueA = "A"; + final String valueB = "B"; + + cache.put(key, valueA); + + final String returnedByPutIfAbsent = cache.putIfAbsent(key, k -> valueB); + Assert.assertEquals(returnedByPutIfAbsent, valueA); + + final String actualInCache = cache.get(key); + Assert.assertEquals(actualInCache, valueA); + } + + public void testPutIfAbsentDoesNotAddNull() throws Exception { + final HotEntryCache cache = new HotEntryCache<>(Duration.ofSeconds(10), 10); + + final String key = "key"; + final String returnedByPutIfAbsent = cache.putIfAbsent(key, k -> null); + Assert.assertNull(returnedByPutIfAbsent, null); + + final String actualInCache = cache.get(key); + Assert.assertEquals(actualInCache, null); + } + + public void 
testMaxSizeIsRespected() { + final int maxSize = 10; + final ModifiableFixedTimeClock clock = new ModifiableFixedTimeClock(); + final HotEntryCache cache = new HotEntryCache<>(Duration.ofHours(1), maxSize, clock); + final Set removedKeys = new LinkedHashSet<>(); + cache.addListener((key, value) -> removedKeys.add(key)); + + // fill the cache + int count = 0; + for (count = 0; count < maxSize; count++) { + + cache.put("key" + count, "value" + count); + clock.plus(2L, ChronoUnit.MILLIS); + cache.updateTime(); + } + Assert.assertEquals(cache.size(), maxSize, "cache is full"); + Assert.assertEquals(removedKeys, List.of(), "removed keys at point A"); + + // add an item to a full cache -> the oldest 20% of the entries will be evicted + // before the new entry is added + cache.put("key" + count, "value" + count); + clock.plus(2L, ChronoUnit.MILLIS); + cache.updateTime(); + count++; + + Assert.assertEquals(cache.size(), maxSize - 1, "cache was full, 20% (2 items) were removed and one added"); + Assert.assertEquals(removedKeys, Set.of("key0", "key1"), "removed keys at point B"); + } + + public void testEvictionDueToSizeLimitDoesNotRemoveMoreThan20Percent() { + final int maxSize = 10; + final ModifiableFixedTimeClock clock = new ModifiableFixedTimeClock(); + final HotEntryCache cache = new HotEntryCache<>(Duration.ofHours(1), maxSize, clock); + final Set removedKeys = new LinkedHashSet<>(); + cache.addListener((key, value) -> removedKeys.add(key)); + + // fill the cache + int count = 0; + for (count = 0; count < maxSize; count++) { + // all entries get the same eviction time due to the fixed clock + cache.put("key" + count, "value" + count); + } + Assert.assertEquals(cache.size(), maxSize, "cache is full"); + Assert.assertEquals(removedKeys, List.of(), "removed keys at point A"); + + // add an item to a full cache -> the oldest 20% of the entries will be evicted + // before the new entry is added + cache.put("key" + count, "value" + count); + count++; + + 
Assert.assertEquals(cache.size(), maxSize - 1, "cache was full, 20% (2 items) were removed and one added"); + Assert.assertEquals(removedKeys.size(), (int) (maxSize * 0.2), "number of removed keys at point B"); + } + + private void sleep(final TimeUnit timeUnit, final long timeout) { + try { + timeUnit.sleep(timeout); + } catch (final InterruptedException e) { + throw new IllegalStateException(e); + } + } + + private void waitFor(final CountDownLatch latch) { + try { + latch.await(1, TimeUnit.MINUTES); + } catch (final InterruptedException e) { + throw new IllegalStateException(e); + } + } + + public static void main(final String[] args) throws InterruptedException { + + Configurator.setRootLevel(Level.TRACE); + + final Duration timeToLive = Duration.ofSeconds(1); + final HotEntryCache cache = new HotEntryCache<>(timeToLive, 10); + + cache.addListener((key, value) -> { + System.out.println(Instant.now() + " evicting: " + key + " -> " + value); + }); + cache.put("key", "value that is touched"); + for (int i = 0; i < 20; i++) { + + System.out.println(Instant.now() + " putting value" + i); + cache.put("key" + i, "value" + i); + cache.put("key", "value that is touched" + i); + TimeUnit.MILLISECONDS.sleep(450); + } + + for (int i = 20; i < 23; i++) { + System.out.println(Instant.now() + " putting value" + i); + cache.put("key" + i, "value" + i); + TimeUnit.MILLISECONDS.sleep(Duration.ofSeconds(5).plusMillis(10).toMillis()); + } + + TimeUnit.MILLISECONDS.sleep(Duration.ofSeconds(5).plusMillis(10).toMillis()); + + for (int i = 23; i < 27; i++) { + System.out.println(Instant.now() + " putting value" + i); + cache.put("key" + i, "value" + i); + TimeUnit.MILLISECONDS.sleep(Duration.ofSeconds(5).plusMillis(10).toMillis()); + } + + TimeUnit.SECONDS.sleep(300); + } } diff --git a/pdb-utils/src/test/java/org/lucares/utils/cache/ModifiableFixedTimeClock.java b/pdb-utils/src/test/java/org/lucares/utils/cache/ModifiableFixedTimeClock.java index 081c9bc..306a5ed 100644 --- 
a/pdb-utils/src/test/java/org/lucares/utils/cache/ModifiableFixedTimeClock.java +++ b/pdb-utils/src/test/java/org/lucares/utils/cache/ModifiableFixedTimeClock.java @@ -12,87 +12,87 @@ import java.time.temporal.TemporalUnit; * useful in tests, so that you can explicitly set the time. */ public class ModifiableFixedTimeClock extends Clock implements Serializable { - private static final long serialVersionUID = 1955332545617873736L; - private Instant instant; - private final ZoneId zone; + private static final long serialVersionUID = 1955332545617873736L; + private Instant instant; + private final ZoneId zone; - public ModifiableFixedTimeClock() { - this(Instant.now()); - } + public ModifiableFixedTimeClock() { + this(Instant.now()); + } - public ModifiableFixedTimeClock(final Instant fixedInstant) { - this(fixedInstant, ZoneId.systemDefault()); - } + public ModifiableFixedTimeClock(final Instant fixedInstant) { + this(fixedInstant, ZoneId.systemDefault()); + } - public ModifiableFixedTimeClock(final Instant fixedInstant, final ZoneId zone) { - this.instant = fixedInstant; - this.zone = zone; - } + public ModifiableFixedTimeClock(final Instant fixedInstant, final ZoneId zone) { + this.instant = fixedInstant; + this.zone = zone; + } - public void setTime(final Instant instant) { - this.instant = instant; - } + public void setTime(final Instant instant) { + this.instant = instant; + } - public void plus(final TemporalAmount amountToAdd) { - instant = instant.plus(amountToAdd); - } + public void plus(final TemporalAmount amountToAdd) { + instant = instant.plus(amountToAdd); + } - public void plus(final long amountToAdd, final TemporalUnit unit) { - instant = instant.plus(amountToAdd, unit); - } + public void plus(final long amountToAdd, final TemporalUnit unit) { + instant = instant.plus(amountToAdd, unit); + } - public void plusMillis(final long millisToAdd) { - instant = instant.plusMillis(millisToAdd); - } + public void plusMillis(final long millisToAdd) { + instant = 
instant.plusMillis(millisToAdd); + } - public void plusNanos(final long nanosToAdd) { - instant = instant.plusNanos(nanosToAdd); - } + public void plusNanos(final long nanosToAdd) { + instant = instant.plusNanos(nanosToAdd); + } - public void plusSeconds(final long secondsToAdd) { - instant = instant.plusSeconds(secondsToAdd); - } + public void plusSeconds(final long secondsToAdd) { + instant = instant.plusSeconds(secondsToAdd); + } - @Override - public ZoneId getZone() { - return zone; - } + @Override + public ZoneId getZone() { + return zone; + } - @Override - public Clock withZone(final ZoneId zone) { - if (zone.equals(this.zone)) { - return this; - } - return new ModifiableFixedTimeClock(instant, zone); - } + @Override + public Clock withZone(final ZoneId zone) { + if (zone.equals(this.zone)) { + return this; + } + return new ModifiableFixedTimeClock(instant, zone); + } - @Override - public long millis() { - return instant.toEpochMilli(); - } + @Override + public long millis() { + return instant.toEpochMilli(); + } - @Override - public Instant instant() { - return instant; - } + @Override + public Instant instant() { + return instant; + } - @Override - public boolean equals(final Object obj) { - if (obj instanceof ModifiableFixedTimeClock) { - final ModifiableFixedTimeClock other = (ModifiableFixedTimeClock) obj; - return instant.equals(other.instant) && zone.equals(other.zone); - } - return false; - } + @Override + public boolean equals(final Object obj) { + if (obj instanceof ModifiableFixedTimeClock) { + final ModifiableFixedTimeClock other = (ModifiableFixedTimeClock) obj; + return instant.equals(other.instant) && zone.equals(other.zone); + } + return false; + } - @Override - public int hashCode() { - return instant.hashCode() ^ zone.hashCode(); - } + @Override + public int hashCode() { + return instant.hashCode() ^ zone.hashCode(); + } - @Override - public String toString() { - return "FixedClock[" + instant + "," + zone + "]"; - } + @Override + public 
String toString() { + return "FixedClock[" + instant + "," + zone + "]"; + } } diff --git a/performanceDb/src/main/java/org/lucares/performance/db/BlockingIterator.java b/performanceDb/src/main/java/org/lucares/performance/db/BlockingIterator.java index 0dcaf5d..c92254a 100644 --- a/performanceDb/src/main/java/org/lucares/performance/db/BlockingIterator.java +++ b/performanceDb/src/main/java/org/lucares/performance/db/BlockingIterator.java @@ -6,7 +6,7 @@ import java.util.concurrent.TimeoutException; public interface BlockingIterator { - public Optional next(long timeout, TimeUnit unit) throws InterruptedException, TimeoutException; + public Optional next(long timeout, TimeUnit unit) throws InterruptedException, TimeoutException; - public Optional next() throws InterruptedException; + public Optional next() throws InterruptedException; } \ No newline at end of file diff --git a/performanceDb/src/main/java/org/lucares/performance/db/BlockingIteratorIterator.java b/performanceDb/src/main/java/org/lucares/performance/db/BlockingIteratorIterator.java index c199e61..8fbfa32 100644 --- a/performanceDb/src/main/java/org/lucares/performance/db/BlockingIteratorIterator.java +++ b/performanceDb/src/main/java/org/lucares/performance/db/BlockingIteratorIterator.java @@ -6,24 +6,24 @@ import java.util.concurrent.TimeUnit; final class BlockingIteratorIterator implements BlockingIterator { - private final Iterator iterator; + private final Iterator iterator; - public BlockingIteratorIterator(final Iterator iterator) { - this.iterator = iterator; - } + public BlockingIteratorIterator(final Iterator iterator) { + this.iterator = iterator; + } - @Override - public Optional next() throws InterruptedException { - if (iterator.hasNext()) { - final E next = iterator.next(); - return Optional.of(next); - } else { - return Optional.empty(); - } - } + @Override + public Optional next() throws InterruptedException { + if (iterator.hasNext()) { + final E next = iterator.next(); + return 
Optional.of(next); + } else { + return Optional.empty(); + } + } - @Override - public Optional next(final long timeout, final TimeUnit unit) throws InterruptedException { - return next(); - } + @Override + public Optional next(final long timeout, final TimeUnit unit) throws InterruptedException { + return next(); + } } \ No newline at end of file diff --git a/performanceDb/src/main/java/org/lucares/performance/db/BlockingQueueIterator.java b/performanceDb/src/main/java/org/lucares/performance/db/BlockingQueueIterator.java index e586e51..a937762 100644 --- a/performanceDb/src/main/java/org/lucares/performance/db/BlockingQueueIterator.java +++ b/performanceDb/src/main/java/org/lucares/performance/db/BlockingQueueIterator.java @@ -10,50 +10,50 @@ import org.slf4j.LoggerFactory; public final class BlockingQueueIterator implements BlockingIterator { - private static final Logger LOGGER = LoggerFactory.getLogger(BlockingQueueIterator.class); + private static final Logger LOGGER = LoggerFactory.getLogger(BlockingQueueIterator.class); - private final BlockingQueue queue; + private final BlockingQueue queue; - private boolean closed = false; + private boolean closed = false; - private final E poison; + private final E poison; - public BlockingQueueIterator(final BlockingQueue queue, final E poison) { - this.queue = queue; - this.poison = poison; - } + public BlockingQueueIterator(final BlockingQueue queue, final E poison) { + this.queue = queue; + this.poison = poison; + } - @Override - public Optional next() throws InterruptedException { - try { - return next(Long.MAX_VALUE, TimeUnit.NANOSECONDS); - } catch (final TimeoutException e) { - throw new IllegalStateException( - "We just got a timeout exception after waiting the longest time possible. Which is " - + TimeUnit.NANOSECONDS.toDays(Long.MAX_VALUE) + " days. 
We didn't expect that.", - e); - } - } + @Override + public Optional next() throws InterruptedException { + try { + return next(Long.MAX_VALUE, TimeUnit.NANOSECONDS); + } catch (final TimeoutException e) { + throw new IllegalStateException( + "We just got a timeout exception after waiting the longest time possible. Which is " + + TimeUnit.NANOSECONDS.toDays(Long.MAX_VALUE) + " days. We didn't expect that.", + e); + } + } - @Override - public Optional next(final long timeout, final TimeUnit unit) throws InterruptedException, TimeoutException { + @Override + public Optional next(final long timeout, final TimeUnit unit) throws InterruptedException, TimeoutException { - if (closed) { - return Optional.empty(); - } + if (closed) { + return Optional.empty(); + } - LOGGER.trace("wait for next entry"); - final E next = queue.poll(timeout, unit); - LOGGER.trace("received entry: {}", next); + LOGGER.trace("wait for next entry"); + final E next = queue.poll(timeout, unit); + LOGGER.trace("received entry: {}", next); - if (next == poison) { - LOGGER.trace("received poison"); - closed = true; - return Optional.empty(); - } else if (next == null) { - throw new TimeoutException(); - } + if (next == poison) { + LOGGER.trace("received poison"); + closed = true; + return Optional.empty(); + } else if (next == null) { + throw new TimeoutException(); + } - return Optional.of(next); - } + return Optional.of(next); + } } \ No newline at end of file diff --git a/performanceDb/src/main/java/org/lucares/performance/db/EntryToEntriesIterator.java b/performanceDb/src/main/java/org/lucares/performance/db/EntryToEntriesIterator.java index 30e0f6a..f77023a 100644 --- a/performanceDb/src/main/java/org/lucares/performance/db/EntryToEntriesIterator.java +++ b/performanceDb/src/main/java/org/lucares/performance/db/EntryToEntriesIterator.java @@ -7,20 +7,20 @@ import org.lucares.pdb.api.Entry; public class EntryToEntriesIterator implements Iterator { - private final Iterator entryIterator; + private 
final Iterator entryIterator; - public EntryToEntriesIterator(final Iterator entryIterator) { - this.entryIterator = entryIterator; - } + public EntryToEntriesIterator(final Iterator entryIterator) { + this.entryIterator = entryIterator; + } - @Override - public boolean hasNext() { - return entryIterator.hasNext(); - } + @Override + public boolean hasNext() { + return entryIterator.hasNext(); + } - @Override - public Entries next() { - return new Entries(entryIterator.next()); - } + @Override + public Entries next() { + return new Entries(entryIterator.next()); + } } diff --git a/performanceDb/src/main/java/org/lucares/performance/db/Group.java b/performanceDb/src/main/java/org/lucares/performance/db/Group.java index 8bd341f..bdfcc0e 100644 --- a/performanceDb/src/main/java/org/lucares/performance/db/Group.java +++ b/performanceDb/src/main/java/org/lucares/performance/db/Group.java @@ -6,30 +6,30 @@ import org.lucares.pdb.api.Tags; import org.lucares.pdb.datastore.PdbFile; class Group { - private final Tags tags; + private final Tags tags; - private final List files; + private final List files; - public Group(final Tags tags, final List files) { - super(); - this.tags = tags; - this.files = files; - } + public Group(final Tags tags, final List files) { + super(); + this.tags = tags; + this.files = files; + } - public Tags getTags() { - return tags; - } + public Tags getTags() { + return tags; + } - public List getFiles() { - return files; - } + public List getFiles() { + return files; + } - public void addFile(final PdbFile file) { - files.add(file); - } + public void addFile(final PdbFile file) { + files.add(file); + } - @Override - public String toString() { - return tags + ": " + files.size() + " files"; - } + @Override + public String toString() { + return tags + ": " + files.size() + " files"; + } } diff --git a/performanceDb/src/main/java/org/lucares/performance/db/Grouping.java b/performanceDb/src/main/java/org/lucares/performance/db/Grouping.java index 
7b02965..4ab6d9e 100644 --- a/performanceDb/src/main/java/org/lucares/performance/db/Grouping.java +++ b/performanceDb/src/main/java/org/lucares/performance/db/Grouping.java @@ -13,61 +13,61 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class Grouping { - private static final Logger LOGGER = LoggerFactory.getLogger(Grouping.class); + private static final Logger LOGGER = LoggerFactory.getLogger(Grouping.class); - public static final List NO_GROUPING = Collections.emptyList(); + public static final List NO_GROUPING = Collections.emptyList(); - private final List groups = new ArrayList<>(); + private final List groups = new ArrayList<>(); - private Grouping(final Group group) { - this.groups.add(group); - } + private Grouping(final Group group) { + this.groups.add(group); + } - private Grouping(final Collection groups) { - this.groups.addAll(groups); - } + private Grouping(final Collection groups) { + this.groups.addAll(groups); + } - public static Grouping groupBy(final List pdbFiles, final List groupByField) { + public static Grouping groupBy(final List pdbFiles, final List groupByField) { - final Grouping result; - if (noGrouping(groupByField)) { - final Group group = new Group(Tags.EMPTY, pdbFiles); + final Grouping result; + if (noGrouping(groupByField)) { + final Group group = new Group(Tags.EMPTY, pdbFiles); - result = new Grouping(group); - } else { - final Map grouping = new HashMap<>(); + result = new Grouping(group); + } else { + final Map grouping = new HashMap<>(); - for (final PdbFile pdbFile : pdbFiles) { - final Tags tags = pdbFile.getTags(); - final Tags groupTags = tags.subset(groupByField); + for (final PdbFile pdbFile : pdbFiles) { + final Tags tags = pdbFile.getTags(); + final Tags groupTags = tags.subset(groupByField); - addIfNotExists(grouping, groupTags); - grouping.get(groupTags).addFile(pdbFile); - } - result = new Grouping(grouping.values()); - } - LOGGER.trace("grouped {} files by {}: {}", pdbFiles.size(), groupByField, 
result); - return result; - } + addIfNotExists(grouping, groupTags); + grouping.get(groupTags).addFile(pdbFile); + } + result = new Grouping(grouping.values()); + } + LOGGER.trace("grouped {} files by {}: {}", pdbFiles.size(), groupByField, result); + return result; + } - private static boolean noGrouping(final List groupByField) { - return groupByField == null || groupByField.isEmpty(); - } + private static boolean noGrouping(final List groupByField) { + return groupByField == null || groupByField.isEmpty(); + } - private static void addIfNotExists(final Map grouping, final Tags groupTags) { - if (!grouping.containsKey(groupTags)) { - final List files = new ArrayList<>(); + private static void addIfNotExists(final Map grouping, final Tags groupTags) { + if (!grouping.containsKey(groupTags)) { + final List files = new ArrayList<>(); - grouping.put(groupTags, new Group(groupTags, files)); - } - } + grouping.put(groupTags, new Group(groupTags, files)); + } + } - public Collection getGroups() { - return groups; - } + public Collection getGroups() { + return groups; + } - @Override - public String toString() { - return String.valueOf(groups); - } + @Override + public String toString() { + return String.valueOf(groups); + } } diff --git a/performanceDb/src/main/java/org/lucares/performance/db/PdbExport.java b/performanceDb/src/main/java/org/lucares/performance/db/PdbExport.java index da16eca..d831dde 100644 --- a/performanceDb/src/main/java/org/lucares/performance/db/PdbExport.java +++ b/performanceDb/src/main/java/org/lucares/performance/db/PdbExport.java @@ -31,153 +31,153 @@ import org.slf4j.LoggerFactory; public class PdbExport { - private static final int KB = 1024; - private static final int MB = KB * 1024; - private static final int GB = MB * 1024; + private static final int KB = 1024; + private static final int MB = KB * 1024; + private static final int GB = MB * 1024; - public static final char MAGIC_BYTE = '#'; - public static final char MARKER_DICT_ENTRY_CHAR 
= '$'; - public static final String MARKER_DICT_ENTRY = String.valueOf(MARKER_DICT_ENTRY_CHAR); - public static final char SEPARATOR_TAG_ID_CHAR = ':'; - public static final String SEPARATOR_TAG_ID = String.valueOf(SEPARATOR_TAG_ID_CHAR); + public static final char MAGIC_BYTE = '#'; + public static final char MARKER_DICT_ENTRY_CHAR = '$'; + public static final String MARKER_DICT_ENTRY = String.valueOf(MARKER_DICT_ENTRY_CHAR); + public static final char SEPARATOR_TAG_ID_CHAR = ':'; + public static final String SEPARATOR_TAG_ID = String.valueOf(SEPARATOR_TAG_ID_CHAR); - private static final Logger LOGGER = LoggerFactory.getLogger(PdbExport.class); + private static final Logger LOGGER = LoggerFactory.getLogger(PdbExport.class); - public static void main(final String[] args) throws Exception { + public static void main(final String[] args) throws Exception { - initLogging(); + initLogging(); - final Path dataDirectory = Paths.get(args[0]); - final Path backupDir = Paths.get(args[1]) - .resolve(OffsetDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss"))); + final Path dataDirectory = Paths.get(args[0]); + final Path backupDir = Paths.get(args[1]) + .resolve(OffsetDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss"))); - export(dataDirectory, backupDir); - } + export(dataDirectory, backupDir); + } - public static List export(final Path dataDirectory, final Path backupDir) throws Exception { - final List exportFiles = new ArrayList<>(); - Files.createDirectories(backupDir); + public static List export(final Path dataDirectory, final Path backupDir) throws Exception { + final List exportFiles = new ArrayList<>(); + Files.createDirectories(backupDir); - Runtime.getRuntime().addShutdownHook(new Thread() { - @Override - public void run() { - LOGGER.info("shutdown hook"); - } + Runtime.getRuntime().addShutdownHook(new Thread() { + @Override + public void run() { + LOGGER.info("shutdown hook"); + } - }); + }); - final OffsetDateTime start 
= OffsetDateTime.now(); - final String datePrefix = start.format(DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss")); - final AtomicLong tagsIdCounter = new AtomicLong(0); - long exportFileCounter = 0; + final OffsetDateTime start = OffsetDateTime.now(); + final String datePrefix = start.format(DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss")); + final AtomicLong tagsIdCounter = new AtomicLong(0); + long exportFileCounter = 0; - Path exportFile = null; - Writer writer = null; + Path exportFile = null; + Writer writer = null; - try (final PerformanceDb db = new PerformanceDb(dataDirectory);) { + try (final PerformanceDb db = new PerformanceDb(dataDirectory);) { - LOGGER.info("Searching for all files. This may take a while ..."); - final List pdbFiles = db.getFilesForQuery(new Query("", DateTimeRange.max())); + LOGGER.info("Searching for all files. This may take a while ..."); + final List pdbFiles = db.getFilesForQuery(new Query("", DateTimeRange.max())); - long count = 0; - long lastEpochMilli = 0; - long begin = System.currentTimeMillis(); + long count = 0; + long lastEpochMilli = 0; + long begin = System.currentTimeMillis(); - for (final PdbFile pdbFile : pdbFiles) { + for (final PdbFile pdbFile : pdbFiles) { - if (writer == null || Files.size(exportFile) > 4 * GB) { - if (writer != null) { - writer.flush(); - writer.close(); - } - exportFile = backupDir.resolve(String.format("%s.%05d.pdb.gz", datePrefix, exportFileCounter++)); - exportFiles.add(exportFile); - writer = createWriter(exportFile); - LOGGER.info("new export file: {}", exportFile); + if (writer == null || Files.size(exportFile) > 4 * GB) { + if (writer != null) { + writer.flush(); + writer.close(); + } + exportFile = backupDir.resolve(String.format("%s.%05d.pdb.gz", datePrefix, exportFileCounter++)); + exportFiles.add(exportFile); + writer = createWriter(exportFile); + LOGGER.info("new export file: {}", exportFile); - lastEpochMilli = 0; - } + lastEpochMilli = 0; + } - final Stream timeValueStream = 
PdbFile.toStream(Arrays.asList(pdbFile), db.getDataStore()); + final Stream timeValueStream = PdbFile.toStream(Arrays.asList(pdbFile), db.getDataStore()); - final Tags tags = pdbFile.getTags(); - final long tagsId = addNewTagsToDictionary(writer, tags, tagsIdCounter); + final Tags tags = pdbFile.getTags(); + final long tagsId = addNewTagsToDictionary(writer, tags, tagsIdCounter); - final Iterator it = timeValueStream.iterator(); - while (it.hasNext()) { - final LongList entry = it.next(); + final Iterator it = timeValueStream.iterator(); + while (it.hasNext()) { + final LongList entry = it.next(); - for (int i = 0; i < entry.size(); i += 2) { + for (int i = 0; i < entry.size(); i += 2) { - final long epochMilli = entry.get(i); - final long value = entry.get(i + 1); + final long epochMilli = entry.get(i); + final long value = entry.get(i + 1); - final long epochMilliDiff = epochMilli - lastEpochMilli; - lastEpochMilli = epochMilli; + final long epochMilliDiff = epochMilli - lastEpochMilli; + lastEpochMilli = epochMilli; - writer.write(Long.toString(epochMilliDiff)); - writer.write(','); - writer.write(Long.toString(value)); - writer.write(','); - writer.write(Long.toString(tagsId)); - writer.write('\n'); + writer.write(Long.toString(epochMilliDiff)); + writer.write(','); + writer.write(Long.toString(value)); + writer.write(','); + writer.write(Long.toString(tagsId)); + writer.write('\n'); - count++; - final long chunk = 10_000_000; - if (count % chunk == 0) { - final long end = System.currentTimeMillis(); - final long duration = end - begin; - final long entriesPerSecond = (long) (chunk / (duration / 1000.0)); - LOGGER.info("progress: {} - {} entries/s + duration {}", String.format("%,d", count), - String.format("%,d", entriesPerSecond), duration); - begin = System.currentTimeMillis(); - } - } - } - } + count++; + final long chunk = 10_000_000; + if (count % chunk == 0) { + final long end = System.currentTimeMillis(); + final long duration = end - begin; + final 
long entriesPerSecond = (long) (chunk / (duration / 1000.0)); + LOGGER.info("progress: {} - {} entries/s + duration {}", String.format("%,d", count), + String.format("%,d", entriesPerSecond), duration); + begin = System.currentTimeMillis(); + } + } + } + } - LOGGER.info("total: " + count); + LOGGER.info("total: " + count); - } finally { - if (writer != null) { - writer.close(); - } - } + } finally { + if (writer != null) { + writer.close(); + } + } - final OffsetDateTime end = OffsetDateTime.now(); + final OffsetDateTime end = OffsetDateTime.now(); - LOGGER.info("duration: " + Duration.between(start, end)); - return exportFiles; - } + LOGGER.info("duration: " + Duration.between(start, end)); + return exportFiles; + } - private static void initLogging() { - Configurator.setRootLevel(Level.INFO); - } + private static void initLogging() { + Configurator.setRootLevel(Level.INFO); + } - private static long addNewTagsToDictionary(final Writer writer, final Tags tags, final AtomicLong tagsIdCounter) - throws IOException { - final long tagsId = tagsIdCounter.getAndIncrement(); + private static long addNewTagsToDictionary(final Writer writer, final Tags tags, final AtomicLong tagsIdCounter) + throws IOException { + final long tagsId = tagsIdCounter.getAndIncrement(); - writer.write(MARKER_DICT_ENTRY); - writer.write(Long.toString(tagsId)); - writer.write(SEPARATOR_TAG_ID); - writer.write(tags.toCsv()); - writer.write('\n'); + writer.write(MARKER_DICT_ENTRY); + writer.write(Long.toString(tagsId)); + writer.write(SEPARATOR_TAG_ID); + writer.write(tags.toCsv()); + writer.write('\n'); - return tagsId; - } + return tagsId; + } - private static Writer createWriter(final Path file) { + private static Writer createWriter(final Path file) { - try { - final OutputStreamWriter writer = new OutputStreamWriter( - new GZIPOutputStream(new FileOutputStream(file.toFile()), 4096 * 4), StandardCharsets.UTF_8); - // initialize file header - writer.write(MAGIC_BYTE); - return writer; + try { + 
final OutputStreamWriter writer = new OutputStreamWriter( + new GZIPOutputStream(new FileOutputStream(file.toFile()), 4096 * 4), StandardCharsets.UTF_8); + // initialize file header + writer.write(MAGIC_BYTE); + return writer; - } catch (final IOException e) { - throw new IllegalStateException(e); - } - } + } catch (final IOException e) { + throw new IllegalStateException(e); + } + } } \ No newline at end of file diff --git a/performanceDb/src/main/java/org/lucares/performance/db/PerformanceDb.java b/performanceDb/src/main/java/org/lucares/performance/db/PerformanceDb.java index 26f5776..6aa9f4e 100644 --- a/performanceDb/src/main/java/org/lucares/performance/db/PerformanceDb.java +++ b/performanceDb/src/main/java/org/lucares/performance/db/PerformanceDb.java @@ -30,157 +30,157 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class PerformanceDb implements AutoCloseable { - private final static Logger LOGGER = LoggerFactory.getLogger(PerformanceDb.class); - private final static Logger METRICS_LOGGER = LoggerFactory.getLogger("org.lucares.metrics.ingestion.block"); + private final static Logger LOGGER = LoggerFactory.getLogger(PerformanceDb.class); + private final static Logger METRICS_LOGGER = LoggerFactory.getLogger("org.lucares.metrics.ingestion.block"); - private final DataStore dataStore; + private final DataStore dataStore; - public PerformanceDb(final Path dataDirectory) throws IOException { + public PerformanceDb(final Path dataDirectory) throws IOException { - dataStore = new DataStore(dataDirectory); + dataStore = new DataStore(dataDirectory); - } + } - void putEntry(final Entry entry) throws WriteException { - putEntries(Arrays.asList(entry)); - } + void putEntry(final Entry entry) throws WriteException { + putEntries(Arrays.asList(entry)); + } - void putEntries(final Iterable entries) throws WriteException { - putEntries(entries.iterator()); - } + void putEntries(final Iterable entries) throws WriteException { + 
putEntries(entries.iterator()); + } - private void putEntries(final Iterator entries) throws WriteException { + private void putEntries(final Iterator entries) throws WriteException { - final EntryToEntriesIterator entriesIterator = new EntryToEntriesIterator(entries); - final BlockingIteratorIterator iterator = new BlockingIteratorIterator<>(entriesIterator); - putEntries(iterator); - } + final EntryToEntriesIterator entriesIterator = new EntryToEntriesIterator(entries); + final BlockingIteratorIterator iterator = new BlockingIteratorIterator<>(entriesIterator); + putEntries(iterator); + } - public void putEntries(final BlockingIterator entriesIterator) throws WriteException { + public void putEntries(final BlockingIterator entriesIterator) throws WriteException { - final Duration timeBetweenSyncs = Duration.ofSeconds(1); - long count = 0; - long insertionsSinceLastSync = 0; + final Duration timeBetweenSyncs = Duration.ofSeconds(1); + long count = 0; + long insertionsSinceLastSync = 0; - try { - long lastSync = System.currentTimeMillis(); - long nextSync = lastSync + timeBetweenSyncs.toMillis(); + try { + long lastSync = System.currentTimeMillis(); + long nextSync = lastSync + timeBetweenSyncs.toMillis(); - while (true) { - final Optional entriesOptional = entriesIterator.next(); - if (!entriesOptional.isPresent()) { - break; - } + while (true) { + final Optional entriesOptional = entriesIterator.next(); + if (!entriesOptional.isPresent()) { + break; + } - final Entries entries = entriesOptional.get(); - for (final Entry entry : entries) { + final Entries entries = entriesOptional.get(); + for (final Entry entry : entries) { - try { - final Tags tags = entry.getTags(); - final long dateAsEpochMilli = entry.getEpochMilli(); - final long value = entry.getValue(); + try { + final Tags tags = entry.getTags(); + final long dateAsEpochMilli = entry.getEpochMilli(); + final long value = entry.getValue(); - dataStore.write(dateAsEpochMilli, tags, value); + 
dataStore.write(dateAsEpochMilli, tags, value); - count++; - insertionsSinceLastSync++; + count++; + insertionsSinceLastSync++; - if (nextSync <= System.currentTimeMillis()) { - final long end = System.currentTimeMillis(); - final long duration = end - lastSync; - final long entriesPerSecond = (long) (insertionsSinceLastSync / (duration / 1000.0)); + if (nextSync <= System.currentTimeMillis()) { + final long end = System.currentTimeMillis(); + final long duration = end - lastSync; + final long entriesPerSecond = (long) (insertionsSinceLastSync / (duration / 1000.0)); - METRICS_LOGGER.debug(String.format("inserting %d/s ; total: %,d; last: %s", - entriesPerSecond, count, entry)); + METRICS_LOGGER.debug(String.format("inserting %d/s ; total: %,d; last: %s", + entriesPerSecond, count, entry)); - lastSync = System.currentTimeMillis(); - nextSync = lastSync + timeBetweenSyncs.toMillis(); - insertionsSinceLastSync = 0; - } + lastSync = System.currentTimeMillis(); + nextSync = lastSync + timeBetweenSyncs.toMillis(); + insertionsSinceLastSync = 0; + } - } catch (final InvalidValueException | SyntaxException e) { + } catch (final InvalidValueException | SyntaxException e) { - LOGGER.info("skipping entry: " + e.getMessage() + " : " + entry); - LOGGER.info("", e); - } - } - } + LOGGER.info("skipping entry: " + e.getMessage() + " : " + entry); + LOGGER.info("", e); + } + } + } - } catch (final RuntimeException e) { - throw new WriteException(e); - } catch (final InterruptedException e) { - Thread.currentThread().interrupt(); - LOGGER.info("Thread was interrupted. Aborting exectution."); - } finally { - dataStore.flush(); - } - } + } catch (final RuntimeException e) { + throw new WriteException(e); + } catch (final InterruptedException e) { + Thread.currentThread().interrupt(); + LOGGER.info("Thread was interrupted. 
Aborting exectution."); + } finally { + dataStore.flush(); + } + } - /** - * - * @param query - * @return - */ - public Result get(final Query query) { - return get(query, Grouping.NO_GROUPING); - } + /** + * + * @param query + * @return + */ + public Result get(final Query query) { + return get(query, Grouping.NO_GROUPING); + } - public List getFilesForQuery(final Query query) { - return dataStore.getFilesForQuery(query); - } + public List getFilesForQuery(final Query query) { + return dataStore.getFilesForQuery(query); + } - /** - * Return the entries as an unbound, ordered and non-parallel stream. - * - * @param query - * @param groupBy the tag to group by - * @return {@link Result} - */ - public Result get(final Query query, final List groupBy) { - final long start = System.nanoTime(); - final List pdbFiles = dataStore.getFilesForQuery(query); + /** + * Return the entries as an unbound, ordered and non-parallel stream. + * + * @param query + * @param groupBy the tag to group by + * @return {@link Result} + */ + public Result get(final Query query, final List groupBy) { + final long start = System.nanoTime(); + final List pdbFiles = dataStore.getFilesForQuery(query); - final Grouping grouping = Grouping.groupBy(pdbFiles, groupBy); + final Grouping grouping = Grouping.groupBy(pdbFiles, groupBy); - final Result result = toResult(grouping); - METRICS_LOGGER.debug("query execution took: " + (System.nanoTime() - start) / 1_000_000.0 + "ms: " + query - + " (" + groupBy + "): files found: " + pdbFiles.size()); - return result; - } + final Result result = toResult(grouping); + METRICS_LOGGER.debug("query execution took: " + (System.nanoTime() - start) / 1_000_000.0 + "ms: " + query + + " (" + groupBy + "): files found: " + pdbFiles.size()); + return result; + } - private Result toResult(final Grouping grouping) { - final List groupResults = new ArrayList<>(); - for (final Group group : grouping.getGroups()) { - final Stream stream = PdbFile.toStream(group.getFiles(), 
dataStore.getDiskStorage()); - final GroupResult groupResult = new GroupResult(stream, group.getTags()); - groupResults.add(groupResult); - } - final Result result = new Result(groupResults); - return result; - } + private Result toResult(final Grouping grouping) { + final List groupResults = new ArrayList<>(); + for (final Group group : grouping.getGroups()) { + final Stream stream = PdbFile.toStream(group.getFiles(), dataStore.getDiskStorage()); + final GroupResult groupResult = new GroupResult(stream, group.getTags()); + groupResults.add(groupResult); + } + final Result result = new Result(groupResults); + return result; + } - @Override - public void close() { - try { - dataStore.close(); - } catch (final Exception e) { - LOGGER.error("failed to close PerformanceDB", e); - } - } + @Override + public void close() { + try { + dataStore.close(); + } catch (final Exception e) { + LOGGER.error("failed to close PerformanceDB", e); + } + } - public List autocomplete(final QueryWithCaretMarker query) { + public List autocomplete(final QueryWithCaretMarker query) { - return dataStore.propose(query); - } + return dataStore.propose(query); + } - public List getFields(final DateTimeRange dateRange) { + public List getFields(final DateTimeRange dateRange) { - final List fields = dataStore.getAvailableFields(dateRange); + final List fields = dataStore.getAvailableFields(dateRange); - return fields; - } + return fields; + } - public PartitionDiskStore getDataStore() { - return dataStore.getDiskStorage(); - } + public PartitionDiskStore getDataStore() { + return dataStore.getDiskStorage(); + } } diff --git a/performanceDb/src/test/java/org/lucares/performance/db/EntryByDateComparator.java b/performanceDb/src/test/java/org/lucares/performance/db/EntryByDateComparator.java index 36a0403..9a76cd3 100644 --- a/performanceDb/src/test/java/org/lucares/performance/db/EntryByDateComparator.java +++ b/performanceDb/src/test/java/org/lucares/performance/db/EntryByDateComparator.java @@ 
-5,18 +5,18 @@ import java.util.Comparator; import org.lucares.pdb.api.Entry; public class EntryByDateComparator implements Comparator { - public static final Comparator INSTANCE = new EntryByDateComparator(); + public static final Comparator INSTANCE = new EntryByDateComparator(); - @Override - public int compare(final Entry o1, final Entry o2) { + @Override + public int compare(final Entry o1, final Entry o2) { - long result = o1.getEpochMilli() - o2.getEpochMilli(); + long result = o1.getEpochMilli() - o2.getEpochMilli(); - if (result == 0) { - result = o1.getValue() - o2.getValue(); - } + if (result == 0) { + result = o1.getValue() - o2.getValue(); + } - return result < 0 ? -1 : (result == 0 ? 0 : 1); - } + return result < 0 ? -1 : (result == 0 ? 0 : 1); + } } diff --git a/performanceDb/src/test/java/org/lucares/performance/db/PerformanceDbTest.java b/performanceDb/src/test/java/org/lucares/performance/db/PerformanceDbTest.java index 7059e5c..d800b77 100644 --- a/performanceDb/src/test/java/org/lucares/performance/db/PerformanceDbTest.java +++ b/performanceDb/src/test/java/org/lucares/performance/db/PerformanceDbTest.java @@ -29,331 +29,331 @@ import org.testng.annotations.Test; @Test public class PerformanceDbTest { - private Path dataDirectory; + private Path dataDirectory; - @BeforeMethod - public void beforeMethod() throws IOException { - dataDirectory = Files.createTempDirectory("pdb"); - } + @BeforeMethod + public void beforeMethod() throws IOException { + dataDirectory = Files.createTempDirectory("pdb"); + } - @AfterMethod - public void afterMethod() throws IOException { - org.lucares.utils.file.FileUtils.delete(dataDirectory); - } + @AfterMethod + public void afterMethod() throws IOException { + org.lucares.utils.file.FileUtils.delete(dataDirectory); + } - public void testInsertRead() throws Exception { + public void testInsertRead() throws Exception { - try (PerformanceDb db = new PerformanceDb(dataDirectory)) { - final OffsetDateTime nowInUtc = 
DateUtils.nowInUtc(); - final long date = nowInUtc.toInstant().toEpochMilli(); - final long value = 1; - final Tags tags = Tags.createAndAddToDictionary("myKey", "myValue"); - db.putEntry(new Entry(date, value, tags)); - - final Result result = db.get(Query.createQuery(tags, DateTimeRange.ofDay(nowInUtc))); - final LongList stream = result.singleGroup().flatMap(); - - Assert.assertEquals(stream.size(), 2); - - Assert.assertEquals(stream.get(0), date); - Assert.assertEquals(stream.get(1), value); - } - } - - public void testInsertIntoMultipleFilesRead() throws Exception { - - try (PerformanceDb db = new PerformanceDb(dataDirectory)) { - final DateTimeRange dateRange = new DateTimeRange(DateUtils.getDate(2016, 11, 1, 10, 0, 0), - DateUtils.getDate(2016, 11, 2, 12, 34, 56)); - final long dayOne = dateRange.getStartEpochMilli(); - final long dayTwo = dateRange.getEndEpochMilli(); - final long valueOne = 1; - final long valueTwo = 2; - final Tags tags = Tags.createAndAddToDictionary("myKey", "myValue"); - - db.putEntry(new Entry(dayOne, valueOne, tags)); - db.putEntry(new Entry(dayTwo, valueTwo, tags)); - - final LongList stream = db.get(Query.createQuery(tags, dateRange)).singleGroup().flatMap(); - - Assert.assertEquals(stream.size(), 4); - - Assert.assertEquals(stream.get(0), dayOne); - Assert.assertEquals(stream.get(1), valueOne); - Assert.assertEquals(stream.get(2), dayTwo); - Assert.assertEquals(stream.get(3), valueTwo); - } - } - - private List generateEntries(final DateTimeRange dateRange, final long n, final int addToDate, - final Tags tags) { - final List result = new ArrayList<>(); - final long differenceInMs = dateRange.duration().toMillis() / n; - long currentTime = dateRange.getStartEpochMilli(); - - for (long i = 0; i < n; i++) { - final long value = ThreadLocalRandom.current().nextInt(0, Integer.MAX_VALUE); - final long date = OffsetDateTime.ofInstant(Instant.ofEpochMilli(currentTime + addToDate), ZoneOffset.UTC) - .toInstant().toEpochMilli(); - 
result.add(new Entry(date, value, tags)); - - currentTime += differenceInMs; - } - return result; - } - - @DataProvider - public Object[][] providerAppendToExistingFile() throws Exception { - return new Object[][] { // - { 2 }, // - { 100 }, // - { 500 }, // - }; - } - - @Test(dataProvider = "providerAppendToExistingFile") - public void testAppendToExistingFile(final long numberOfEntries) throws Exception { - - try (PerformanceDb db = new PerformanceDb(dataDirectory)) { - - final int year = 2016; - final int month = 1; - final int day = 2; - - final DateTimeRange timeRange = DateTimeRange.ofDay(DateUtils.getDate(year, month, day, 1, 1, 1)); - - final Tags tags = Tags.createAndAddToDictionary("myKey", "one"); - final List entries = generateEntries(timeRange, numberOfEntries, 0, tags); - - printEntries(entries, ""); - - db.putEntries(entries); - - final LongList actualEntries = db.get(Query.createQuery(tags, timeRange)).singleGroup().flatMap(); - Assert.assertEquals(actualEntries.size(), entries.size() * 2); - - for (int i = 0; i < entries.size(); i++) { - final Entry entry = entries.get(i); - final long epochMilli = entry.getEpochMilli(); - final long value = entry.getValue(); - - Assert.assertEquals(actualEntries.get(i * 2), epochMilli); - Assert.assertEquals(actualEntries.get(i * 2 + 1), value); - } - - } - } - - @DataProvider - public Object[][] providerAppendToExistingFileWithRestart() throws Exception { - return new Object[][] { // - { 2 }, // - { 100 }, // - { 500 }, // - }; - } - - @Test(dataProvider = "providerAppendToExistingFileWithRestart") - public void testAppendToExistingFileWithRestart(final long numberOfEntries) throws Exception { - final Tags tags; - final List expected = new ArrayList<>(); - try (PerformanceDb db = new PerformanceDb(dataDirectory)) { - - final int year = 2016; - final int month = 1; - final int day = 2; - - tags = Tags.createAndAddToDictionary("myKey", "one"); - final DateTimeRange timeRange = 
DateTimeRange.ofDay(DateUtils.getDate(year, month, day, 1, 1, 1)); - - final List entries = generateEntries(timeRange, numberOfEntries, 0, tags); - db.putEntries(entries); - expected.addAll(entries); - } - - try (PerformanceDb db = new PerformanceDb(dataDirectory)) { - final int year = 2016; - final int month = 1; - final int day = 3; - - final DateTimeRange timeRange = DateTimeRange.ofDay(DateUtils.getDate(year, month, day, 1, 1, 1)); - final List entries = generateEntries(timeRange, numberOfEntries, 0, tags); - db.putEntries(entries); - expected.addAll(entries); - - final LongList actualEntries = db.get(Query.createQuery(tags, timeRange)).singleGroup().flatMap(); - Assert.assertEquals(actualEntries.size(), expected.size() * 2); - - Assert.assertEquals(actualEntries, toExpectedValues(expected)); - } - } - - public void testInsertIntoMultipleFilesWithDifferentTags() throws Exception { - - try (PerformanceDb db = new PerformanceDb(dataDirectory)) { - final OffsetDateTime from = DateUtils.getDate(2016, 1, 1, 00, 00, 00); - final OffsetDateTime to = DateUtils.getDate(2016, 1, 1, 23, 59, 50); - - final DateTimeRange timeRange = new DateTimeRange(from, to); - final DateTimeRange dateRange = new DateTimeRange(from, to); - final long numberOfEntries = timeRange.duration().toHours(); - - final Tags tagsCommon = Tags.createAndAddToDictionary("commonKey", "commonValue"); - final Tags tagsOne = Tags.createAndAddToDictionary("myKey", "one", "commonKey", "commonValue"); - final List entriesOne = generateEntries(timeRange, numberOfEntries, 1, tagsOne); - db.putEntries(entriesOne); - printEntries(entriesOne, "one"); - - final Tags tagsTwo = Tags.createAndAddToDictionary("myKey", "two", "commonKey", "commonValue"); - final List entriesTwo = generateEntries(timeRange, numberOfEntries, 2, tagsTwo); - printEntries(entriesTwo, "two"); - db.putEntries(entriesTwo); - - final Tags tagsThree = Tags.createAndAddToDictionary("myKey", "three", "commonKey", "commonValue"); - final List 
entriesThree = generateEntries(timeRange, numberOfEntries, 3, tagsThree); - printEntries(entriesThree, "three"); - db.putEntries(entriesThree); - - final LongList actualEntriesOne = db.get(Query.createQuery(tagsOne, dateRange)).singleGroup().flatMap(); - Assert.assertEquals(actualEntriesOne, toExpectedValues(entriesOne)); - - final LongList actualEntriesTwo = db.get(Query.createQuery(tagsTwo, dateRange)).singleGroup().flatMap(); - Assert.assertEquals(actualEntriesTwo, toExpectedValues(entriesTwo)); - - final LongList actualEntriesThree = db.get(Query.createQuery(tagsThree, dateRange)).singleGroup().flatMap(); - Assert.assertEquals(actualEntriesThree, toExpectedValues(entriesThree)); - - final LongList actualEntriesAll = db.get(Query.createQuery(tagsCommon, dateRange)).singleGroup().flatMap(); - final List expectedAll = CollectionUtils.collate(entriesOne, - CollectionUtils.collate(entriesTwo, entriesThree, EntryByDateComparator.INSTANCE), - EntryByDateComparator.INSTANCE); - final LongList expectedValues = toExpectedValues(expectedAll); - - actualEntriesAll.sort(); - expectedValues.sort(); - - Assert.assertEquals(actualEntriesAll, expectedValues); - } - } - - public void testGroupBySingleField() throws Exception { - try (PerformanceDb db = new PerformanceDb(dataDirectory)) { - final OffsetDateTime from = DateUtils.getDate(2016, 1, 1, 00, 00, 00); - final OffsetDateTime to = DateUtils.getDate(2016, 1, 1, 23, 59, 50); - - final DateTimeRange timeRange = new DateTimeRange(from, to); - final long numberOfEntries = timeRange.duration().toHours(); - - final String key = "myKey"; - final Tags tagsOne = Tags.createAndAddToDictionary(key, "one", "commonKey", "commonValue"); - final Tags tagsTwo = Tags.createAndAddToDictionary(key, "two", "commonKey", "commonValue"); - final Tags tagsThree = Tags.createAndAddToDictionary("commonKey", "commonValue"); - final LongList entriesOne = storeEntries(db, timeRange, numberOfEntries, tagsOne, 1); - final LongList entriesTwo = 
storeEntries(db, timeRange, numberOfEntries, tagsTwo, 2); - final LongList entriesThree = storeEntries(db, timeRange, numberOfEntries, tagsThree, 3); - - final Result result = db.get(Query.createQuery("commonKey=commonValue", timeRange), Arrays.asList(key)); - - final List groups = result.getGroups(); - - for (final GroupResult groupResult : groups) { - final Tags groupedBy = groupResult.getGroupedBy(); - - if (groupedBy.equals(Tags.createAndAddToDictionary(key, "one"))) { - Assert.assertEquals(groupResult.flatMap(), entriesOne); - } else if (groupedBy.equals(Tags.createAndAddToDictionary(key, "two"))) { - - Assert.assertEquals(groupResult.flatMap(), entriesTwo); - } else if (groupedBy.isEmpty()) { - Assert.assertEquals(groupResult.flatMap(), entriesThree); - } else { - Assert.fail("unexpected group: " + groupResult.getGroupedBy()); - } - } - } - } - - public void testGroupByMultipleFields() throws Exception { - try (PerformanceDb db = new PerformanceDb(dataDirectory)) { - final OffsetDateTime from = DateUtils.getDate(2016, 1, 1, 00, 00, 00); - final OffsetDateTime to = DateUtils.getDate(2016, 1, 1, 23, 59, 50); - - final DateTimeRange timeRange = new DateTimeRange(from, to); - final long numberOfEntries = timeRange.duration().toHours(); - - final String key1 = "myKey1"; - final String key2 = "myKey2"; - final Tags tagsOne = Tags.createAndAddToDictionary(key1, "one", key2, "aaa", "commonKey", "commonValue"); - final Tags tagsTwoA = Tags.createAndAddToDictionary(key1, "two", key2, "bbb", "commonKey", "commonValue"); - final Tags tagsTwoB = Tags.createAndAddToDictionary(key1, "two", key2, "bbb", "commonKey", "commonValue"); - final Tags tagsThree = Tags.createAndAddToDictionary(key1, "three", "commonKey", "commonValue"); - - final LongList entriesOne = storeEntries(db, timeRange, numberOfEntries, tagsOne, 1); - final LongList entriesTwo = storeEntries(db, timeRange, numberOfEntries, tagsTwoA, 2); - entriesTwo.addAll(storeEntries(db, timeRange, numberOfEntries, 
tagsTwoB, 3)); - final LongList entriesThree = storeEntries(db, timeRange, numberOfEntries, tagsThree, 4); - - final Result result = db.get(Query.createQuery("commonKey=commonValue", timeRange), - Arrays.asList(key1, key2)); - - final List groups = result.getGroups(); - - for (final GroupResult groupResult : groups) { - final Tags groupedBy = groupResult.getGroupedBy(); - - if (groupedBy.equals(Tags.createAndAddToDictionary(key1, "one", key2, "aaa"))) { - Assert.assertEquals(groupResult.flatMap(), entriesOne); - } else if (groupedBy.equals(Tags.createAndAddToDictionary(key1, "two", key2, "bbb"))) { - // there is no defined order of the entries. - // eventually we might return them in ascending order, but - // that is not yet implemented - final LongList actualEntries = groupResult.flatMap(); - - entriesTwo.sort(); - actualEntries.sort(); - - Assert.assertEquals(actualEntries, entriesTwo); - } else if (groupedBy.equals(Tags.createAndAddToDictionary(key1, "three"))) { - Assert.assertEquals(groupResult.flatMap(), entriesThree); - } else { - Assert.fail("unexpected group: " + groupedBy); - } - } - } - } - - private LongList storeEntries(final PerformanceDb performanceDb, final DateTimeRange timeRange, - final long numberOfEntries, final Tags tags, final int addToDate) { - final List entries = generateEntries(timeRange, numberOfEntries, addToDate, tags); - performanceDb.putEntries(entries); - - final LongList result = new LongList(); - - for (final Entry entry : entries) { - result.add(entry.getEpochMilli()); - result.add(entry.getValue()); - } - - return result; - } - - private void printEntries(final List entriesOne, final String label) { - - int index = 0; - for (final Entry entry : entriesOne) { - System.out.printf("%4d %s %d (%s)\n", index, entry.getEpochMilli(), entry.getValue(), label); - index++; - } - } - - private LongList toExpectedValues(final List entries) { - - final LongList result = new LongList(); - for (final Entry entry : entries) { - 
result.add(entry.getEpochMilli()); - result.add(entry.getValue()); - } - - return result; - } + try (PerformanceDb db = new PerformanceDb(dataDirectory)) { + final OffsetDateTime nowInUtc = DateUtils.nowInUtc(); + final long date = nowInUtc.toInstant().toEpochMilli(); + final long value = 1; + final Tags tags = Tags.createAndAddToDictionary("myKey", "myValue"); + db.putEntry(new Entry(date, value, tags)); + + final Result result = db.get(Query.createQuery(tags, DateTimeRange.ofDay(nowInUtc))); + final LongList stream = result.singleGroup().flatMap(); + + Assert.assertEquals(stream.size(), 2); + + Assert.assertEquals(stream.get(0), date); + Assert.assertEquals(stream.get(1), value); + } + } + + public void testInsertIntoMultipleFilesRead() throws Exception { + + try (PerformanceDb db = new PerformanceDb(dataDirectory)) { + final DateTimeRange dateRange = new DateTimeRange(DateUtils.getDate(2016, 11, 1, 10, 0, 0), + DateUtils.getDate(2016, 11, 2, 12, 34, 56)); + final long dayOne = dateRange.getStartEpochMilli(); + final long dayTwo = dateRange.getEndEpochMilli(); + final long valueOne = 1; + final long valueTwo = 2; + final Tags tags = Tags.createAndAddToDictionary("myKey", "myValue"); + + db.putEntry(new Entry(dayOne, valueOne, tags)); + db.putEntry(new Entry(dayTwo, valueTwo, tags)); + + final LongList stream = db.get(Query.createQuery(tags, dateRange)).singleGroup().flatMap(); + + Assert.assertEquals(stream.size(), 4); + + Assert.assertEquals(stream.get(0), dayOne); + Assert.assertEquals(stream.get(1), valueOne); + Assert.assertEquals(stream.get(2), dayTwo); + Assert.assertEquals(stream.get(3), valueTwo); + } + } + + private List generateEntries(final DateTimeRange dateRange, final long n, final int addToDate, + final Tags tags) { + final List result = new ArrayList<>(); + final long differenceInMs = dateRange.duration().toMillis() / n; + long currentTime = dateRange.getStartEpochMilli(); + + for (long i = 0; i < n; i++) { + final long value = 
ThreadLocalRandom.current().nextInt(0, Integer.MAX_VALUE); + final long date = OffsetDateTime.ofInstant(Instant.ofEpochMilli(currentTime + addToDate), ZoneOffset.UTC) + .toInstant().toEpochMilli(); + result.add(new Entry(date, value, tags)); + + currentTime += differenceInMs; + } + return result; + } + + @DataProvider + public Object[][] providerAppendToExistingFile() throws Exception { + return new Object[][] { // + { 2 }, // + { 100 }, // + { 500 }, // + }; + } + + @Test(dataProvider = "providerAppendToExistingFile") + public void testAppendToExistingFile(final long numberOfEntries) throws Exception { + + try (PerformanceDb db = new PerformanceDb(dataDirectory)) { + + final int year = 2016; + final int month = 1; + final int day = 2; + + final DateTimeRange timeRange = DateTimeRange.ofDay(DateUtils.getDate(year, month, day, 1, 1, 1)); + + final Tags tags = Tags.createAndAddToDictionary("myKey", "one"); + final List entries = generateEntries(timeRange, numberOfEntries, 0, tags); + + printEntries(entries, ""); + + db.putEntries(entries); + + final LongList actualEntries = db.get(Query.createQuery(tags, timeRange)).singleGroup().flatMap(); + Assert.assertEquals(actualEntries.size(), entries.size() * 2); + + for (int i = 0; i < entries.size(); i++) { + final Entry entry = entries.get(i); + final long epochMilli = entry.getEpochMilli(); + final long value = entry.getValue(); + + Assert.assertEquals(actualEntries.get(i * 2), epochMilli); + Assert.assertEquals(actualEntries.get(i * 2 + 1), value); + } + + } + } + + @DataProvider + public Object[][] providerAppendToExistingFileWithRestart() throws Exception { + return new Object[][] { // + { 2 }, // + { 100 }, // + { 500 }, // + }; + } + + @Test(dataProvider = "providerAppendToExistingFileWithRestart") + public void testAppendToExistingFileWithRestart(final long numberOfEntries) throws Exception { + final Tags tags; + final List expected = new ArrayList<>(); + try (PerformanceDb db = new PerformanceDb(dataDirectory)) { + 
+ final int year = 2016; + final int month = 1; + final int day = 2; + + tags = Tags.createAndAddToDictionary("myKey", "one"); + final DateTimeRange timeRange = DateTimeRange.ofDay(DateUtils.getDate(year, month, day, 1, 1, 1)); + + final List entries = generateEntries(timeRange, numberOfEntries, 0, tags); + db.putEntries(entries); + expected.addAll(entries); + } + + try (PerformanceDb db = new PerformanceDb(dataDirectory)) { + final int year = 2016; + final int month = 1; + final int day = 3; + + final DateTimeRange timeRange = DateTimeRange.ofDay(DateUtils.getDate(year, month, day, 1, 1, 1)); + final List entries = generateEntries(timeRange, numberOfEntries, 0, tags); + db.putEntries(entries); + expected.addAll(entries); + + final LongList actualEntries = db.get(Query.createQuery(tags, timeRange)).singleGroup().flatMap(); + Assert.assertEquals(actualEntries.size(), expected.size() * 2); + + Assert.assertEquals(actualEntries, toExpectedValues(expected)); + } + } + + public void testInsertIntoMultipleFilesWithDifferentTags() throws Exception { + + try (PerformanceDb db = new PerformanceDb(dataDirectory)) { + final OffsetDateTime from = DateUtils.getDate(2016, 1, 1, 00, 00, 00); + final OffsetDateTime to = DateUtils.getDate(2016, 1, 1, 23, 59, 50); + + final DateTimeRange timeRange = new DateTimeRange(from, to); + final DateTimeRange dateRange = new DateTimeRange(from, to); + final long numberOfEntries = timeRange.duration().toHours(); + + final Tags tagsCommon = Tags.createAndAddToDictionary("commonKey", "commonValue"); + final Tags tagsOne = Tags.createAndAddToDictionary("myKey", "one", "commonKey", "commonValue"); + final List entriesOne = generateEntries(timeRange, numberOfEntries, 1, tagsOne); + db.putEntries(entriesOne); + printEntries(entriesOne, "one"); + + final Tags tagsTwo = Tags.createAndAddToDictionary("myKey", "two", "commonKey", "commonValue"); + final List entriesTwo = generateEntries(timeRange, numberOfEntries, 2, tagsTwo); + printEntries(entriesTwo, 
"two"); + db.putEntries(entriesTwo); + + final Tags tagsThree = Tags.createAndAddToDictionary("myKey", "three", "commonKey", "commonValue"); + final List entriesThree = generateEntries(timeRange, numberOfEntries, 3, tagsThree); + printEntries(entriesThree, "three"); + db.putEntries(entriesThree); + + final LongList actualEntriesOne = db.get(Query.createQuery(tagsOne, dateRange)).singleGroup().flatMap(); + Assert.assertEquals(actualEntriesOne, toExpectedValues(entriesOne)); + + final LongList actualEntriesTwo = db.get(Query.createQuery(tagsTwo, dateRange)).singleGroup().flatMap(); + Assert.assertEquals(actualEntriesTwo, toExpectedValues(entriesTwo)); + + final LongList actualEntriesThree = db.get(Query.createQuery(tagsThree, dateRange)).singleGroup().flatMap(); + Assert.assertEquals(actualEntriesThree, toExpectedValues(entriesThree)); + + final LongList actualEntriesAll = db.get(Query.createQuery(tagsCommon, dateRange)).singleGroup().flatMap(); + final List expectedAll = CollectionUtils.collate(entriesOne, + CollectionUtils.collate(entriesTwo, entriesThree, EntryByDateComparator.INSTANCE), + EntryByDateComparator.INSTANCE); + final LongList expectedValues = toExpectedValues(expectedAll); + + actualEntriesAll.sort(); + expectedValues.sort(); + + Assert.assertEquals(actualEntriesAll, expectedValues); + } + } + + public void testGroupBySingleField() throws Exception { + try (PerformanceDb db = new PerformanceDb(dataDirectory)) { + final OffsetDateTime from = DateUtils.getDate(2016, 1, 1, 00, 00, 00); + final OffsetDateTime to = DateUtils.getDate(2016, 1, 1, 23, 59, 50); + + final DateTimeRange timeRange = new DateTimeRange(from, to); + final long numberOfEntries = timeRange.duration().toHours(); + + final String key = "myKey"; + final Tags tagsOne = Tags.createAndAddToDictionary(key, "one", "commonKey", "commonValue"); + final Tags tagsTwo = Tags.createAndAddToDictionary(key, "two", "commonKey", "commonValue"); + final Tags tagsThree = 
Tags.createAndAddToDictionary("commonKey", "commonValue"); + final LongList entriesOne = storeEntries(db, timeRange, numberOfEntries, tagsOne, 1); + final LongList entriesTwo = storeEntries(db, timeRange, numberOfEntries, tagsTwo, 2); + final LongList entriesThree = storeEntries(db, timeRange, numberOfEntries, tagsThree, 3); + + final Result result = db.get(Query.createQuery("commonKey=commonValue", timeRange), Arrays.asList(key)); + + final List groups = result.getGroups(); + + for (final GroupResult groupResult : groups) { + final Tags groupedBy = groupResult.getGroupedBy(); + + if (groupedBy.equals(Tags.createAndAddToDictionary(key, "one"))) { + Assert.assertEquals(groupResult.flatMap(), entriesOne); + } else if (groupedBy.equals(Tags.createAndAddToDictionary(key, "two"))) { + + Assert.assertEquals(groupResult.flatMap(), entriesTwo); + } else if (groupedBy.isEmpty()) { + Assert.assertEquals(groupResult.flatMap(), entriesThree); + } else { + Assert.fail("unexpected group: " + groupResult.getGroupedBy()); + } + } + } + } + + public void testGroupByMultipleFields() throws Exception { + try (PerformanceDb db = new PerformanceDb(dataDirectory)) { + final OffsetDateTime from = DateUtils.getDate(2016, 1, 1, 00, 00, 00); + final OffsetDateTime to = DateUtils.getDate(2016, 1, 1, 23, 59, 50); + + final DateTimeRange timeRange = new DateTimeRange(from, to); + final long numberOfEntries = timeRange.duration().toHours(); + + final String key1 = "myKey1"; + final String key2 = "myKey2"; + final Tags tagsOne = Tags.createAndAddToDictionary(key1, "one", key2, "aaa", "commonKey", "commonValue"); + final Tags tagsTwoA = Tags.createAndAddToDictionary(key1, "two", key2, "bbb", "commonKey", "commonValue"); + final Tags tagsTwoB = Tags.createAndAddToDictionary(key1, "two", key2, "bbb", "commonKey", "commonValue"); + final Tags tagsThree = Tags.createAndAddToDictionary(key1, "three", "commonKey", "commonValue"); + + final LongList entriesOne = storeEntries(db, timeRange, 
numberOfEntries, tagsOne, 1); + final LongList entriesTwo = storeEntries(db, timeRange, numberOfEntries, tagsTwoA, 2); + entriesTwo.addAll(storeEntries(db, timeRange, numberOfEntries, tagsTwoB, 3)); + final LongList entriesThree = storeEntries(db, timeRange, numberOfEntries, tagsThree, 4); + + final Result result = db.get(Query.createQuery("commonKey=commonValue", timeRange), + Arrays.asList(key1, key2)); + + final List groups = result.getGroups(); + + for (final GroupResult groupResult : groups) { + final Tags groupedBy = groupResult.getGroupedBy(); + + if (groupedBy.equals(Tags.createAndAddToDictionary(key1, "one", key2, "aaa"))) { + Assert.assertEquals(groupResult.flatMap(), entriesOne); + } else if (groupedBy.equals(Tags.createAndAddToDictionary(key1, "two", key2, "bbb"))) { + // there is no defined order of the entries. + // eventually we might return them in ascending order, but + // that is not yet implemented + final LongList actualEntries = groupResult.flatMap(); + + entriesTwo.sort(); + actualEntries.sort(); + + Assert.assertEquals(actualEntries, entriesTwo); + } else if (groupedBy.equals(Tags.createAndAddToDictionary(key1, "three"))) { + Assert.assertEquals(groupResult.flatMap(), entriesThree); + } else { + Assert.fail("unexpected group: " + groupedBy); + } + } + } + } + + private LongList storeEntries(final PerformanceDb performanceDb, final DateTimeRange timeRange, + final long numberOfEntries, final Tags tags, final int addToDate) { + final List entries = generateEntries(timeRange, numberOfEntries, addToDate, tags); + performanceDb.putEntries(entries); + + final LongList result = new LongList(); + + for (final Entry entry : entries) { + result.add(entry.getEpochMilli()); + result.add(entry.getValue()); + } + + return result; + } + + private void printEntries(final List entriesOne, final String label) { + + int index = 0; + for (final Entry entry : entriesOne) { + System.out.printf("%4d %s %d (%s)\n", index, entry.getEpochMilli(), entry.getValue(), 
label); + index++; + } + } + + private LongList toExpectedValues(final List entries) { + + final LongList result = new LongList(); + for (final Entry entry : entries) { + result.add(entry.getEpochMilli()); + result.add(entry.getValue()); + } + + return result; + } }