Fix toString for maps with values of type Empty
The MAX_KEY dummy entry inserted into the tree had a one-byte value. This triggered an assertion failure for maps with values of type Empty, because such maps expect every stored value to be empty. Fixed by using an empty byte array as the value of the MAX_KEY entry.
This commit is contained in:
@@ -49,6 +49,11 @@ public final class ByteArrayKey implements Comparable<ByteArrayKey> {
|
||||
return compare(key, otherKey) == 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return Arrays.toString(bytes);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
final int prime = 31;
|
||||
|
||||
@@ -81,15 +81,15 @@ class NodeEntry {
|
||||
+ valueAsString + "]";
|
||||
}
|
||||
|
||||
public String toString(final Function<byte[], String> keyDecoder, final Function<byte[], String> valueDecoder) {
|
||||
public <K,V> String toString(final Function<byte[], K> keyDecoder, final Function<byte[], V> valueDecoder) {
|
||||
final String valueAsString = isInnerNode() ? String.valueOf(VariableByteEncoder.decodeFirstValue(value))
|
||||
: valueDecoder.apply(value);
|
||||
: String.valueOf(valueDecoder.apply(value));
|
||||
|
||||
final String keyAsString;
|
||||
if (Arrays.equals(key, PersistentMap.MAX_KEY)) {
|
||||
keyAsString = "<<<MAX_KEY>>>";
|
||||
} else {
|
||||
keyAsString = keyDecoder.apply(key);
|
||||
keyAsString = String.valueOf(keyDecoder.apply(key));
|
||||
}
|
||||
|
||||
return "NodeEntry [type=" + type + ", key=" + keyAsString + ", value=" + valueAsString + "]";
|
||||
|
||||
@@ -7,6 +7,7 @@ import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.function.Function;
|
||||
import java.util.Objects;
|
||||
import java.util.Stack;
|
||||
import java.util.UUID;
|
||||
@@ -39,6 +40,14 @@ public class PersistentMap<K, V> implements AutoCloseable {
|
||||
public byte[] encode(O object);
|
||||
|
||||
public O decode(byte[] bytes);
|
||||
|
||||
public default Function<byte[], O> asDecoder() {
|
||||
return bytes -> this.decode(bytes);
|
||||
}
|
||||
|
||||
public default Function<O,byte[]> asEncoder() {
|
||||
return plain -> this.encode(plain);
|
||||
}
|
||||
}
|
||||
|
||||
private static final class StringCoder implements EncoderDecoder<String> {
|
||||
@@ -160,7 +169,7 @@ public class PersistentMap<K, V> implements AutoCloseable {
|
||||
writeNodeOffsetOfRootNode(blockOffset);
|
||||
|
||||
// 5. insert a dummy entry with a 'maximum' key
|
||||
putValue(MAX_KEY, new byte[] { 0 });
|
||||
putValue(MAX_KEY, new byte[] { });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -357,7 +366,9 @@ public class PersistentMap<K, V> implements AutoCloseable {
|
||||
}
|
||||
|
||||
private void writeNode(final PersistentMapDiskNode node) {
|
||||
LOGGER.trace("writing node {}", node);
|
||||
if (LOGGER.isTraceEnabled()) {
|
||||
LOGGER.trace("writing node {}", node.toString(keyEncoder.asDecoder(), valueEncoder.asDecoder()));
|
||||
}
|
||||
final long nodeOffest = node.getNodeOffset();
|
||||
// final DiskBlock diskBlock = diskStore.getDiskBlock(nodeOffest, BLOCK_SIZE);
|
||||
DiskBlock diskBlock = node.getDiskBlock();
|
||||
|
||||
@@ -8,6 +8,7 @@ import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.TreeMap;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.lucares.collections.LongList;
|
||||
@@ -201,6 +202,20 @@ public class PersistentMapDiskNode {
|
||||
+ String.join("\n", entries.values().stream().map(NodeEntry::toString).collect(Collectors.toList()));
|
||||
}
|
||||
|
||||
public <K,V> String toString(Function<byte[], K> keyDecoder, Function<byte[], V> valueDecoder) {
|
||||
StringBuilder result = new StringBuilder();
|
||||
result.append("@");
|
||||
result.append(nodeOffset);
|
||||
result.append(": ");
|
||||
for (NodeEntry e : entries.values()) {
|
||||
String s = e.toString(keyDecoder, valueDecoder);
|
||||
result.append("\n");
|
||||
result.append(s);
|
||||
}
|
||||
|
||||
return result.toString();
|
||||
}
|
||||
|
||||
public NodeEntry getTopNodeEntry() {
|
||||
return entries.lastEntry().getValue();
|
||||
}
|
||||
|
||||
@@ -179,6 +179,66 @@ public class PersistentMapTest {
|
||||
|
||||
}
|
||||
}
|
||||
@Test(invocationCount = 1)
|
||||
public void testManyEmptyValues() throws Exception {
|
||||
final Path file = dataDirectory.resolve("map.db");
|
||||
final var insertedValues = new HashMap<Long, Empty>();
|
||||
|
||||
final SecureRandom rnd = new SecureRandom();
|
||||
rnd.setSeed(1);
|
||||
|
||||
try (final PersistentMap<Long, Empty> map = new PersistentMap<>(file, PersistentMap.LONG_CODER,
|
||||
PersistentMap.EMPTY_ENCODER)) {
|
||||
|
||||
for (int i = 0; i < 1500; i++) {
|
||||
// System.out.println("\n\ninserting: " + i);
|
||||
|
||||
final Long key = (long) (rnd.nextGaussian() * Integer.MAX_VALUE);
|
||||
final Empty value = Empty.INSTANCE;
|
||||
Assert.assertNull(map.getValue(key));
|
||||
|
||||
Assert.assertNull(map.putValue(key, value));
|
||||
|
||||
insertedValues.put(key, value);
|
||||
|
||||
// map.print();
|
||||
|
||||
final boolean failEarly = false;
|
||||
if (failEarly) {
|
||||
for (final var entry : insertedValues.entrySet()) {
|
||||
final Empty actualValue = map.getValue(entry.getKey());
|
||||
|
||||
if (!Objects.equals(actualValue, entry.getValue())) {
|
||||
map.print();
|
||||
}
|
||||
|
||||
Assert.assertEquals(actualValue, entry.getValue(),
|
||||
"value for key " + entry.getKey() + " in the " + i + "th iteration");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
try (final PersistentMap<Long, Empty> map = new PersistentMap<>(file, PersistentMap.LONG_CODER,
|
||||
PersistentMap.EMPTY_ENCODER)) {
|
||||
map.print();
|
||||
final AtomicInteger counter = new AtomicInteger();
|
||||
map.visitNodeEntriesPreOrder(
|
||||
(node, parentNode, nodeEntry, depth) -> counter.addAndGet(nodeEntry.isInnerNode() ? 1 : 0));
|
||||
|
||||
Assert.assertEquals(counter.get(), 4,
|
||||
"number of nodes should be small. Any number larger than 4 indicates, "
|
||||
+ "that new inner nodes are created even though the existing inner "
|
||||
+ "nodes could hold the values");
|
||||
|
||||
for (final var entry : insertedValues.entrySet()) {
|
||||
final Empty actualValue = map.getValue(entry.getKey());
|
||||
Assert.assertEquals(actualValue, entry.getValue(),
|
||||
"value for key " + entry.getKey() + " after all iterations");
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@Test(invocationCount = 1)
|
||||
public void testEasyValues() throws Exception {
|
||||
|
||||
@@ -66,7 +66,7 @@ public class PdbFile {
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "PdbFile [tags=" + tags + ", rootBlockNumber=" + rootBlockNumber + "]";
|
||||
return "PdbFile [tags=" + tags + ", rootBlockNumber=" + rootBlockNumber + ", partitionId="+partitionId+"]";
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
@@ -22,13 +22,13 @@ class PdbWriter implements AutoCloseable, Flushable {
|
||||
private final PdbFile pdbFile;
|
||||
private long lastEpochMilli;
|
||||
|
||||
private final TimeSeriesFile bsFile;
|
||||
private final TimeSeriesFile timeSeriesFile;
|
||||
|
||||
public PdbWriter(final PdbFile pdbFile, final DiskStorage diskStorage) {
|
||||
this.pdbFile = pdbFile;
|
||||
|
||||
bsFile = TimeSeriesFile.existingFile(pdbFile.getRootBlockNumber(), diskStorage);
|
||||
final Optional<Long> optionalLastValue = bsFile.getLastValue(); // TODO is this last value correct?
|
||||
timeSeriesFile = TimeSeriesFile.existingFile(pdbFile.getRootBlockNumber(), diskStorage);
|
||||
final Optional<Long> optionalLastValue = timeSeriesFile.getLastValue(); // TODO is this last value correct?
|
||||
|
||||
lastEpochMilli = optionalLastValue.orElse(0L);
|
||||
}
|
||||
@@ -43,7 +43,7 @@ class PdbWriter implements AutoCloseable, Flushable {
|
||||
|
||||
public void write(final long epochMilli, final long value) throws WriteException, InvalidValueException {
|
||||
try {
|
||||
bsFile.appendTimeValue(epochMilli, value);
|
||||
timeSeriesFile.appendTimeValue(epochMilli, value);
|
||||
|
||||
lastEpochMilli = epochMilli;
|
||||
} catch (final RuntimeException e) {
|
||||
@@ -55,12 +55,12 @@ class PdbWriter implements AutoCloseable, Flushable {
|
||||
public void close() {
|
||||
|
||||
LOGGER.debug("close PdbWriter {}", pdbFile);
|
||||
bsFile.close();
|
||||
timeSeriesFile.close();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void flush() {
|
||||
bsFile.flush();
|
||||
timeSeriesFile.flush();
|
||||
}
|
||||
|
||||
public static void writeEntry(final PdbFile pdbFile, final DiskStorage diskStorage, final Entry... entries) {
|
||||
|
||||
Reference in New Issue
Block a user