apply new code formatter and save action
@@ -25,110 +25,110 @@ import org.testng.annotations.Test;

@Test
public class BSFileTest {

    private Path dataDirectory;

    @BeforeMethod
    public void beforeMethod() throws IOException {
        dataDirectory = Files.createTempDirectory("pdb");
    }

    @AfterMethod
    public void afterMethod() throws IOException {
        FileUtils.delete(dataDirectory);
    }

    public void testBlockStorage() throws Exception {
        final Path file = dataDirectory.resolve("data.int.db");
        final int numLongs = 1000;
        long blockOffset = -1;

        long start = System.nanoTime();
        try (final DiskStorage ds = new DiskStorage(file, dataDirectory)) {

            try (final BSFile bsFile = BSFile.newFile(ds, NullCustomizer.INSTANCE)) {

                blockOffset = bsFile.getRootBlockOffset();

                for (long i = 0; i < numLongs / 2; i++) {
                    bsFile.append(i);
                }
            }
            try (final BSFile bsFile = BSFile.existingFile(blockOffset, ds, NullCustomizer.INSTANCE)) {

                for (long i = numLongs / 2; i < numLongs; i++) {
                    bsFile.append(i);
                }
            }
        }
        System.out.println("duration write: " + (System.nanoTime() - start) / 1_000_000.0 + "ms");

        start = System.nanoTime();
        try (final DiskStorage ds = new DiskStorage(file, dataDirectory)) {
            final BSFile bsFile = BSFile.existingFile(blockOffset, ds, NullCustomizer.INSTANCE);
            final LongList actualLongs = bsFile.asLongList();
            final LongList expectedLongs = LongList.rangeClosed(0, numLongs - 1);
            Assert.assertEquals(actualLongs, expectedLongs);
        }
        System.out.println("duration read: " + (System.nanoTime() - start) / 1_000_000.0 + "ms");
    }

    public void testBlockStorageMultithreading() throws Exception {
        final ExecutorService pool = Executors.newCachedThreadPool();
        final Path file = dataDirectory.resolve("data.int.db");

        final int threads = 50;
        final int values = 10000;
        // note: this map is written from the worker threads below; a ConcurrentHashMap
        // would make that safe
        final Map<Long, LongList> expected = new HashMap<>();
        final List<Future<Void>> futures = new ArrayList<>();
        final long start = System.nanoTime();
        try (final DiskStorage ds = new DiskStorage(file, dataDirectory)) {

            for (int i = 0; i < threads; i++) {
                final Future<Void> future = pool.submit(() -> {
                    final ThreadLocalRandom random = ThreadLocalRandom.current();
                    final LongList listOfValues = new LongList();

                    try (BSFile bsFile = BSFile.newFile(ds, NullCustomizer.INSTANCE)) {

                        for (int j = 0; j < values; j++) {
                            // will produce 1, 2 and 3 byte sequences when encoded
                            final long value = random.nextLong(32768);
                            listOfValues.add(value);
                            bsFile.append(value);
                        }
                        expected.put(bsFile.getRootBlockOffset(), listOfValues);
                    }

                    return null;
                });
                futures.add(future);
            }

            for (final Future<Void> future : futures) {
                future.get();
            }

            pool.shutdown();
            pool.awaitTermination(5, TimeUnit.MINUTES);
        }
        System.out.println("duration write: " + (System.nanoTime() - start) / 1_000_000.0 + "ms");

        // verification
        try (final DiskStorage ds = new DiskStorage(file, dataDirectory)) {
            for (final Entry<Long, LongList> entry : expected.entrySet()) {
                final long rootBlockNumber = entry.getKey();
                final LongList expectedValues = entry.getValue();

                try (BSFile bsFile = BSFile.existingFile(rootBlockNumber, ds, NullCustomizer.INSTANCE)) {
                    final LongList actualLongs = bsFile.asLongList();
                    final LongList expectedLongs = expectedValues;
                    Assert.assertEquals(actualLongs, expectedLongs, "for rootBlockNumber=" + rootBlockNumber);
                }
            }
        }
    }

}
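The "1, 2 and 3 byte sequences" comment above suggests that appended longs are stored in a variable-length encoding. A minimal sketch of such an encoding, assuming a 7-bits-per-byte varint (the actual BSFile wire format is not shown in this diff), explains the byte counts: values below 128 take one byte, below 16384 two, and the remaining values below 32768 three.

    // Hypothetical varint sketch, not the real BSFile encoding: 7 payload bits per
    // byte, high bit set on every byte except the last.
    static int encodeVarLong(long value, final byte[] dest, int pos) {
        final int start = pos;
        while ((value & ~0x7FL) != 0) {
            dest[pos++] = (byte) ((value & 0x7F) | 0x80); // low 7 bits plus continuation bit
            value >>>= 7;
        }
        dest[pos++] = (byte) value; // last byte, continuation bit clear
        return pos - start;         // 1 for value < 128, 2 for < 16384, 3 for < 2097152
    }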
@@ -15,70 +15,70 @@ import org.testng.annotations.BeforeMethod;

public class TimeSeriesFileTest {

    private Path dataDirectory;

    @BeforeMethod
    public void beforeMethod() throws IOException {
        dataDirectory = Files.createTempDirectory("pdb");
    }

    @AfterMethod
    public void afterMethod() throws IOException {
        FileUtils.delete(dataDirectory);
    }

    public void testBlockStorageTimeValue() throws Exception {
        final Path file = dataDirectory.resolve("data.int.db");
        final Random random = ThreadLocalRandom.current();
        final int numTimeValuePairs = 1000;
        long blockNumber = -1;
        final LongList expectedLongs = new LongList();

        long start = System.nanoTime();
        long lastEpochMilli = 0;
        try (final DiskStorage ds = new DiskStorage(file, dataDirectory)) {

            try (final TimeSeriesFile bsFile = TimeSeriesFile.newFile(ds)) {

                blockNumber = bsFile.getRootBlockOffset();

                for (long i = 0; i < numTimeValuePairs / 2; i++) {

                    final long epochMilli = lastEpochMilli + random.nextInt(1000);
                    final long value = random.nextInt(10000);

                    lastEpochMilli = epochMilli;

                    bsFile.appendTimeValue(epochMilli, value);
                    expectedLongs.add(epochMilli);
                    expectedLongs.add(value);
                }
            }
            try (final TimeSeriesFile bsFile = TimeSeriesFile.existingFile(blockNumber, ds)) {

                for (long i = numTimeValuePairs / 2; i < numTimeValuePairs; i++) {
                    final long epochMilli = lastEpochMilli + random.nextInt(100);
                    final long value = random.nextInt(10000);

                    lastEpochMilli = epochMilli;

                    bsFile.appendTimeValue(epochMilli, value);
                    expectedLongs.add(epochMilli);
                    expectedLongs.add(value);
                }
            }
        }
        System.out.println("duration write: " + (System.nanoTime() - start) / 1_000_000.0 + "ms");

        start = System.nanoTime();
        try (final DiskStorage ds = new DiskStorage(file, dataDirectory)) {
            final TimeSeriesFile bsFile = TimeSeriesFile.existingFile(blockNumber, ds);
            final LongList actualLongs = bsFile.asTimeValueLongList();

            Assert.assertEquals(actualLongs, expectedLongs);
        }
        System.out.println("duration read: " + (System.nanoTime() - start) / 1_000_000.0 + "ms");
    }

}
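Both halves of the test construct epochMilli as lastEpochMilli plus a small random increment, so the appended timestamp sequence is nondecreasing. That ordering is exactly what delta-based storage needs; a sketch of the idea, assuming (this diff does not show it) that TimeSeriesFile stores deltas, with writeVarLong and out standing in for any variable-length writer:

    // Hypothetical delta encoding of a nondecreasing timestamp column; illustration only.
    long previous = 0;
    for (final long epochMilli : timestamps) {    // the sequence appended by the test above
        final long delta = epochMilli - previous; // small, non-negative, compresses well
        previous = epochMilli;
        writeVarLong(out, delta);                 // assumed helper, e.g. the varint sketch above
    }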
@@ -18,289 +18,289 @@ import org.testng.annotations.Test;

@Test
public class DiskStorageTest {

    private static final int BLOCK_SIZE = 512;

    private Path dataDirectory;

    @BeforeMethod
    public void beforeMethod() throws IOException {
        dataDirectory = Files.createTempDirectory("pdb");
    }

    @AfterMethod
    public void afterMethod() throws IOException {
        FileUtils.delete(dataDirectory);
    }

    /**
     * File systems work with 4096 byte blocks, but we want to work with 512 bytes
     * per block. Does flushing a 512 byte block flush the full 4096 byte block?
     *
     * @throws Exception
     */
    @Test(enabled = false)
    public void testFlushingASectorOrABlock() throws Exception {
        final Path databaseFile = dataDirectory.resolve("db.ds");
        Files.deleteIfExists(databaseFile);

        try (DiskStorage ds = new DiskStorage(databaseFile, dataDirectory)) {
            final int numBlocks = 10;

            allocateBlocks(ds, numBlocks, BLOCK_SIZE);
            final List<DiskBlock> blocks = new ArrayList<>();

            // fill the first numBlocks 512-byte blocks,
            // that is more than one 4096 byte block
            for (int i = 0; i < numBlocks; i++) {
                final DiskBlock diskBlock = ds.getDiskBlock(i, BLOCK_SIZE);
                assertAllValuesAreEqual(diskBlock);
                fill(diskBlock, (byte) i);
                diskBlock.writeAsync();
                blocks.add(diskBlock);
            }

            // now force (aka flush) a block in the middle of the first 4096 byte block
            blocks.get(3).writeAsync();
            blocks.get(3).force();

            System.exit(0);

            // read all blocks again and check what they contain

            // 1. we do this with the existing file channel
            // this one should see every change, because we wrote them to the file channel
            for (int i = 0; i < numBlocks; i++) {
                final DiskBlock diskBlock = ds.getDiskBlock(i, BLOCK_SIZE);
                assertAllValuesAreEqual(diskBlock, (byte) i);
                fill(diskBlock, (byte) i);
                blocks.add(diskBlock);
            }

            // 2. we read the file from another file channel
            // this one might not see changes made to the first file channel
            //
            // But it does see the changes. Most likely, because both channels
            // use the same buffers from the operating system.
            try (DiskStorage ds2 = new DiskStorage(databaseFile, dataDirectory)) {
                for (int i = 0; i < numBlocks; i++) {
                    final DiskBlock diskBlock = ds2.getDiskBlock(i, BLOCK_SIZE);
                    assertAllValuesAreEqual(diskBlock, (byte) i);
                    fill(diskBlock, (byte) i);
                    blocks.add(diskBlock);
                }
            }
        }
    }
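The question the disabled test poses can also be probed with plain java.nio, without DiskStorage. A minimal sketch, assuming only the standard FileChannel API; note that force() makes no promise either way about whether just the written 512 byte sector or the whole surrounding 4096 byte page reaches the disk, which is what makes the experiment interesting.

    // Plain-NIO version of the experiment: write one 512-byte sector, then force it.
    private static void probeFlushGranularity(final Path probeFile) throws IOException {
        try (FileChannel ch = FileChannel.open(probeFile,
                StandardOpenOption.CREATE, StandardOpenOption.READ, StandardOpenOption.WRITE)) {
            final ByteBuffer sector = ByteBuffer.allocate(512);
            while (sector.hasRemaining()) {
                sector.put((byte) 0x33);
            }
            sector.flip();
            ch.write(sector, 3 * 512L); // a sector in the middle of the first 4096-byte page
            ch.force(false);            // flush file content; metadata flush not requested
        }
    }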
    @Test(enabled = true)
    public void testDiskStorage() throws Exception {
        final Path databaseFile = dataDirectory.resolve("db.ds");

        final ExecutorService pool = Executors.newCachedThreadPool();

        try (DiskStorage ds = new DiskStorage(databaseFile, dataDirectory)) {
            final int numBlocks = 10;

            final long[] blockOffsets = allocateBlocks(ds, numBlocks, BLOCK_SIZE);

            for (final long blockOffset : blockOffsets) {

                final long block = blockOffset;
                pool.submit(() -> {
                    final ThreadLocalRandom random = ThreadLocalRandom.current();
                    try {
                        // now read/write random blocks
                        for (int j = 0; j < 10; j++) {
                            final DiskBlock diskBlock = ds.getDiskBlock(block, BLOCK_SIZE);

                            assertAllValuesAreEqual(diskBlock);
                            fill(diskBlock, (byte) random.nextInt(127));

                            if (random.nextBoolean()) {
                                diskBlock.writeAsync();
                            } else {
                                diskBlock.writeAsync();
                                diskBlock.force();
                            }
                        }

                    } catch (final Exception e) {
                        e.printStackTrace();
                        throw new RuntimeException(e);
                    }
                });
            }

            pool.shutdown();
            pool.awaitTermination(1, TimeUnit.MINUTES);
        }
    }

    @Test(enabled = true, expectedExceptions = IllegalArgumentException.class)
    public void testAllocationSmallerThanMinimalBlockSize() throws Exception {
        final Path databaseFile = dataDirectory.resolve("db.ds");

        try (DiskStorage ds = new DiskStorage(databaseFile, dataDirectory)) {

            final int blockSize = 31; // minimal block size is 32
            ds.allocateBlock(blockSize);
        }
    }

    @Test(enabled = true)
    public void testAllocateAndFreeSingleBlockInFreeList() throws Exception {
        final Path databaseFile = dataDirectory.resolve("db.ds");

        try (DiskStorage ds = new DiskStorage(databaseFile, dataDirectory)) {

            final int blockSize = 32;
            final long block_8_39 = ds.allocateBlock(blockSize);
            final long block_40_71 = ds.allocateBlock(blockSize);
            final long block_72_103 = ds.allocateBlock(blockSize);

            Assert.assertEquals(block_8_39, 8);
            Assert.assertEquals(block_40_71, 40);
            Assert.assertEquals(block_72_103, 72);

            ds.free(block_40_71, blockSize);

            // should reuse the block we just freed
            final long actual_block_40_71 = ds.allocateBlock(blockSize);

            Assert.assertEquals(actual_block_40_71, 40);
        }
    }

    @Test(enabled = true)
    public void testAllocateAndFreeMultipleBlocksInFreeList() throws Exception {
        final Path databaseFile = dataDirectory.resolve("db.ds");

        try (DiskStorage ds = new DiskStorage(databaseFile, dataDirectory)) {

            final int blockSize = 32;
            ds.allocateBlock(blockSize);
            final long block_40_71 = ds.allocateBlock(blockSize);
            final long block_72_103 = ds.allocateBlock(blockSize);
            final long block_104_135 = ds.allocateBlock(blockSize);
            ds.allocateBlock(blockSize);

            ds.free(block_72_103, blockSize);
            ds.free(block_104_135, blockSize);
            // the block with the smaller index is freed last; this increases line
            // coverage, because there is a branch for prepending the root node
            ds.free(block_40_71, blockSize);

            // should reuse the first block we just freed
            // this removes the root node of the free list
            final long actual_block_40_71 = ds.allocateBlock(blockSize);
            Assert.assertEquals(actual_block_40_71, 40);

            // should reuse the second block we just freed
            final long actual_block_72_103 = ds.allocateBlock(blockSize);
            Assert.assertEquals(actual_block_72_103, 72);

            // should reuse the third block we just freed
            // this removes the last node of the free list
            final long actual_block_104_135 = ds.allocateBlock(blockSize);
            Assert.assertEquals(actual_block_104_135, 104);

            final long block_168_199 = ds.allocateBlock(blockSize);
            Assert.assertEquals(block_168_199, 168);
        }
    }
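The allocate-and-free tests pin down an ordering contract: freed blocks are reused lowest offset first, and freeing a block with a smaller offset than the current head exercises a prepend branch. One data structure with exactly this behaviour is an offset-sorted singly linked list; the in-memory sketch below reproduces the asserted sequences (free 72, 104, then 40; allocate returns 40, 72, 104), while the real DiskStorage keeps its list inside the file itself.

    // Offset-sorted free list, in-memory illustration only.
    static final class FreeList {
        private static final class Node {
            final long offset;
            Node next;
            Node(final long offset, final Node next) { this.offset = offset; this.next = next; }
        }

        private Node root;

        void free(final long offset) {
            if (root == null || offset < root.offset) {
                root = new Node(offset, root); // prepend: the branch the comment above refers to
                return;
            }
            Node cur = root;
            while (cur.next != null && cur.next.offset < offset) {
                cur = cur.next; // walk to an insertion point in the middle of the list
            }
            cur.next = new Node(offset, cur.next);
        }

        long allocate() {
            if (root == null) {
                return -1; // nothing to reuse; the caller would extend the file instead
            }
            final long offset = root.offset; // reuse the lowest freed offset first
            root = root.next;                // removes the root node of the free list
            return offset;
        }
    }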
    @Test(enabled = true)
    public void testAllocateAndFreeInsertFreeNodeInTheMiddleOfTheFreeList() throws Exception {
        final Path databaseFile = dataDirectory.resolve("db.ds");

        try (DiskStorage ds = new DiskStorage(databaseFile, dataDirectory)) {

            final int blockSize = 32;
            ds.allocateBlock(blockSize);
            ds.allocateBlock(blockSize);
            final long block_72_103 = ds.allocateBlock(blockSize);
            final long block_104_135 = ds.allocateBlock(blockSize);
            final long block_136_167 = ds.allocateBlock(blockSize);

            // free the last block first, to increase code coverage
            ds.free(block_136_167, blockSize);
            ds.free(block_72_103, blockSize);
            ds.free(block_104_135, blockSize);

            // the first free block is re-used
            final long actual_block_72_103 = ds.allocateBlock(blockSize);
            Assert.assertEquals(actual_block_72_103, block_72_103);

            final long actual_block_104_135 = ds.allocateBlock(blockSize);
            Assert.assertEquals(actual_block_104_135, block_104_135);

            final long actual_block_136_167 = ds.allocateBlock(blockSize);
            Assert.assertEquals(actual_block_136_167, block_136_167);
        }
    }

    @Test(enabled = true)
    public void testAllocateAndFreeMultipleBlocksWithDifferentSizes() throws Exception {
        final Path databaseFile = dataDirectory.resolve("db.ds");

        try (DiskStorage ds = new DiskStorage(databaseFile, dataDirectory)) {

            final int blockSizeSmall = 32;
            final int blockSizeBig = 64;
            ds.allocateBlock(blockSizeSmall);
            ds.allocateBlock(blockSizeSmall);
            final long big_block_72_103 = ds.allocateBlock(blockSizeBig);
            final long small_block_136_167 = ds.allocateBlock(blockSizeSmall);
            ds.allocateBlock(blockSizeSmall);

            ds.free(big_block_72_103, blockSizeBig);
            ds.free(small_block_136_167, blockSizeSmall);

            final long actual_small_block_136_167 = ds.allocateBlock(blockSizeSmall);
            Assert.assertEquals(actual_small_block_136_167, small_block_136_167);
        }
    }

    // note: despite the name, this helper only logs the first mismatch to stderr;
    // it does not fail the test
    private void assertAllValuesAreEqual(final DiskBlock diskBlock, final byte expectedVal) {
        final byte[] buffer = diskBlock.getBuffer();
        for (int i = 0; i < buffer.length; i++) {
            if (expectedVal != buffer[i]) {
                System.err.println(
                        "block " + diskBlock.getBlockOffset() + " " + buffer[i] + " != " + expectedVal + " at " + i);
                break;
            }
        }
    }

    private void assertAllValuesAreEqual(final DiskBlock diskBlock) {

        final byte[] buffer = diskBlock.getBuffer();
        final byte expected = buffer[0];
        for (int i = 0; i < buffer.length; i++) {
            if (expected != buffer[i]) {
                System.err.println(
                        "block " + diskBlock.getBlockOffset() + " " + buffer[i] + " != " + expected + " at " + i);
                break;
            }
        }
    }

    private void fill(final DiskBlock diskBlock, final byte val) {
        final byte[] buffer = diskBlock.getBuffer();

        for (int i = 0; i < buffer.length; i++) {
            buffer[i] = val;
        }
    }

    private long[] allocateBlocks(final DiskStorage ds, final int numNewBlocks, final int blockSize)
            throws IOException {

        final long[] result = new long[numNewBlocks];
        for (int i = 0; i < numNewBlocks; i++) {
            final long blockOffset = ds.allocateBlock(blockSize);
            result[i] = blockOffset;
        }
        return result;
    }
}
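The last allocation test hints that the allocator matches free entries by block size: the freed 64 byte block is skipped when a 32 byte block is requested. A guess at one simple shape for that behaviour is a free queue per size; the actual on-disk bookkeeping may well differ.

    // Hypothetical size-keyed free lists; illustrates the behaviour asserted above,
    // not DiskStorage's real structure.
    private final Map<Integer, Deque<Long>> freeBySize = new HashMap<>();

    void free(final long offset, final int blockSize) {
        freeBySize.computeIfAbsent(blockSize, size -> new ArrayDeque<>()).addLast(offset);
    }

    long allocateFromFreeList(final int blockSize) {
        final Deque<Long> candidates = freeBySize.get(blockSize);
        if (candidates == null || candidates.isEmpty()) {
            return -1; // nothing of this size to reuse; extend the file instead
        }
        return candidates.pollFirst();
    }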
@@ -15,79 +15,79 @@ import java.util.concurrent.ThreadLocalRandom;

public class CsvTestDataCreator {

    private static final List<String> PODS = Arrays.asList("vapbrewe01", "vapfinra01", "vapondem01", "vapondem02",
            "vapondem03", "vapondem04", "vapnyse01", "vapnorto01", "vapfackb01", "vaprjrey01", "vadtrans01",
            "vadaxcel09", "vadaxcel66");
    private static final List<String> HOSTS = new ArrayList<>();
    private static final List<String> CLASSES = Arrays.asList("AuditLog", "Brava", "Collection", "Folder", "Field",
            "Tagging", "Arrangment", "Review", "Production", "ProductionExport", "View", "Jobs", "Navigation",
            "RecentNavigation", "Entity", "Search", "Tasks", "PcWorkflow", "Batch", "Matter");
    private static final List<String> ENDPOINTS = Arrays.asList("create", "remove", "update", "delete", "createBulk",
            "removeBulk", "deleteBulk", "list", "index", "listing", "all");
    private static final List<String> METHODS = new ArrayList<>();
    private static final List<String> PROJECTS = new ArrayList<>();
    private static final List<String> SOURCE = Arrays.asList("web", "service", "metrics");
    private static final List<String> BUILDS = new ArrayList<>();

    static {
        for (int i = 0; i < 500; i++) {
            BUILDS.add("AXC_5.15_" + i);
        }

        for (int i = 0; i < 500; i++) {
            HOSTS.add(UUID.randomUUID().toString().substring(1, 16));
            PROJECTS.add(UUID.randomUUID().toString().substring(1, 16) + "_Review");
        }

        for (final String clazz : CLASSES) {
            for (final String endpoint : ENDPOINTS) {
                METHODS.add(clazz + "Service." + endpoint);
                METHODS.add(clazz + "Controller." + endpoint);
            }
        }
    }

    public static void main(final String[] args) throws IOException {
        final Path testdataFile = Files.createTempFile("testData", ".csv");

        final ThreadLocalRandom r = ThreadLocalRandom.current();
        int lines = 0;

        try (FileWriter writer = new FileWriter(testdataFile.toFile())) {
            writer.append("@timestamp,duration,pod,host,method,project,source,build\n");

            for (lines = 0; lines < 1_000_000; lines++) {
                final String timestamp = Instant.ofEpochMilli(r.nextLong(1234567890L, 12345678901L))
                        .atOffset(ZoneOffset.UTC).format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
                final String duration = String.valueOf(r.nextInt(10000));
                final String pod = PODS.get(r.nextInt(PODS.size()));
                final String host = HOSTS.get(r.nextInt(HOSTS.size()));
                final String method = METHODS.get(r.nextInt(METHODS.size()));
                final String project = PROJECTS.get(r.nextInt(PROJECTS.size()));
                final String source = SOURCE.get(r.nextInt(SOURCE.size()));
                final String build = BUILDS.get(r.nextInt(BUILDS.size()));

                writer.append(timestamp);
                writer.append(",");
                writer.append(duration);
                writer.append(",");
                writer.append(pod);
                writer.append(",");
                writer.append(host);
                writer.append(",");
                writer.append(method);
                writer.append(",");
                writer.append(project);
                writer.append(",");
                writer.append(source);
                writer.append(",");
                writer.append(build);
                writer.append("\n");

                if (lines % 1000 == 0) {
                    System.out.println("lines: " + lines);
                }
            }
        }
    }
}
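The long writer.append chain builds one comma separated row per iteration. An equivalent, shorter formulation, assuming none of the generated values contains a comma (true for this data; general CSV would need quoting):

    // Same row as the append chain above, in one call.
    writer.append(String.join(",", timestamp, duration, pod, host, method, project, source, build));
    writer.append("\n");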
@@ -11,27 +11,27 @@ import org.testng.annotations.Test;

@Test
public class NodeEntryTest {

    @DataProvider
    public Object[][] providerPrefixCompare() {
        final List<Object[]> result = new ArrayList<>();

        result.add(new Object[] { "ab", "abc", -1 });
        result.add(new Object[] { "abb", "abc", -1 });
        result.add(new Object[] { "abc", "abc", 0 });
        result.add(new Object[] { "abcd", "abc", 0 });
        result.add(new Object[] { "abd", "abc", 1 });
        result.add(new Object[] { "abz", "abc", 23 });

        return result.toArray(Object[][]::new);
    }

    @Test(dataProvider = "providerPrefixCompare")
    public void testPrefixCompare(final String key, final String prefix, final int expected) {

        final NodeEntry nodeEntry = new NodeEntry(ValueType.NODE_POINTER, key.getBytes(StandardCharsets.UTF_8),
                new byte[0]);

        final int actual = nodeEntry.compareKeyPrefix(prefix.getBytes(StandardCharsets.UTF_8));
        Assert.assertEquals(actual, expected, key + " ? " + prefix);
    }
}
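The data provider fixes the contract of compareKeyPrefix: only the first prefix-length bytes are compared, so a key that merely starts with the prefix compares equal ("abcd" vs "abc" is 0), a strictly shorter key sorts first ("ab" vs "abc" is -1), and otherwise the result is the difference of the first differing bytes ('z' - 'c' is 23). A sketch consistent with those cases; NodeEntry's actual implementation is not part of this diff.

    // Prefix comparison matching the expected values in the data provider above.
    static int compareKeyPrefix(final byte[] key, final byte[] prefix) {
        final int limit = Math.min(key.length, prefix.length);
        for (int i = 0; i < limit; i++) {
            final int diff = key[i] - prefix[i];
            if (diff != 0) {
                return diff; // "abd" vs "abc" -> 1, "abz" vs "abc" -> 23
            }
        }
        // all compared bytes equal: a shorter key sorts before the prefix ("ab" vs "abc" -> -1)
        return key.length < prefix.length ? -1 : 0;
    }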
@@ -14,29 +14,29 @@ import org.testng.annotations.Test;

@Test
public class PersistentMapDiskNodeTest {

    public void serializeDeserialize() throws Exception {

        final List<NodeEntry> entries = new ArrayList<>();
        entries.add(newNode(ValueType.NODE_POINTER, "key1", "value1"));
        entries.add(newNode(ValueType.VALUE_INLINE, "key2_", "value2--"));
        entries.add(newNode(ValueType.NODE_POINTER, "key3__", "value3---"));
        entries.add(newNode(ValueType.VALUE_INLINE, "key4___", "value4----"));

        final long nodeOffset = ThreadLocalRandom.current().nextInt();
        final PersistentMapDiskNode node = new PersistentMapDiskNode(nodeOffset, entries, null);

        final byte[] buffer = node.serialize();

        final ByteBuffer byteBuffer = ByteBuffer.wrap(buffer);
        final PersistentMapDiskNode actualNode = PersistentMapDiskNode.parse(nodeOffset,
                new DiskBlock(nodeOffset, byteBuffer));

        Assert.assertEquals(actualNode.getEntries(), entries);
    }

    // note: the type parameter is currently ignored; every entry is created as
    // VALUE_INLINE, so the NODE_POINTER arguments above have no effect
    private static NodeEntry newNode(final ValueType type, final String key, final String value) {
        return new NodeEntry(ValueType.VALUE_INLINE, key.getBytes(StandardCharsets.UTF_8),
                value.getBytes(StandardCharsets.UTF_8));
    }

}
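The test above is a classic serialize/parse round trip. Using only the calls already shown in this diff, the same check can be phrased as a reusable helper, which makes it easy to add further entry combinations later:

    // Round-trip helper built from the APIs used above; illustration only.
    private static void assertRoundTrips(final long nodeOffset, final List<NodeEntry> entries) {
        final PersistentMapDiskNode node = new PersistentMapDiskNode(nodeOffset, entries, null);
        final byte[] buffer = node.serialize();
        final PersistentMapDiskNode parsed = PersistentMapDiskNode.parse(nodeOffset,
                new DiskBlock(nodeOffset, ByteBuffer.wrap(buffer)));
        Assert.assertEquals(parsed.getEntries(), entries);
    }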
@@ -24,368 +24,369 @@ import org.testng.annotations.Test;

@Test
public class PersistentMapTest {

    private Path dataDirectory;

    @BeforeMethod
    public void beforeMethod() throws IOException {
        dataDirectory = Files.createTempDirectory("pdb");
    }

    @AfterMethod
    public void afterMethod() throws IOException {
        FileUtils.delete(dataDirectory);
    }

    public void testSingleValue() throws Exception {
        final Path file = dataDirectory.resolve("map.db");
        final String value = "value1";
        final String key = "key1";

        try (final PersistentMap<String, String> map = new PersistentMap<>(file, dataDirectory,
                PersistentMap.STRING_CODER, PersistentMap.STRING_CODER)) {

            Assert.assertNull(map.getValue(key));

            Assert.assertNull(map.putValue(key, value));

            Assert.assertEquals(map.getValue(key), value);
        }
        try (final PersistentMap<String, String> map = new PersistentMap<>(file, dataDirectory,
                PersistentMap.STRING_CODER, PersistentMap.STRING_CODER)) {

            Assert.assertEquals(map.getValue(key), value);
        }
    }

    @Test(invocationCount = 1)
    public void testManyValues() throws Exception {
        final Path file = dataDirectory.resolve("map.db");
        final var insertedValues = new HashMap<String, String>();

        final Random rnd = new Random(1);

        try (final PersistentMap<String, String> map = new PersistentMap<>(file, dataDirectory,
                PersistentMap.STRING_CODER, PersistentMap.STRING_CODER)) {
            map.setMaxEntriesInNode(2);

            for (int i = 0; i < 100; i++) {
                // System.out.println("\n\ninserting: " + i);
                final UUID nextUUID = new UUID(rnd.nextLong(), rnd.nextLong());
                final String key = nextUUID.toString() + "__" + i;
                final String value = "long value to waste some bytes " + i + "__"
                        + UUID.randomUUID().toString().repeat(1);
                Assert.assertNull(map.getValue(key));

                Assert.assertNull(map.putValue(key, value));

                insertedValues.put(key, value);

                // map.print(PersistentMap.STRING_DECODER, PersistentMap.STRING_DECODER);

                final boolean failEarly = false;
                if (failEarly) {
                    for (final var entry : insertedValues.entrySet()) {
                        final String actualValue = map.getValue(entry.getKey());

                        if (!Objects.equals(actualValue, entry.getValue())) {
                            map.print();
                        }

                        Assert.assertEquals(actualValue, entry.getValue(),
                                "value for key " + entry.getKey() + " in the " + i + "th iteration");
                    }
                }
            }
        }

        try (final PersistentMap<String, String> map = new PersistentMap<>(file, dataDirectory,
                PersistentMap.STRING_CODER, PersistentMap.STRING_CODER)) {
            // map.print(PersistentMap.STRING_DECODER, PersistentMap.STRING_DECODER);
            final AtomicInteger maxDepth = new AtomicInteger();
            map.visitNodeEntriesPreOrder(
                    (node, parentNode, nodeEntry, depth) -> maxDepth.set(Math.max(depth, maxDepth.get())));

            Assert.assertTrue(maxDepth.get() >= 4,
                    "the tree must reach at least depth 4, so that we can be sure that "
                            + "splitting parent nodes works recursively, but the depth was "
                            + maxDepth.get());

            for (final var entry : insertedValues.entrySet()) {
                final String actualValue = map.getValue(entry.getKey());
                Assert.assertEquals(actualValue, entry.getValue(),
                        "value for key " + entry.getKey() + " after all iterations");
            }
        }
    }
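Why at least depth 4: setMaxEntriesInNode(2) caps the fan-out, so the 100 inserted keys cannot fit in a shallow tree. With fan-out f a tree of depth d holds on the order of f^d entries, so d must be at least log_f(n); a quick check, assuming the fan-out is effectively 2 here:

    // Back-of-the-envelope bound for the depth assertion above (assumption: fan-out 2).
    final int keys = 100;
    final double minDepth = Math.log(keys) / Math.log(2); // about 6.6, safely above 4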
@Test(invocationCount = 1)
|
||||
public void testManySmallValues() throws Exception {
|
||||
final Path file = dataDirectory.resolve("map.db");
|
||||
final var insertedValues = new HashMap<Long, Long>();
|
||||
|
||||
final SecureRandom rnd = new SecureRandom();
|
||||
rnd.setSeed(1);
|
||||
|
||||
try (final PersistentMap<Long, Long> map = new PersistentMap<>(file,dataDirectory, PersistentMap.LONG_CODER,
|
||||
PersistentMap.LONG_CODER)) {
|
||||
|
||||
for (int i = 0; i < 1000; i++) {
|
||||
// System.out.println("\n\ninserting: " + i);
|
||||
|
||||
final Long key = (long) (rnd.nextGaussian() * Integer.MAX_VALUE);
|
||||
final Long value = (long) (rnd.nextGaussian() * Integer.MAX_VALUE);
|
||||
Assert.assertNull(map.getValue(key));
|
||||
|
||||
Assert.assertNull(map.putValue(key, value));
|
||||
|
||||
insertedValues.put(key, value);
|
||||
|
||||
// map.print();
|
||||
|
||||
final boolean failEarly = false;
|
||||
if (failEarly) {
|
||||
for (final var entry : insertedValues.entrySet()) {
|
||||
final Long actualValue = map.getValue(entry.getKey());
|
||||
|
||||
if (!Objects.equals(actualValue, entry.getValue())) {
|
||||
map.print();
|
||||
}
|
||||
|
||||
Assert.assertEquals(actualValue, entry.getValue(),
|
||||
"value for key " + entry.getKey() + " in the " + i + "th iteration");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
try (final PersistentMap<Long, Long> map = new PersistentMap<>(file,dataDirectory, PersistentMap.LONG_CODER,
|
||||
PersistentMap.LONG_CODER)) {
|
||||
// map.print(PersistentMap.LONG_DECODER, PersistentMap.LONG_DECODER);
|
||||
final AtomicInteger counter = new AtomicInteger();
|
||||
map.visitNodeEntriesPreOrder(
|
||||
(node, parentNode, nodeEntry, depth) -> counter.addAndGet(nodeEntry.isInnerNode() ? 1 : 0));
|
||||
|
||||
Assert.assertEquals(counter.get(), 4,
|
||||
"number of nodes should be small. Any number larger than 4 indicates, "
|
||||
+ "that new inner nodes are created even though the existing inner "
|
||||
+ "nodes could hold the values");
|
||||
|
||||
for (final var entry : insertedValues.entrySet()) {
|
||||
final Long actualValue = map.getValue(entry.getKey());
|
||||
Assert.assertEquals(actualValue, entry.getValue(),
|
||||
"value for key " + entry.getKey() + " after all iterations");
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
@Test(invocationCount = 1)
|
||||
public void testManyEmptyValues() throws Exception {
|
||||
final Path file = dataDirectory.resolve("map.db");
|
||||
final var insertedValues = new HashMap<Long, Empty>();
|
||||
|
||||
final SecureRandom rnd = new SecureRandom();
|
||||
rnd.setSeed(1);
|
||||
|
||||
try (final PersistentMap<Long, Empty> map = new PersistentMap<>(file,dataDirectory, PersistentMap.LONG_CODER,
|
||||
PersistentMap.EMPTY_ENCODER)) {
|
||||
|
||||
for (int i = 0; i < 1500; i++) {
|
||||
// System.out.println("\n\ninserting: " + i);
|
||||
|
||||
final Long key = (long) (rnd.nextGaussian() * Integer.MAX_VALUE);
|
||||
final Empty value = Empty.INSTANCE;
|
||||
Assert.assertNull(map.getValue(key));
|
||||
|
||||
Assert.assertNull(map.putValue(key, value));
|
||||
|
||||
insertedValues.put(key, value);
|
||||
|
||||
// map.print();
|
||||
|
||||
final boolean failEarly = false;
|
||||
if (failEarly) {
|
||||
for (final var entry : insertedValues.entrySet()) {
|
||||
final Empty actualValue = map.getValue(entry.getKey());
|
||||
|
||||
if (!Objects.equals(actualValue, entry.getValue())) {
|
||||
map.print();
|
||||
}
|
||||
|
||||
Assert.assertEquals(actualValue, entry.getValue(),
|
||||
"value for key " + entry.getKey() + " in the " + i + "th iteration");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
try (final PersistentMap<Long, Empty> map = new PersistentMap<>(file,dataDirectory, PersistentMap.LONG_CODER,
|
||||
PersistentMap.EMPTY_ENCODER)) {
|
||||
map.print();
|
||||
final AtomicInteger counter = new AtomicInteger();
|
||||
map.visitNodeEntriesPreOrder(
|
||||
(node, parentNode, nodeEntry, depth) -> counter.addAndGet(nodeEntry.isInnerNode() ? 1 : 0));
|
||||
|
||||
Assert.assertEquals(counter.get(), 4,
|
||||
"number of nodes should be small. Any number larger than 4 indicates, "
|
||||
+ "that new inner nodes are created even though the existing inner "
|
||||
+ "nodes could hold the values");
|
||||
|
||||
for (final var entry : insertedValues.entrySet()) {
|
||||
final Empty actualValue = map.getValue(entry.getKey());
|
||||
Assert.assertEquals(actualValue, entry.getValue(),
|
||||
"value for key " + entry.getKey() + " after all iterations");
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@Test(invocationCount = 1)
|
||||
public void testEasyValues() throws Exception {
|
||||
final Path file = dataDirectory.resolve("map.db");
|
||||
final var insertedValues = new HashMap<String, String>();
|
||||
|
||||
final Queue<Integer> numbers = new LinkedList<>(Arrays.asList(1, 15, 11, 4, 16, 3, 13));
|
||||
|
||||
try (final PersistentMap<String, String> map = new PersistentMap<>(file,dataDirectory, PersistentMap.STRING_CODER,
|
||||
PersistentMap.STRING_CODER)) {
|
||||
|
||||
final int numbersSize = numbers.size();
|
||||
for (int i = 0; i < numbersSize; i++) {
|
||||
|
||||
final Integer keyNumber = numbers.poll();
|
||||
// System.out.println("\n\ninserting: " + keyNumber);
|
||||
|
||||
final String key = "" + keyNumber;
|
||||
final String value = "value";
|
||||
Assert.assertNull(map.getValue(key));
|
||||
|
||||
Assert.assertNull(map.putValue(key, value));
|
||||
|
||||
insertedValues.put(key, value);
|
||||
|
||||
// map.print(PersistentMap.STRING_DECODER, PersistentMap.STRING_DECODER);
|
||||
|
||||
for (final var entry : insertedValues.entrySet()) {
|
||||
final String actualValue = map.getValue(entry.getKey());
|
||||
|
||||
Assert.assertEquals(actualValue, entry.getValue(),
|
||||
"value for key " + entry.getKey() + " in the " + i + "th iteration");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
try (final PersistentMap<String, String> map = new PersistentMap<>(file,dataDirectory, PersistentMap.STRING_CODER,
|
||||
PersistentMap.STRING_CODER)) {
|
||||
// map.print(PersistentMap.STRING_DECODER, PersistentMap.STRING_DECODER);
|
||||
|
||||
final AtomicInteger counter = new AtomicInteger();
|
||||
map.visitNodeEntriesPreOrder(
|
||||
(node, parentNode, nodeEntry, depth) -> counter.addAndGet(nodeEntry.isInnerNode() ? 1 : 0));
|
||||
|
||||
for (final var entry : insertedValues.entrySet()) {
|
||||
final String actualValue = map.getValue(entry.getKey());
|
||||
Assert.assertEquals(actualValue, entry.getValue(),
|
||||
"value for key " + entry.getKey() + " after all iterations");
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
    @Test
    public void testFindAllByPrefix() throws Exception {
        final Path file = dataDirectory.resolve("map.db");

        final Map<String, String> expectedBar = new HashMap<>();
        for (int i = 0; i < 100; i++) {
            // the value is a little bit longer to make sure that the values don't fit into
            // a single leaf node
            expectedBar.put("bar:" + i, "bar:" + i + "__##################################");
        }

        final Map<String, String> input = new HashMap<>();
        input.putAll(expectedBar);
        for (int i = 0; i < 500; i++) {
            input.put(UUID.randomUUID().toString(), UUID.randomUUID().toString());
        }

        try (final PersistentMap<String, String> map = new PersistentMap<>(file,dataDirectory, PersistentMap.STRING_CODER,
                PersistentMap.STRING_CODER)) {

            map.putAllValues(input);
        }

        try (final PersistentMap<String, String> map = new PersistentMap<>(file,dataDirectory, PersistentMap.STRING_CODER,
                PersistentMap.STRING_CODER)) {

            {
                final LinkedHashMap<String, String> actualBar = new LinkedHashMap<>();
                final Visitor<String, String> visitor = (key, value) -> actualBar.put(key, value);
                map.visitValues("bar:", visitor);

                Assert.assertEquals(actualBar, expectedBar);
            }
        }
    }

    @Test(invocationCount = 1)
    public void testLotsOfValues() throws Exception {
        final Path file = dataDirectory.resolve("map.db");
        final var insertedValues = new HashMap<Long, Long>();

        final SecureRandom rnd = new SecureRandom();
        rnd.setSeed(1);

        try (final PersistentMap<Long, Long> map = new PersistentMap<>(file,dataDirectory, PersistentMap.LONG_CODER,
                PersistentMap.LONG_CODER)) {

            for (int i = 0; i < 1_000; i++) {

                final Long key = (long) (rnd.nextGaussian() * Integer.MAX_VALUE);
                final Long value = (long) (rnd.nextGaussian() * Integer.MAX_VALUE);

                if (insertedValues.containsKey(key)) {
                    continue;
                }

                Assert.assertNull(map.putValue(key, value));

                insertedValues.put(key, value);

                final boolean failEarly = false;
                if (failEarly) {
                    for (final var entry : insertedValues.entrySet()) {
                        final Long actualValue = map.getValue(entry.getKey());

                        if (!Objects.equals(actualValue, entry.getValue())) {
                            map.print();
                        }

                        Assert.assertEquals(actualValue, entry.getValue(),
                                "value for key " + entry.getKey() + " in the " + i + "th iteration");
                    }
                }
            }
        }

        try (final PersistentMap<Long, Long> map = new PersistentMap<>(file,dataDirectory, PersistentMap.LONG_CODER,
                PersistentMap.LONG_CODER)) {
            final AtomicInteger counter = new AtomicInteger();
            final AtomicInteger maxDepth = new AtomicInteger();
            map.visitNodeEntriesPreOrder((node, parentNode, nodeEntry, depth) -> {
                counter.addAndGet(nodeEntry.isInnerNode() ? 1 : 0);
                maxDepth.set(Math.max(maxDepth.get(), depth));
            });

            final long start = System.nanoTime();
            for (final var entry : insertedValues.entrySet()) {
                final Long actualValue = map.getValue(entry.getKey());
                Assert.assertEquals(actualValue, entry.getValue(),
                        "value for key " + entry.getKey() + " after all iterations");
            }
            System.out.println("nodes=" + counter.get() + ", depth=" + maxDepth.get() + ": "
                    + (System.nanoTime() - start) / 1_000_000.0 + "ms");
        }
    }

        final Random rnd = new Random(1);

        try (final PersistentMap<String, String> map = new PersistentMap<>(file, dataDirectory,
                PersistentMap.STRING_CODER, PersistentMap.STRING_CODER)) {
            // only two entries per node, so inserting 100 keys forces frequent node splits
            map.setMaxEntriesInNode(2);

            for (int i = 0; i < 100; i++) {
                // System.out.println("\n\ninserting: " + i);
                final UUID nextUUID = new UUID(rnd.nextLong(), rnd.nextLong());
                final String key = nextUUID.toString() + "__" + i;
                final String value = "long value to waste some bytes " + i + "__"
                        + UUID.randomUUID().toString().repeat(1);
                Assert.assertNull(map.getValue(key));

                Assert.assertNull(map.putValue(key, value));

                insertedValues.put(key, value);

                // map.print(PersistentMap.STRING_DECODER, PersistentMap.STRING_DECODER);

                final boolean failEarly = false;
                if (failEarly) {
                    for (final var entry : insertedValues.entrySet()) {
                        final String actualValue = map.getValue(entry.getKey());

                        if (!Objects.equals(actualValue, entry.getValue())) {
                            map.print();
                        }

                        Assert.assertEquals(actualValue, entry.getValue(),
                                "value for key " + entry.getKey() + " in the " + i + "th iteration");
                    }
                }
            }
        }

        try (final PersistentMap<String, String> map = new PersistentMap<>(file, dataDirectory,
                PersistentMap.STRING_CODER, PersistentMap.STRING_CODER)) {
            // map.print(PersistentMap.STRING_DECODER, PersistentMap.STRING_DECODER);
            final AtomicInteger maxDepth = new AtomicInteger();
            map.visitNodeEntriesPreOrder(
                    (node, parentNode, nodeEntry, depth) -> maxDepth.set(Math.max(depth, maxDepth.get())));

            Assert.assertTrue(maxDepth.get() >= 4,
                    "the tree must reach at least depth 4 so that we can be sure "
                            + "that splitting parent nodes works recursively, but was "
                            + maxDepth.get());

            for (final var entry : insertedValues.entrySet()) {
                final String actualValue = map.getValue(entry.getKey());
                Assert.assertEquals(actualValue, entry.getValue(),
                        "value for key " + entry.getKey() + " after all iterations");
            }

        }
    }

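    /**
     * Inserts 1000 seeded-random Long/Long pairs, then re-opens the map and checks
     * that every entry is still readable and that no more than 4 inner nodes exist,
     * presumably to catch unnecessary inner-node allocations for small fixed-size
     * values.
     */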
    @Test(invocationCount = 1)
    public void testManySmallValues() throws Exception {
        final Path file = dataDirectory.resolve("map.db");
        final var insertedValues = new HashMap<Long, Long>();

        final SecureRandom rnd = new SecureRandom();
        rnd.setSeed(1);

        try (final PersistentMap<Long, Long> map = new PersistentMap<>(file, dataDirectory, PersistentMap.LONG_CODER,
                PersistentMap.LONG_CODER)) {

            for (int i = 0; i < 1000; i++) {
                // System.out.println("\n\ninserting: " + i);

                final Long key = (long) (rnd.nextGaussian() * Integer.MAX_VALUE);
                final Long value = (long) (rnd.nextGaussian() * Integer.MAX_VALUE);
                Assert.assertNull(map.getValue(key));

                Assert.assertNull(map.putValue(key, value));

                insertedValues.put(key, value);

                // map.print();

                final boolean failEarly = false;
                if (failEarly) {
                    for (final var entry : insertedValues.entrySet()) {
                        final Long actualValue = map.getValue(entry.getKey());

                        if (!Objects.equals(actualValue, entry.getValue())) {
                            map.print();
                        }

                        Assert.assertEquals(actualValue, entry.getValue(),
                                "value for key " + entry.getKey() + " in the " + i + "th iteration");
                    }
                }
            }
        }

        try (final PersistentMap<Long, Long> map = new PersistentMap<>(file, dataDirectory, PersistentMap.LONG_CODER,
                PersistentMap.LONG_CODER)) {
            // map.print(PersistentMap.LONG_DECODER, PersistentMap.LONG_DECODER);
            final AtomicInteger counter = new AtomicInteger();
            map.visitNodeEntriesPreOrder(
                    (node, parentNode, nodeEntry, depth) -> counter.addAndGet(nodeEntry.isInnerNode() ? 1 : 0));

            Assert.assertEquals(counter.get(), 4,
                    "number of inner nodes should be small; any number larger than 4 indicates "
                            + "that new inner nodes are created even though the existing inner "
                            + "nodes could hold the values");

            for (final var entry : insertedValues.entrySet()) {
                final Long actualValue = map.getValue(entry.getKey());
                Assert.assertEquals(actualValue, entry.getValue(),
                        "value for key " + entry.getKey() + " after all iterations");
            }

        }
    }

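    /**
     * Same shape as testManySmallValues, but stores 1500 keys with the Empty
     * placeholder value, i.e. the map is effectively used as a persistent set;
     * the inner-node count is again expected to stay at 4.
     */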
    @Test(invocationCount = 1)
    public void testManyEmptyValues() throws Exception {
        final Path file = dataDirectory.resolve("map.db");
        final var insertedValues = new HashMap<Long, Empty>();

        final SecureRandom rnd = new SecureRandom();
        rnd.setSeed(1);

        try (final PersistentMap<Long, Empty> map = new PersistentMap<>(file, dataDirectory, PersistentMap.LONG_CODER,
                PersistentMap.EMPTY_ENCODER)) {

            for (int i = 0; i < 1500; i++) {
                // System.out.println("\n\ninserting: " + i);

                final Long key = (long) (rnd.nextGaussian() * Integer.MAX_VALUE);
                final Empty value = Empty.INSTANCE;
                Assert.assertNull(map.getValue(key));

                Assert.assertNull(map.putValue(key, value));

                insertedValues.put(key, value);

                // map.print();

                final boolean failEarly = false;
                if (failEarly) {
                    for (final var entry : insertedValues.entrySet()) {
                        final Empty actualValue = map.getValue(entry.getKey());

                        if (!Objects.equals(actualValue, entry.getValue())) {
                            map.print();
                        }

                        Assert.assertEquals(actualValue, entry.getValue(),
                                "value for key " + entry.getKey() + " in the " + i + "th iteration");
                    }
                }
            }
        }

        try (final PersistentMap<Long, Empty> map = new PersistentMap<>(file, dataDirectory, PersistentMap.LONG_CODER,
                PersistentMap.EMPTY_ENCODER)) {
            map.print();
            final AtomicInteger counter = new AtomicInteger();
            map.visitNodeEntriesPreOrder(
                    (node, parentNode, nodeEntry, depth) -> counter.addAndGet(nodeEntry.isInnerNode() ? 1 : 0));

            Assert.assertEquals(counter.get(), 4,
                    "number of inner nodes should be small; any number larger than 4 indicates "
                            + "that new inner nodes are created even though the existing inner "
                            + "nodes could hold the values");

            for (final var entry : insertedValues.entrySet()) {
                final Empty actualValue = map.getValue(entry.getKey());
                Assert.assertEquals(actualValue, entry.getValue(),
                        "value for key " + entry.getKey() + " after all iterations");
            }

        }
    }

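    /**
     * Inserts a small, fixed key sequence (1, 15, 11, 4, 16, 3, 13) and verifies all
     * previously inserted entries after every single put, then re-opens the map and
     * verifies the entries again after the close/re-open cycle.
     */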
    @Test(invocationCount = 1)
    public void testEasyValues() throws Exception {
        final Path file = dataDirectory.resolve("map.db");
        final var insertedValues = new HashMap<String, String>();

        final Queue<Integer> numbers = new LinkedList<>(Arrays.asList(1, 15, 11, 4, 16, 3, 13));

        try (final PersistentMap<String, String> map = new PersistentMap<>(file, dataDirectory,
                PersistentMap.STRING_CODER, PersistentMap.STRING_CODER)) {

            final int numbersSize = numbers.size();
            for (int i = 0; i < numbersSize; i++) {

                final Integer keyNumber = numbers.poll();
                // System.out.println("\n\ninserting: " + keyNumber);

                final String key = "" + keyNumber;
                final String value = "value";
                Assert.assertNull(map.getValue(key));

                Assert.assertNull(map.putValue(key, value));

                insertedValues.put(key, value);

                // map.print(PersistentMap.STRING_DECODER, PersistentMap.STRING_DECODER);

                for (final var entry : insertedValues.entrySet()) {
                    final String actualValue = map.getValue(entry.getKey());

                    Assert.assertEquals(actualValue, entry.getValue(),
                            "value for key " + entry.getKey() + " in the " + i + "th iteration");
                }
            }
        }

        try (final PersistentMap<String, String> map = new PersistentMap<>(file, dataDirectory,
                PersistentMap.STRING_CODER, PersistentMap.STRING_CODER)) {
            // map.print(PersistentMap.STRING_DECODER, PersistentMap.STRING_DECODER);

            final AtomicInteger counter = new AtomicInteger();
            map.visitNodeEntriesPreOrder(
                    (node, parentNode, nodeEntry, depth) -> counter.addAndGet(nodeEntry.isInnerNode() ? 1 : 0));

            for (final var entry : insertedValues.entrySet()) {
                final String actualValue = map.getValue(entry.getKey());
                Assert.assertEquals(actualValue, entry.getValue(),
                        "value for key " + entry.getKey() + " after all iterations");
            }

        }
    }

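    /**
     * Writes 100 "bar:"-prefixed entries plus 500 random UUID entries, then re-opens
     * the map and collects all values under the "bar:" prefix via visitValues,
     * expecting exactly the "bar:" entries back.
     */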
    @Test
    public void testFindAllByPrefix() throws Exception {
        final Path file = dataDirectory.resolve("map.db");

        final Map<String, String> expectedBar = new HashMap<>();
        for (int i = 0; i < 100; i++) {
            // the value is a little bit longer to make sure that the values don't fit into
            // a single leaf node
            expectedBar.put("bar:" + i, "bar:" + i + "__##################################");
        }

        final Map<String, String> input = new HashMap<>();
        input.putAll(expectedBar);
        for (int i = 0; i < 500; i++) {
            input.put(UUID.randomUUID().toString(), UUID.randomUUID().toString());
        }

        try (final PersistentMap<String, String> map = new PersistentMap<>(file, dataDirectory,
                PersistentMap.STRING_CODER, PersistentMap.STRING_CODER)) {

            map.putAllValues(input);
        }

        try (final PersistentMap<String, String> map = new PersistentMap<>(file, dataDirectory,
                PersistentMap.STRING_CODER, PersistentMap.STRING_CODER)) {

            {
                final LinkedHashMap<String, String> actualBar = new LinkedHashMap<>();
                final Visitor<String, String> visitor = (key, value) -> actualBar.put(key, value);
                map.visitValues("bar:", visitor);

                Assert.assertEquals(actualBar, expectedBar);
            }
        }
    }

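    /**
     * Inserts up to 1_000 gaussian-distributed Long keys (duplicates are skipped),
     * then re-opens the map, measures the time to read every entry back, and prints
     * the inner-node count and the maximum tree depth seen during a pre-order visit.
     */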
    @Test(invocationCount = 1)
    public void testLotsOfValues() throws Exception {
        final Path file = dataDirectory.resolve("map.db");
        final var insertedValues = new HashMap<Long, Long>();

        final SecureRandom rnd = new SecureRandom();
        rnd.setSeed(1);

        try (final PersistentMap<Long, Long> map = new PersistentMap<>(file, dataDirectory, PersistentMap.LONG_CODER,
                PersistentMap.LONG_CODER)) {

            for (int i = 0; i < 1_000; i++) {

                final Long key = (long) (rnd.nextGaussian() * Integer.MAX_VALUE);
                final Long value = (long) (rnd.nextGaussian() * Integer.MAX_VALUE);

                if (insertedValues.containsKey(key)) {
                    continue;
                }

                Assert.assertNull(map.putValue(key, value));

                insertedValues.put(key, value);

                final boolean failEarly = false;
                if (failEarly) {
                    for (final var entry : insertedValues.entrySet()) {
                        final Long actualValue = map.getValue(entry.getKey());

                        if (!Objects.equals(actualValue, entry.getValue())) {
                            map.print();
                        }

                        Assert.assertEquals(actualValue, entry.getValue(),
                                "value for key " + entry.getKey() + " in the " + i + "th iteration");
                    }
                }
            }
        }

        try (final PersistentMap<Long, Long> map = new PersistentMap<>(file, dataDirectory, PersistentMap.LONG_CODER,
                PersistentMap.LONG_CODER)) {
            final AtomicInteger counter = new AtomicInteger();
            final AtomicInteger maxDepth = new AtomicInteger();
            map.visitNodeEntriesPreOrder((node, parentNode, nodeEntry, depth) -> {
                counter.addAndGet(nodeEntry.isInnerNode() ? 1 : 0);
                maxDepth.set(Math.max(maxDepth.get(), depth));
            });

            final long start = System.nanoTime();
            for (final var entry : insertedValues.entrySet()) {
                final Long actualValue = map.getValue(entry.getKey());
                Assert.assertEquals(actualValue, entry.getValue(),
                        "value for key " + entry.getKey() + " after all iterations");
            }
            System.out.println("nodes=" + counter.get() + ", depth=" + maxDepth.get() + ": "
                    + (System.nanoTime() - start) / 1_000_000.0 + "ms");
        }
    }

}