@@ -1,96 +0,0 @@
package org.lucares.performance.db;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.lucares.pdb.datastore.Entry;
import org.lucares.pdb.datastore.PdbIndexId;

/**
 * Wrapper for a chunk of {@link Entry}s.
 * <p>
 * Instances of this class are meant to be added to the queue returned by
 * PerformanceDb.getQueue(). Processing {@link Entry}s in chunks is more
 * efficient than processing each one individually.
 * <p>
 * Optionally, you can request that the entries be flushed to disk by
 * calling {@link #forceFlush()} before adding this object to the queue.
 * <p>
 * Optionally, this class can act like a future. This is useful if you have to
 * wait until the entries have been processed. Use {@link #forceFlush()} and
 * {@link #waitUntilFlushed(long, TimeUnit)}.
 */
public class Entries implements Iterable<Entry> {
    /**
     * A special {@link Entries} instance that can be used as poison object for
     * blocking queues.
     */
    public static final Entries POISON = new Entries(new PdbIndexId("poison"), 0);

    private final List<Entry> entries;

    private boolean forceFlush = false;

    private CountDownLatch flushLatch = null;

    private final PdbIndexId index;

    public Entries(final PdbIndexId index, final int initialSize) {
        this.index = index;
        entries = new ArrayList<>(initialSize);
    }

    public Entries(final PdbIndexId index, final Entry... entries) {
        this.index = index;
        this.entries = new ArrayList<>(Arrays.asList(entries));
    }

    public Entries(final PdbIndexId index, final Collection<Entry> entries) {
        this.index = index;
        this.entries = new ArrayList<>(entries);
    }

    public void add(final Entry entry) {
        entries.add(entry);
    }

    @Override
    public Iterator<Entry> iterator() {
        return entries.iterator();
    }

    public int size() {
        return entries.size();
    }

    public boolean isForceFlush() {
        return forceFlush;
    }

    public void forceFlush() {
        forceFlush = true;
        flushLatch = new CountDownLatch(1);
    }

    public void waitUntilFlushed(final long timeout, final TimeUnit unit)
            throws InterruptedException, TimeoutException {
        final boolean finished = flushLatch.await(timeout, unit);
        if (!finished) {
            throw new TimeoutException();
        }
    }

    public void notifyFlushed() {
        flushLatch.countDown();
    }

    public PdbIndexId getIndex() {
        return index;
    }
}

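As a rough illustration of the flush-and-wait behaviour described in the Javadoc above, a minimal usage sketch might look as follows. It assumes the blocking queue that the Javadoc says PerformanceDb.getQueue() returns (java.util.concurrent.BlockingQueue), and Tags/Entry values built the same way as in the tests further down; the method name is illustrative and imports are omitted.

    // Sketch only: hand one chunk to the writer thread, request a flush, and wait
    // for the consumer to call notifyFlushed() once the data has been written.
    static void writeAndWait(final BlockingQueue<Entries> queue, final Tags tags)
            throws InterruptedException, TimeoutException {
        final Entries entries = new Entries(new PdbIndexId("example"), 1);
        entries.add(new Entry(System.currentTimeMillis(), 42L, tags));
        entries.forceFlush();                           // arms the internal CountDownLatch
        queue.put(entries);                             // consumer flushes and calls notifyFlushed()
        entries.waitUntilFlushed(30, TimeUnit.SECONDS); // throws TimeoutException if the flush is late
    }
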
@@ -2,16 +2,14 @@ package org.lucares.performance.db;

import java.util.Iterator;

import org.lucares.pdb.datastore.Entries;
import org.lucares.pdb.datastore.Entry;
import org.lucares.pdb.datastore.PdbIndexId;

public class EntryToEntriesIterator implements Iterator<Entries> {

    private final Iterator<Entry> entryIterator;
    private final PdbIndexId indexId;

    public EntryToEntriesIterator(final PdbIndexId indexId, final Iterator<Entry> entryIterator) {
        this.indexId = indexId;
    public EntryToEntriesIterator(final Iterator<Entry> entryIterator) {
        this.entryIterator = entryIterator;
    }

@@ -22,7 +20,7 @@ public class EntryToEntriesIterator implements Iterator<Entries> {

    @Override
    public Entries next() {
        return new Entries(indexId, entryIterator.next());
        return new Entries(entryIterator.next());
    }

}

@@ -0,0 +1,186 @@
package org.lucares.performance.db;

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Duration;
import java.time.OffsetDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Stream;
import java.util.zip.GZIPOutputStream;

import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.core.config.Configurator;
import org.lucares.collections.LongList;
import org.lucares.pdb.api.DateTimeRange;
import org.lucares.pdb.api.Query;
import org.lucares.pdb.api.Tags;
import org.lucares.pdb.datastore.PdbFile;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class PdbExport {

    private static final int KB = 1024;
    private static final int MB = KB * 1024;
    private static final int GB = MB * 1024;

    public static final char MAGIC_BYTE = '#';
    public static final char MARKER_DICT_ENTRY_CHAR = '$';
    public static final String MARKER_DICT_ENTRY = String.valueOf(MARKER_DICT_ENTRY_CHAR);
    public static final char SEPARATOR_TAG_ID_CHAR = ':';
    public static final String SEPARATOR_TAG_ID = String.valueOf(SEPARATOR_TAG_ID_CHAR);

    private static final Logger LOGGER = LoggerFactory.getLogger(PdbExport.class);

    public static void main(final String[] args) throws Exception {

        initLogging();

        final Path dataDirectory = Paths.get(args[0]);
        final Path backupDir = Paths.get(args[1])
                .resolve(OffsetDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss")));

        export(dataDirectory, backupDir);
    }

    public static List<Path> export(final Path dataDirectory, final Path backupDir) throws Exception {
        final List<Path> exportFiles = new ArrayList<>();
        Files.createDirectories(backupDir);

        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                LOGGER.info("shutdown hook");
            }

        });

        final OffsetDateTime start = OffsetDateTime.now();
        final String datePrefix = start.format(DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss"));
        final AtomicLong tagsIdCounter = new AtomicLong(0);
        long exportFileCounter = 0;

        Path exportFile = null;
        Writer writer = null;

        try (final PerformanceDb db = new PerformanceDb(dataDirectory)) {

            LOGGER.info("Searching for all files. This may take a while ...");
            final List<PdbFile> pdbFiles = db.getFilesForQuery(new Query("", DateTimeRange.max()));

            long count = 0;
            long lastEpochMilli = 0;
            long begin = System.currentTimeMillis();

            for (final PdbFile pdbFile : pdbFiles) {

                if (writer == null || Files.size(exportFile) > 4 * GB) {
                    if (writer != null) {
                        writer.flush();
                        writer.close();
                    }
                    exportFile = backupDir
                            .resolve(String.format(Locale.US, "%s.%05d.pdb.gz", datePrefix, exportFileCounter++));
                    exportFiles.add(exportFile);
                    writer = createWriter(exportFile);
                    LOGGER.info("new export file: {}", exportFile);

                    lastEpochMilli = 0;
                }

                final Stream<LongList> timeValueStream = PdbFile.toStream(Arrays.asList(pdbFile), db.getDataStore());

                final Tags tags = pdbFile.getTags();
                final long tagsId = addNewTagsToDictionary(writer, tags, tagsIdCounter);

                final Iterator<LongList> it = timeValueStream.iterator();
                while (it.hasNext()) {
                    final LongList entry = it.next();

                    for (int i = 0; i < entry.size(); i += 2) {

                        final long epochMilli = entry.get(i);
                        final long value = entry.get(i + 1);

                        final long epochMilliDiff = epochMilli - lastEpochMilli;
                        lastEpochMilli = epochMilli;

                        writer.write(Long.toString(epochMilliDiff));
                        writer.write(',');
                        writer.write(Long.toString(value));
                        writer.write(',');
                        writer.write(Long.toString(tagsId));
                        writer.write('\n');

                        count++;
                        final long chunk = 10_000_000;
                        if (count % chunk == 0) {
                            final long end = System.currentTimeMillis();
                            final long duration = end - begin;
                            final long entriesPerSecond = (long) (chunk / (duration / 1000.0));
                            LOGGER.info("progress: {} - {} entries/s - duration {}ms",
                                    String.format(Locale.US, "%,d", count),
                                    String.format(Locale.US, "%,d", entriesPerSecond), duration);
                            begin = System.currentTimeMillis();
                        }
                    }
                }
            }

            LOGGER.info("total: " + count);

        } finally {
            if (writer != null) {
                writer.close();
            }
        }

        final OffsetDateTime end = OffsetDateTime.now();

        LOGGER.info("duration: " + Duration.between(start, end));
        return exportFiles;
    }

    private static void initLogging() {
        Configurator.setRootLevel(Level.INFO);
    }

    private static long addNewTagsToDictionary(final Writer writer, final Tags tags, final AtomicLong tagsIdCounter)
            throws IOException {
        final long tagsId = tagsIdCounter.getAndIncrement();

        writer.write(MARKER_DICT_ENTRY);
        writer.write(Long.toString(tagsId));
        writer.write(SEPARATOR_TAG_ID);
        writer.write(tags.toCsv());
        writer.write('\n');

        return tagsId;
    }

    private static Writer createWriter(final Path file) {

        try {
            final OutputStreamWriter writer = new OutputStreamWriter(
                    new GZIPOutputStream(new FileOutputStream(file.toFile()), 4096 * 4), StandardCharsets.UTF_8);
            // initialize file header
            writer.write(MAGIC_BYTE);
            return writer;

        } catch (final IOException e) {
            throw new IllegalStateException(e);
        }
    }
}

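The export format produced above is line-oriented text inside a gzip stream: a single '#' magic character (written without a trailing newline), dictionary lines of the form $<tagsId>:<tags CSV>, and data lines of the form <epochMilliDelta>,<value>,<tagsId>, where the millisecond deltas accumulate from the start of each export file. A hypothetical reader sketch, not part of this change, could decode such a file roughly like this:

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.InputStreamReader;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.util.HashMap;
    import java.util.Map;
    import java.util.zip.GZIPInputStream;

    public class PdbExportReaderSketch {

        public static void read(final Path exportFile) throws IOException {
            final Map<Long, String> tagsById = new HashMap<>();
            long lastEpochMilli = 0;

            try (BufferedReader reader = new BufferedReader(new InputStreamReader(
                    new GZIPInputStream(Files.newInputStream(exportFile)), StandardCharsets.UTF_8))) {

                reader.read(); // skip the single '#' magic character

                String line;
                while ((line = reader.readLine()) != null) {
                    if (line.isEmpty()) {
                        continue;
                    }
                    if (line.charAt(0) == '$') {
                        // dictionary entry: $<tagsId>:<tags CSV>
                        final int separator = line.indexOf(':');
                        tagsById.put(Long.parseLong(line.substring(1, separator)), line.substring(separator + 1));
                    } else {
                        // data entry: <epochMilliDelta>,<value>,<tagsId>, deltas accumulate per file
                        final String[] parts = line.split(",");
                        lastEpochMilli += Long.parseLong(parts[0]);
                        final long value = Long.parseLong(parts[1]);
                        final String tags = tagsById.get(Long.parseLong(parts[2]));
                        System.out.println(lastEpochMilli + " " + value + " " + tags);
                    }
                }
            }
        }
    }
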
@@ -22,12 +22,10 @@ import org.lucares.pdb.api.Query;
import org.lucares.pdb.api.QueryWithCaretMarker;
import org.lucares.pdb.api.Result;
import org.lucares.pdb.api.Tags;
import org.lucares.pdb.datastore.Entries;
import org.lucares.pdb.datastore.Entry;
import org.lucares.pdb.datastore.Indexes;
import org.lucares.pdb.datastore.InvalidValueException;
import org.lucares.pdb.datastore.PdbFile;
import org.lucares.pdb.datastore.PdbIndex;
import org.lucares.pdb.datastore.PdbIndexId;
import org.lucares.pdb.datastore.Proposal;
import org.lucares.pdb.datastore.WriteException;
import org.lucares.pdb.datastore.internal.DataStore;
@@ -40,14 +38,14 @@ public class PerformanceDb implements AutoCloseable {
    private final static Logger LOGGER = LoggerFactory.getLogger(PerformanceDb.class);
    private final static Logger METRICS_LOGGER = LoggerFactory.getLogger("org.lucares.metrics.ingestion.block");

    private final Indexes indexes;
    private final DataStore dataStore;
    private final ExecutorService serverThreadPool = Executors.newFixedThreadPool(1);
    private final ArrayBlockingQueue<Entries> queue;

    public PerformanceDb(final Path dataDirectory) throws IOException {

        queue = new ArrayBlockingQueue<>(10);
        indexes = new Indexes(dataDirectory);
        dataStore = new DataStore(dataDirectory);
        startThread();
    }

@@ -74,17 +72,17 @@ public class PerformanceDb implements AutoCloseable {

    }

    void putEntry(final PdbIndexId indexId, final Entry entry) throws WriteException {
        putEntries(indexId, Arrays.asList(entry));
    void putEntry(final Entry entry) throws WriteException {
        putEntries(Arrays.asList(entry));
    }

    void putEntries(final PdbIndexId indexId, final Iterable<Entry> entries) throws WriteException {
        putEntries(indexId, entries.iterator());
    void putEntries(final Iterable<Entry> entries) throws WriteException {
        putEntries(entries.iterator());
    }

    private void putEntries(final PdbIndexId indexId, final Iterator<Entry> entries) throws WriteException {
    private void putEntries(final Iterator<Entry> entries) throws WriteException {

        final EntryToEntriesIterator entriesIterator = new EntryToEntriesIterator(indexId, entries);
        final EntryToEntriesIterator entriesIterator = new EntryToEntriesIterator(entries);
        final BlockingIteratorIterator<Entries> iterator = new BlockingIteratorIterator<>(entriesIterator);
        putEntries(iterator);
    }
@@ -106,7 +104,6 @@ public class PerformanceDb implements AutoCloseable {
            }

            final Entries entries = entriesOptional.get();
            final DataStore dataStore = indexes.getOrCreateDataStore(entries.getIndex());
            for (final Entry entry : entries) {

                try {
@@ -142,7 +139,7 @@ public class PerformanceDb implements AutoCloseable {
            if (entries.isForceFlush()) {
                LOGGER.debug("flush triggered via entries.isForceFlush()");
                final long start = System.nanoTime();
                indexes.flush();
                dataStore.flush();
                LOGGER.debug("flush duration: {}ms", (System.nanoTime() - start) / 1_000_000.0);
                entries.notifyFlushed();
            }
@@ -155,7 +152,7 @@ public class PerformanceDb implements AutoCloseable {
            LOGGER.info("Thread was interrupted. Aborting execution.");
        } finally {
            LOGGER.info("flush after inserting all data");
            indexes.flush();
            dataStore.flush();
        }
    }

@@ -169,8 +166,7 @@ public class PerformanceDb implements AutoCloseable {
    }

    public List<PdbFile> getFilesForQuery(final Query query) {
        final PdbIndexId indexId = new PdbIndexId(query.getIndex());
        return indexes.getOrCreateDataStore(indexId).getFilesForQuery(query);
        return dataStore.getFilesForQuery(query);
    }

    /**
@@ -182,21 +178,17 @@ public class PerformanceDb implements AutoCloseable {
     */
    public Result get(final Query query, final List<String> groupBy) {
        final long start = System.nanoTime();

        final PdbIndexId indexId = new PdbIndexId(query.getIndex());

        final DataStore dataStore = indexes.getOrCreateDataStore(indexId);
        final List<PdbFile> pdbFiles = dataStore.getFilesForQuery(query);

        final Grouping grouping = Grouping.groupBy(pdbFiles, groupBy);

        final Result result = toResult(grouping, dataStore);
        final Result result = toResult(grouping);
        METRICS_LOGGER.debug("query execution took: " + (System.nanoTime() - start) / 1_000_000.0 + "ms: " + query
                + " (" + groupBy + "): files found: " + pdbFiles.size());
        return result;
    }

    private Result toResult(final Grouping grouping, final DataStore dataStore) {
    private Result toResult(final Grouping grouping) {
        final List<GroupResult> groupResults = new ArrayList<>();
        for (final Group group : grouping.getGroups()) {
            final Stream<LongList> stream = PdbFile.toStream(group.getFiles(), dataStore.getDiskStorage());
@@ -220,7 +212,7 @@ public class PerformanceDb implements AutoCloseable {
                Thread.interrupted();
            }

            indexes.close();
            dataStore.close();
        } catch (final Exception e) {
            LOGGER.error("failed to close PerformanceDB", e);
        }
@@ -228,26 +220,17 @@ public class PerformanceDb implements AutoCloseable {

    public List<Proposal> autocomplete(final QueryWithCaretMarker query) {

        final PdbIndexId indexId = new PdbIndexId(query.getIndex());
        return indexes.getOrCreateDataStore(indexId).propose(query);
        return dataStore.propose(query);
    }

    public List<String> getFields(final DateTimeRange dateRange, final PdbIndexId index) {
    public List<String> getFields(final DateTimeRange dateRange) {

        final List<String> fields = indexes.getOrCreateDataStore(index).getAvailableFields(dateRange);
        final List<String> fields = dataStore.getAvailableFields(dateRange);

        return fields;
    }

    public PartitionDiskStore getDataStore(final PdbIndexId index) {
        return indexes.getOrCreateDataStore(index).getDiskStorage();
    }

    public List<PdbIndex> getIndexes() {
        return indexes.getAvailableIndexes();
    }

    public void createIndex(final PdbIndexId id, final String name, final String description) {
        indexes.create(id, name, description);
    public PartitionDiskStore getDataStore() {
        return dataStore.getDiskStorage();
    }
}

@@ -13,7 +13,6 @@ import java.util.concurrent.ThreadLocalRandom;

import org.apache.commons.collections4.CollectionUtils;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
@@ -25,7 +24,7 @@ import org.lucares.pdb.api.Query;
import org.lucares.pdb.api.Result;
import org.lucares.pdb.api.Tags;
import org.lucares.pdb.datastore.Entry;
import org.lucares.pdb.datastore.PdbIndexId;
import org.junit.jupiter.api.Assertions;
import org.lucares.utils.DateUtils;

public class PerformanceDbTest {
@@ -46,17 +45,13 @@ public class PerformanceDbTest {
    public void testInsertRead() throws Exception {

        try (PerformanceDb db = new PerformanceDb(dataDirectory)) {
            final String indexId = "test";
            final PdbIndexId id = new PdbIndexId(indexId);
            db.createIndex(id, indexId, "");

            final OffsetDateTime nowInUtc = DateUtils.nowInUtc();
            final long date = nowInUtc.toInstant().toEpochMilli();
            final long value = 1;
            final Tags tags = Tags.createAndAddToDictionary("myKey", "myValue");
            db.putEntry(id, new Entry(date, value, tags));
            db.putEntry(new Entry(date, value, tags));

            final Result result = db.get(Query.createQuery(tags, DateTimeRange.ofDay(nowInUtc), indexId));
            final Result result = db.get(Query.createQuery(tags, DateTimeRange.ofDay(nowInUtc)));
            final LongList stream = result.singleGroup().flatMap();

            Assertions.assertEquals(2, stream.size());
@@ -70,9 +65,6 @@ public class PerformanceDbTest {
    public void testInsertIntoMultipleFilesRead() throws Exception {

        try (PerformanceDb db = new PerformanceDb(dataDirectory)) {
            final String indexId = "test";
            final PdbIndexId id = new PdbIndexId(indexId);
            db.createIndex(id, indexId, "");
            final DateTimeRange dateRange = new DateTimeRange(DateUtils.getDate(2016, 11, 1, 10, 0, 0),
                    DateUtils.getDate(2016, 11, 2, 12, 34, 56));
            final long dayOne = dateRange.getStartEpochMilli();
@@ -81,10 +73,10 @@ public class PerformanceDbTest {
            final long valueTwo = 2;
            final Tags tags = Tags.createAndAddToDictionary("myKey", "myValue");

            db.putEntry(id, new Entry(dayOne, valueOne, tags));
            db.putEntry(id, new Entry(dayTwo, valueTwo, tags));
            db.putEntry(new Entry(dayOne, valueOne, tags));
            db.putEntry(new Entry(dayTwo, valueTwo, tags));

            final LongList stream = db.get(Query.createQuery(tags, dateRange, indexId)).singleGroup().flatMap();
            final LongList stream = db.get(Query.createQuery(tags, dateRange)).singleGroup().flatMap();

            Assertions.assertEquals(4, stream.size());

@@ -118,10 +110,6 @@ public class PerformanceDbTest {

        try (PerformanceDb db = new PerformanceDb(dataDirectory)) {

            final String indexId = "test";
            final PdbIndexId id = new PdbIndexId(indexId);
            db.createIndex(id, indexId, "");

            final int year = 2016;
            final int month = 1;
            final int day = 2;
@@ -133,9 +121,9 @@ public class PerformanceDbTest {

            printEntries(entries, "");

            db.putEntries(id, entries);
            db.putEntries(entries);

            final LongList actualEntries = db.get(Query.createQuery(tags, timeRange, indexId)).singleGroup().flatMap();
            final LongList actualEntries = db.get(Query.createQuery(tags, timeRange)).singleGroup().flatMap();
            Assertions.assertEquals(entries.size() * 2, actualEntries.size());

            for (int i = 0; i < entries.size(); i++) {
@@ -155,12 +143,7 @@ public class PerformanceDbTest {
    public void testAppendToExistingFileWithRestart(final long numberOfEntries) throws Exception {
        final Tags tags;
        final List<Entry> expected = new ArrayList<>();

        final String indexId = "test";
        final PdbIndexId id = new PdbIndexId(indexId);

        try (PerformanceDb db = new PerformanceDb(dataDirectory)) {
            db.createIndex(id, indexId, "");

            final int year = 2016;
            final int month = 1;
@@ -170,7 +153,7 @@ public class PerformanceDbTest {
            final DateTimeRange timeRange = DateTimeRange.ofDay(DateUtils.getDate(year, month, day, 1, 1, 1));

            final List<Entry> entries = generateEntries(timeRange, numberOfEntries, 0, tags);
            db.putEntries(id, entries);
            db.putEntries(entries);
            expected.addAll(entries);
        }

@@ -181,10 +164,10 @@ public class PerformanceDbTest {

            final DateTimeRange timeRange = DateTimeRange.ofDay(DateUtils.getDate(year, month, day, 1, 1, 1));
            final List<Entry> entries = generateEntries(timeRange, numberOfEntries, 0, tags);
            db.putEntries(id, entries);
            db.putEntries(entries);
            expected.addAll(entries);

            final LongList actualEntries = db.get(Query.createQuery(tags, timeRange, indexId)).singleGroup().flatMap();
            final LongList actualEntries = db.get(Query.createQuery(tags, timeRange)).singleGroup().flatMap();
            Assertions.assertEquals(expected.size() * 2, actualEntries.size());

            Assertions.assertEquals(toExpectedValues(expected), actualEntries);
@@ -195,11 +178,6 @@ public class PerformanceDbTest {
    public void testInsertIntoMultipleFilesWithDifferentTags() throws Exception {

        try (PerformanceDb db = new PerformanceDb(dataDirectory)) {

            final String indexId = "test";
            final PdbIndexId id = new PdbIndexId(indexId);
            db.createIndex(id, indexId, "");

            final OffsetDateTime from = DateUtils.getDate(2016, 1, 1, 00, 00, 00);
            final OffsetDateTime to = DateUtils.getDate(2016, 1, 1, 23, 59, 50);

@@ -210,33 +188,29 @@ public class PerformanceDbTest {
            final Tags tagsCommon = Tags.createAndAddToDictionary("commonKey", "commonValue");
            final Tags tagsOne = Tags.createAndAddToDictionary("myKey", "one", "commonKey", "commonValue");
            final List<Entry> entriesOne = generateEntries(timeRange, numberOfEntries, 1, tagsOne);
            db.putEntries(id, entriesOne);
            db.putEntries(entriesOne);
            printEntries(entriesOne, "one");

            final Tags tagsTwo = Tags.createAndAddToDictionary("myKey", "two", "commonKey", "commonValue");
            final List<Entry> entriesTwo = generateEntries(timeRange, numberOfEntries, 2, tagsTwo);
            printEntries(entriesTwo, "two");
            db.putEntries(id, entriesTwo);
            db.putEntries(entriesTwo);

            final Tags tagsThree = Tags.createAndAddToDictionary("myKey", "three", "commonKey", "commonValue");
            final List<Entry> entriesThree = generateEntries(timeRange, numberOfEntries, 3, tagsThree);
            printEntries(entriesThree, "three");
            db.putEntries(id, entriesThree);
            db.putEntries(entriesThree);

            final LongList actualEntriesOne = db.get(Query.createQuery(tagsOne, dateRange, indexId)).singleGroup()
                    .flatMap();
            final LongList actualEntriesOne = db.get(Query.createQuery(tagsOne, dateRange)).singleGroup().flatMap();
            Assertions.assertEquals(toExpectedValues(entriesOne), actualEntriesOne);

            final LongList actualEntriesTwo = db.get(Query.createQuery(tagsTwo, dateRange, indexId)).singleGroup()
                    .flatMap();
            final LongList actualEntriesTwo = db.get(Query.createQuery(tagsTwo, dateRange)).singleGroup().flatMap();
            Assertions.assertEquals(toExpectedValues(entriesTwo), actualEntriesTwo);

            final LongList actualEntriesThree = db.get(Query.createQuery(tagsThree, dateRange, indexId)).singleGroup()
                    .flatMap();
            final LongList actualEntriesThree = db.get(Query.createQuery(tagsThree, dateRange)).singleGroup().flatMap();
            Assertions.assertEquals(toExpectedValues(entriesThree), actualEntriesThree);

            final LongList actualEntriesAll = db.get(Query.createQuery(tagsCommon, dateRange, indexId)).singleGroup()
                    .flatMap();
            final LongList actualEntriesAll = db.get(Query.createQuery(tagsCommon, dateRange)).singleGroup().flatMap();
            final List<Entry> expectedAll = CollectionUtils.collate(entriesOne,
                    CollectionUtils.collate(entriesTwo, entriesThree, EntryByDateComparator.INSTANCE),
                    EntryByDateComparator.INSTANCE);
@@ -252,11 +226,6 @@ public class PerformanceDbTest {
    @Test
    public void testGroupBySingleField() throws Exception {
        try (PerformanceDb db = new PerformanceDb(dataDirectory)) {

            final String indexId = "test";
            final PdbIndexId pdbIndexId = new PdbIndexId(indexId);
            db.createIndex(pdbIndexId, indexId, "");

            final OffsetDateTime from = DateUtils.getDate(2016, 1, 1, 00, 00, 00);
            final OffsetDateTime to = DateUtils.getDate(2016, 1, 1, 23, 59, 50);

@@ -267,12 +236,11 @@ public class PerformanceDbTest {
            final Tags tagsOne = Tags.createAndAddToDictionary(key, "one", "commonKey", "commonValue");
            final Tags tagsTwo = Tags.createAndAddToDictionary(key, "two", "commonKey", "commonValue");
            final Tags tagsThree = Tags.createAndAddToDictionary("commonKey", "commonValue");
            final LongList entriesOne = storeEntries(db, pdbIndexId, timeRange, numberOfEntries, tagsOne, 1);
            final LongList entriesTwo = storeEntries(db, pdbIndexId, timeRange, numberOfEntries, tagsTwo, 2);
            final LongList entriesThree = storeEntries(db, pdbIndexId, timeRange, numberOfEntries, tagsThree, 3);
            final LongList entriesOne = storeEntries(db, timeRange, numberOfEntries, tagsOne, 1);
            final LongList entriesTwo = storeEntries(db, timeRange, numberOfEntries, tagsTwo, 2);
            final LongList entriesThree = storeEntries(db, timeRange, numberOfEntries, tagsThree, 3);

            final Result result = db.get(Query.createQuery("commonKey=commonValue", timeRange, indexId),
                    Arrays.asList(key));
            final Result result = db.get(Query.createQuery("commonKey=commonValue", timeRange), Arrays.asList(key));

            final List<GroupResult> groups = result.getGroups();

@@ -296,11 +264,6 @@ public class PerformanceDbTest {
    @Test
    public void testGroupByMultipleFields() throws Exception {
        try (PerformanceDb db = new PerformanceDb(dataDirectory)) {

            final String indexId = "test";
            final PdbIndexId dbIndexId = new PdbIndexId(indexId);
            db.createIndex(dbIndexId, indexId, "");

            final OffsetDateTime from = DateUtils.getDate(2016, 1, 1, 00, 00, 00);
            final OffsetDateTime to = DateUtils.getDate(2016, 1, 1, 23, 59, 50);

@@ -314,12 +277,12 @@ public class PerformanceDbTest {
            final Tags tagsTwoB = Tags.createAndAddToDictionary(key1, "two", key2, "bbb", "commonKey", "commonValue");
            final Tags tagsThree = Tags.createAndAddToDictionary(key1, "three", "commonKey", "commonValue");

            final LongList entriesOne = storeEntries(db, dbIndexId, timeRange, numberOfEntries, tagsOne, 1);
            final LongList entriesTwo = storeEntries(db, dbIndexId, timeRange, numberOfEntries, tagsTwoA, 2);
            entriesTwo.addAll(storeEntries(db, dbIndexId, timeRange, numberOfEntries, tagsTwoB, 3));
            final LongList entriesThree = storeEntries(db, dbIndexId, timeRange, numberOfEntries, tagsThree, 4);
            final LongList entriesOne = storeEntries(db, timeRange, numberOfEntries, tagsOne, 1);
            final LongList entriesTwo = storeEntries(db, timeRange, numberOfEntries, tagsTwoA, 2);
            entriesTwo.addAll(storeEntries(db, timeRange, numberOfEntries, tagsTwoB, 3));
            final LongList entriesThree = storeEntries(db, timeRange, numberOfEntries, tagsThree, 4);

            final Result result = db.get(Query.createQuery("commonKey=commonValue", timeRange, indexId),
            final Result result = db.get(Query.createQuery("commonKey=commonValue", timeRange),
                    Arrays.asList(key1, key2));

            final List<GroupResult> groups = result.getGroups();
@@ -348,10 +311,10 @@ public class PerformanceDbTest {
        }
    }

    private LongList storeEntries(final PerformanceDb performanceDb, final PdbIndexId dbIndexId,
            final DateTimeRange timeRange, final long numberOfEntries, final Tags tags, final int addToDate) {
    private LongList storeEntries(final PerformanceDb performanceDb, final DateTimeRange timeRange,
            final long numberOfEntries, final Tags tags, final int addToDate) {
        final List<Entry> entries = generateEntries(timeRange, numberOfEntries, addToDate, tags);
        performanceDb.putEntries(dbIndexId, entries);
        performanceDb.putEntries(entries);

        final LongList result = new LongList();