diff --git a/build.gradle b/build.gradle
index 2181aaf..5fc459a 100644
--- a/build.gradle
+++ b/build.gradle
@@ -10,7 +10,7 @@ plugins {
ext {
- javaVersion=15
+ javaVersion=14
version_log4j2= '2.13.3' // keep in sync with spring-boot-starter-log4j2
version_spring = '2.4.5'
diff --git a/performanceDb/src/main/java/org/lucares/performance/db/Entries.java b/data-store/src/main/java/org/lucares/pdb/datastore/Entries.java
similarity index 77%
rename from performanceDb/src/main/java/org/lucares/performance/db/Entries.java
rename to data-store/src/main/java/org/lucares/pdb/datastore/Entries.java
index 552f62e..7b3d020 100644
--- a/performanceDb/src/main/java/org/lucares/performance/db/Entries.java
+++ b/data-store/src/main/java/org/lucares/pdb/datastore/Entries.java
@@ -1,4 +1,4 @@
-package org.lucares.performance.db;
+package org.lucares.pdb.datastore;
import java.util.ArrayList;
import java.util.Arrays;
@@ -9,9 +9,6 @@ import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
-import org.lucares.pdb.datastore.Entry;
-import org.lucares.pdb.datastore.PdbIndexId;
-
/**
* Wrapper for chunk of {@link Entry}s.
*
@@ -31,7 +28,7 @@ public class Entries implements Iterable {
* A special {@link Entries} instance that can be used as poison object for
* blocking queues.
*/
- public static final Entries POISON = new Entries(new PdbIndexId("poison"), 0);
+ public static final Entries POISON = new Entries(0);
private final List entries;
@@ -39,20 +36,15 @@ public class Entries implements Iterable {
private CountDownLatch flushLatch = null;
- private final PdbIndexId index;
-
- public Entries(final PdbIndexId index, final int initialSize) {
- this.index = index;
+ public Entries(final int initialSize) {
entries = new ArrayList<>(initialSize);
}
- public Entries(final PdbIndexId index, final Entry... entries) {
- this.index = index;
+ public Entries(final Entry... entries) {
this.entries = new ArrayList<>(Arrays.asList(entries));
}
- public Entries(final PdbIndexId index, final Collection entries) {
- this.index = index;
+ public Entries(final Collection entries) {
this.entries = new ArrayList<>(entries);
}
@@ -89,8 +81,4 @@ public class Entries implements Iterable {
public void notifyFlushed() {
flushLatch.countDown();
}
-
- public PdbIndexId getIndex() {
- return index;
- }
}
diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/Entry.java b/data-store/src/main/java/org/lucares/pdb/datastore/Entry.java
index 92739e8..0a23f2a 100644
--- a/data-store/src/main/java/org/lucares/pdb/datastore/Entry.java
+++ b/data-store/src/main/java/org/lucares/pdb/datastore/Entry.java
@@ -4,7 +4,6 @@ import java.time.Instant;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
-import java.util.Objects;
import org.lucares.pdb.api.Tags;
@@ -43,7 +42,12 @@ public class Entry {
@Override
public int hashCode() {
- return Objects.hash(epochMilli, tags, value);
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + (int) (epochMilli ^ (epochMilli >>> 32));
+ result = prime * result + ((tags == null) ? 0 : tags.hashCode());
+ result = prime * result + (int) (value ^ (value >>> 32));
+ return result;
}
@Override
@@ -55,7 +59,15 @@ public class Entry {
if (getClass() != obj.getClass())
return false;
final Entry other = (Entry) obj;
- return epochMilli == other.epochMilli && Objects.equals(tags, other.tags) && value == other.value;
+ if (epochMilli != other.epochMilli)
+ return false;
+ if (tags == null) {
+ if (other.tags != null)
+ return false;
+ } else if (!tags.equals(other.tags))
+ return false;
+ if (value != other.value)
+ return false;
+ return true;
}
-
}
diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/IndexNotFoundException.java b/data-store/src/main/java/org/lucares/pdb/datastore/IndexNotFoundException.java
deleted file mode 100644
index 0985754..0000000
--- a/data-store/src/main/java/org/lucares/pdb/datastore/IndexNotFoundException.java
+++ /dev/null
@@ -1,17 +0,0 @@
-package org.lucares.pdb.datastore;
-
-public class IndexNotFoundException extends RuntimeException {
-
- private static final long serialVersionUID = 360217229200302323L;
-
- private final String id;
-
- public IndexNotFoundException(final PdbIndexId id) {
- super(id.getId());
- this.id = id.getId();
- }
-
- public String getId() {
- return id;
- }
-}
diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/Indexes.java b/data-store/src/main/java/org/lucares/pdb/datastore/Indexes.java
deleted file mode 100644
index 0f6c0ff..0000000
--- a/data-store/src/main/java/org/lucares/pdb/datastore/Indexes.java
+++ /dev/null
@@ -1,69 +0,0 @@
-package org.lucares.pdb.datastore;
-
-import java.io.Closeable;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.time.Duration;
-import java.util.List;
-import java.util.Optional;
-import java.util.stream.Collectors;
-
-import org.lucares.pdb.api.RuntimeIOException;
-import org.lucares.pdb.datastore.internal.DataStore;
-import org.lucares.utils.cache.HotEntryCache;
-
-public class Indexes implements Closeable {
- private final HotEntryCache dataStores = new HotEntryCache(
- Duration.ofMinutes(1), 10);
-
- private final Path dataDirectory;
-
- public Indexes(final Path dataDirectory) {
- this.dataDirectory = dataDirectory;
- }
-
- public DataStore getOrCreateDataStore(final PdbIndexId id) {
-
- return dataStores.putIfAbsent(id, idx -> {
- final PdbIndex pdbIndex = getIndexById(idx);
- return new DataStore(pdbIndex.getPath());
- });
- }
-
- private PdbIndex getIndexById(final PdbIndexId id) {
- return PdbIndex//
- .create(dataDirectory, id)//
- .orElseThrow(() -> new IndexNotFoundException(id));
- }
-
- public DataStore getOrCreateDataStore(final PdbIndex pdbIndex) {
-
- return dataStores.putIfAbsent(pdbIndex.getId(), idx -> new DataStore(pdbIndex.getPath()));
- }
-
- public List getAvailableIndexes() {
- try {
- return Files.list(dataDirectory)//
- .map(PdbIndex::create)//
- .filter(Optional::isPresent)//
- .map(Optional::get)//
- .collect(Collectors.toList());
- } catch (final IOException e) {
- throw new RuntimeIOException(e);
- }
- }
-
- public void create(final PdbIndexId id, final String name, final String description) {
- PdbIndex.init(dataDirectory, id, name, description);
- }
-
- @Override
- public void close() {
- dataStores.forEach(DataStore::close);
- }
-
- public void flush() {
- dataStores.forEach(DataStore::flush);
- }
-}
diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/PdbIndex.java b/data-store/src/main/java/org/lucares/pdb/datastore/PdbIndex.java
deleted file mode 100644
index f4a2346..0000000
--- a/data-store/src/main/java/org/lucares/pdb/datastore/PdbIndex.java
+++ /dev/null
@@ -1,164 +0,0 @@
-package org.lucares.pdb.datastore;
-
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.Reader;
-import java.io.Writer;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.Objects;
-import java.util.Optional;
-import java.util.Properties;
-
-import org.lucares.pdb.api.RuntimeIOException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class PdbIndex {
-
- private static final String META_PROPERTIES = "meta.properties";
-
- private static final String INDEX_PREFIX = "index_";
-
- private final static Logger LOGGER = LoggerFactory.getLogger(PdbIndex.class);
-
- private final Path path;
-
- private final PdbIndexId id;
- private final String name;
- private final String description;
-
- public PdbIndex(final PdbIndexId id, final Path path, final String name, final String description) {
- this.id = id;
- this.path = path;
- this.name = name;
- this.description = description;
- }
-
- public static Optional create(final Path dataDirectory, final PdbIndexId id) {
- final Path indexPath = dataDirectory.resolve(INDEX_PREFIX + id);
- return create(indexPath);
- }
-
- public static Optional create(final Path path) {
-
- if (!Files.isDirectory(path)) {
- return Optional.empty();
- }
- if (!path.getFileName().toString().startsWith(INDEX_PREFIX)) {
- return Optional.empty();
- }
- final Path metadataPath = path.resolve(META_PROPERTIES);
- if (!Files.isRegularFile(metadataPath)) {
- LOGGER.warn("index folder {} is ignored, because it does not contain a meta.properties file", path);
- return Optional.empty();
- }
-
- if (!Files.isReadable(metadataPath)) {
- LOGGER.warn("meta.properties file is not readable", metadataPath);
- return Optional.empty();
- }
-
- final String id = path.getFileName().toString().substring(INDEX_PREFIX.length());
- final PdbIndexId indexId = new PdbIndexId(id);
-
- final Properties properties = readProperties(metadataPath);
- final String name = properties.getProperty("name", "no name");
- final String description = properties.getProperty("description", "");
-
- return Optional.of(new PdbIndex(indexId, path, name, description));
- }
-
- private static Properties readProperties(final Path metadataPath) {
- final Properties properties = new Properties();
-
- try (final Reader r = new FileReader(metadataPath.toFile(), StandardCharsets.UTF_8)) {
- properties.load(r);
- } catch (final IOException e) {
- throw new RuntimeIOException(e);
- }
- return properties;
- }
-
- private static void writeProperties(final Path metadataPath, final String name, final String description) {
- final Properties properties = new Properties();
- properties.setProperty("name", name);
- properties.setProperty("description", description);
-
- try (final Writer w = new FileWriter(metadataPath.toFile(), StandardCharsets.UTF_8)) {
- properties.store(w, "");
- } catch (final IOException e) {
- throw new RuntimeIOException(e);
- }
- }
-
- public PdbIndexId getId() {
- return id;
- }
-
- public Path getPath() {
- return path;
- }
-
- public String getName() {
- return name;
- }
-
- public String getDescription() {
- return description;
- }
-
- /**
- * Custom hash code implementation!
- */
- @Override
- public int hashCode() {
- return Objects.hash(id);
- }
-
- /**
- * Custom equals implementation!
- */
- @Override
- public boolean equals(final Object obj) {
- if (this == obj)
- return true;
- if (obj == null)
- return false;
- if (getClass() != obj.getClass())
- return false;
- final PdbIndex other = (PdbIndex) obj;
- return Objects.equals(id, other.id);
- }
-
- @Override
- public String toString() {
- final StringBuilder builder = new StringBuilder();
- builder.append("PdbIndex [path=");
- builder.append(path);
- builder.append(", name=");
- builder.append(name);
- builder.append(", description=");
- builder.append(description);
- builder.append("]");
- return builder.toString();
- }
-
- public static void init(final Path dataDirectory, final PdbIndexId id, final String name,
- final String description) {
- try {
- final Path path = dataDirectory.resolve(INDEX_PREFIX + id.getId());
- Files.createDirectories(path);
-
- final Path metadataPath = path.resolve(META_PROPERTIES);
- writeProperties(metadataPath, name, description);
-
- } catch (final IOException e) {
- throw new RuntimeIOException(e);
- }
-
- }
-
-}
diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/PdbIndexId.java b/data-store/src/main/java/org/lucares/pdb/datastore/PdbIndexId.java
deleted file mode 100644
index 2f380b7..0000000
--- a/data-store/src/main/java/org/lucares/pdb/datastore/PdbIndexId.java
+++ /dev/null
@@ -1,39 +0,0 @@
-package org.lucares.pdb.datastore;
-
-import java.util.Objects;
-
-public class PdbIndexId {
- private final String id;
-
- public PdbIndexId(final String id) {
- super();
- this.id = id;
- }
-
- public String getId() {
- return id;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(id);
- }
-
- @Override
- public boolean equals(final Object obj) {
- if (this == obj)
- return true;
- if (obj == null)
- return false;
- if (getClass() != obj.getClass())
- return false;
- final PdbIndexId other = (PdbIndexId) obj;
- return Objects.equals(id, other.id);
- }
-
- @Override
- public String toString() {
- return id;
- }
-
-}
diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/internal/DataStore.java b/data-store/src/main/java/org/lucares/pdb/datastore/internal/DataStore.java
index e2dcb83..849d65f 100644
--- a/data-store/src/main/java/org/lucares/pdb/datastore/internal/DataStore.java
+++ b/data-store/src/main/java/org/lucares/pdb/datastore/internal/DataStore.java
@@ -121,7 +121,7 @@ public class DataStore implements AutoCloseable {
private final PartitionDiskStore diskStorage;
private final Path storageBasePath;
- public DataStore(final Path dataDirectory) {
+ public DataStore(final Path dataDirectory) throws IOException {
storageBasePath = storageDirectory(dataDirectory);
Tags.STRING_COMPRESSOR = StringCompressor.create(keyCompressionFile(storageBasePath));
@@ -148,11 +148,11 @@ public class DataStore implements AutoCloseable {
writerCache.addListener((key, value) -> value.close());
}
- private Path keyCompressionFile(final Path dataDirectory) {
+ private Path keyCompressionFile(final Path dataDirectory) throws IOException {
return dataDirectory.resolve("keys.csv");
}
- public static Path storageDirectory(final Path dataDirectory) {
+ public static Path storageDirectory(final Path dataDirectory) throws IOException {
return dataDirectory.resolve(SUBDIR_STORAGE);
}
diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/internal/QueryCompletionIndex.java b/data-store/src/main/java/org/lucares/pdb/datastore/internal/QueryCompletionIndex.java
index a198d6c..33a7cfb 100644
--- a/data-store/src/main/java/org/lucares/pdb/datastore/internal/QueryCompletionIndex.java
+++ b/data-store/src/main/java/org/lucares/pdb/datastore/internal/QueryCompletionIndex.java
@@ -301,7 +301,7 @@ public class QueryCompletionIndex implements AutoCloseable {
private final PartitionPersistentMap fieldToValueIndex;
private final PartitionPersistentMap fieldIndex;
- public QueryCompletionIndex(final Path basePath) {
+ public QueryCompletionIndex(final Path basePath) throws IOException {
tagToTagIndex = new PartitionPersistentMap<>(basePath, "queryCompletionTagToTagIndex.bs", new EncoderTwoTags(),
PartitionAwareWrapper.wrap(PersistentMap.EMPTY_ENCODER));
diff --git a/data-store/src/test/java/org/lucares/pdb/datastore/internal/DataStoreTest.java b/data-store/src/test/java/org/lucares/pdb/datastore/internal/DataStoreTest.java
index 2f06219..77f707f 100644
--- a/data-store/src/test/java/org/lucares/pdb/datastore/internal/DataStoreTest.java
+++ b/data-store/src/test/java/org/lucares/pdb/datastore/internal/DataStoreTest.java
@@ -23,7 +23,6 @@ import javax.swing.JTextArea;
import javax.swing.JTextField;
import org.junit.jupiter.api.AfterEach;
-import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
@@ -37,6 +36,7 @@ import org.lucares.pdb.api.Tags;
import org.lucares.pdb.blockstorage.BSFile;
import org.lucares.pdb.datastore.Doc;
import org.lucares.pdb.datastore.Proposal;
+import org.junit.jupiter.api.Assertions;
import org.lucares.utils.CollectionUtils;
import org.lucares.utils.DateUtils;
import org.lucares.utils.file.FileUtils;
@@ -261,7 +261,7 @@ public class DataStoreTest {
final String query = input.getText();
final int caretIndex = input.getCaretPosition();
final QueryWithCaretMarker q = new QueryWithCaretMarker(query, dateRange, caretIndex,
- ResultMode.CUT_AT_DOT, null);
+ ResultMode.CUT_AT_DOT);
final List proposals = dataStore.propose(q);
@@ -284,8 +284,7 @@ public class DataStoreTest {
}
});
- final List docs = dataStore
- .search(Query.createQuery("", DateTimeRange.relative(1, ChronoUnit.DAYS), null));
+ final List docs = dataStore.search(Query.createQuery("", DateTimeRange.relative(1, ChronoUnit.DAYS)));
final StringBuilder out = new StringBuilder();
out.append("info\n");
for (final Doc doc : docs) {
@@ -305,7 +304,7 @@ public class DataStoreTest {
final String query = queryWithCaret.replace("|", "");
final int caretIndex = queryWithCaret.indexOf("|");
final List proposals = dataStore
- .propose(new QueryWithCaretMarker(query, dateRange, caretIndex, ResultMode.CUT_AT_DOT, null));
+ .propose(new QueryWithCaretMarker(query, dateRange, caretIndex, ResultMode.CUT_AT_DOT));
System.out.println(
"proposed values: " + proposals.stream().map(Proposal::getProposedTag).collect(Collectors.toList()));
@@ -318,12 +317,12 @@ public class DataStoreTest {
}
private void assertQueryFindsResults(final DateTimeRange dateRange, final String query) {
- final List result = dataStore.search(new Query(query, dateRange, null));
+ final List result = dataStore.search(new Query(query, dateRange));
Assertions.assertFalse(result.isEmpty(), "The query '" + query + "' must return a result, but didn't.");
}
private void assertSearch(final DateTimeRange dateRange, final String queryString, final Tags... tags) {
- final Query query = new Query(queryString, dateRange, null);
+ final Query query = new Query(queryString, dateRange);
final List actualDocs = dataStore.search(query);
final List actual = CollectionUtils.map(actualDocs, Doc::getRootBlockNumber);
diff --git a/data-store/src/test/java/org/lucares/pdb/datastore/internal/ProposerTest.java b/data-store/src/test/java/org/lucares/pdb/datastore/internal/ProposerTest.java
index b319b00..a94c8dd 100644
--- a/data-store/src/test/java/org/lucares/pdb/datastore/internal/ProposerTest.java
+++ b/data-store/src/test/java/org/lucares/pdb/datastore/internal/ProposerTest.java
@@ -8,7 +8,6 @@ import java.util.Collections;
import java.util.List;
import org.junit.jupiter.api.AfterAll;
-import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.lucares.pdb.api.DateTimeRange;
@@ -16,6 +15,7 @@ import org.lucares.pdb.api.QueryWithCaretMarker;
import org.lucares.pdb.api.QueryWithCaretMarker.ResultMode;
import org.lucares.pdb.api.Tags;
import org.lucares.pdb.datastore.Proposal;
+import org.junit.jupiter.api.Assertions;
import org.lucares.utils.CollectionUtils;
import org.lucares.utils.file.FileUtils;
@@ -25,8 +25,6 @@ public class ProposerTest {
private static DataStore dataStore;
private static DateTimeRange dateRange;
- private static final String INDEX = "no used";
-
@BeforeAll
public static void beforeClass() throws Exception {
dataDirectory = Files.createTempDirectory("pdb");
@@ -295,7 +293,7 @@ public class ProposerTest {
final Proposal... expected) throws InterruptedException {
final List actual = dataStore
- .propose(new QueryWithCaretMarker(query, dateRange, caretIndex, resultMode, INDEX));
+ .propose(new QueryWithCaretMarker(query, dateRange, caretIndex, resultMode));
final List expectedList = Arrays.asList(expected);
Collections.sort(expectedList);
diff --git a/pdb-api/src/main/java/org/lucares/pdb/api/Query.java b/pdb-api/src/main/java/org/lucares/pdb/api/Query.java
index 27e77ab..82ffbbc 100644
--- a/pdb-api/src/main/java/org/lucares/pdb/api/Query.java
+++ b/pdb-api/src/main/java/org/lucares/pdb/api/Query.java
@@ -4,48 +4,45 @@ import java.util.ArrayList;
import java.util.List;
public class Query {
- private final String index;
-
private final String query;
private final DateTimeRange dateRange;
- public Query(final String query, final DateTimeRange dateRange, final String index) {
+ public Query(final String query, final DateTimeRange dateRange) {
super();
this.query = query;
this.dateRange = dateRange;
- this.index = index;
}
public Query relativeMillis(final String query, final long amount) {
- return new Query(query, DateTimeRange.relativeMillis(amount), index);
+ return new Query(query, DateTimeRange.relativeMillis(amount));
}
public Query relativeSeconds(final String query, final long amount) {
- return new Query(query, DateTimeRange.relativeSeconds(amount), index);
+ return new Query(query, DateTimeRange.relativeSeconds(amount));
}
public Query relativeMinutes(final String query, final long amount) {
- return new Query(query, DateTimeRange.relativeMinutes(amount), index);
+ return new Query(query, DateTimeRange.relativeMinutes(amount));
}
public Query relativeHours(final String query, final long amount) {
- return new Query(query, DateTimeRange.relativeHours(amount), index);
+ return new Query(query, DateTimeRange.relativeHours(amount));
}
public Query relativeDays(final String query, final long amount) {
- return new Query(query, DateTimeRange.relativeDays(amount), index);
+ return new Query(query, DateTimeRange.relativeDays(amount));
}
public Query relativeMonths(final String query, final long amount) {
- return new Query(query, DateTimeRange.relativeMonths(amount), index);
+ return new Query(query, DateTimeRange.relativeMonths(amount));
}
- public static Query createQuery(final String query, final DateTimeRange dateRange, final String index) {
- return new Query(query, dateRange, index);
+ public static Query createQuery(final String query, final DateTimeRange dateRange) {
+ return new Query(query, dateRange);
}
- public static Query createQuery(final Tags tags, final DateTimeRange dateRange, final String index) {
+ public static Query createQuery(final Tags tags, final DateTimeRange dateRange) {
final List terms = new ArrayList<>();
@@ -61,11 +58,7 @@ public class Query {
terms.add(term.toString());
}
- return new Query(String.join(" and ", terms), dateRange, index);
- }
-
- public String getIndex() {
- return index;
+ return new Query(String.join(" and ", terms), dateRange);
}
public String getQuery() {
@@ -78,7 +71,7 @@ public class Query {
@Override
public String toString() {
- return "'" + query + "' [" + dateRange + "] in index " + index;
+ return "'" + query + "' [" + dateRange + "]";
}
}
diff --git a/pdb-api/src/main/java/org/lucares/pdb/api/QueryWithCaretMarker.java b/pdb-api/src/main/java/org/lucares/pdb/api/QueryWithCaretMarker.java
index 93fa876..d1f70f3 100644
--- a/pdb-api/src/main/java/org/lucares/pdb/api/QueryWithCaretMarker.java
+++ b/pdb-api/src/main/java/org/lucares/pdb/api/QueryWithCaretMarker.java
@@ -10,8 +10,8 @@ public class QueryWithCaretMarker extends Query implements QueryConstants {
private final ResultMode resultMode;
public QueryWithCaretMarker(final String query, final DateTimeRange dateRange, final int caretIndex,
- final ResultMode resultMode, final String index) {
- super(query, dateRange, index);
+ final ResultMode resultMode) {
+ super(query, dateRange);
this.caretIndex = caretIndex;
this.resultMode = resultMode;
}
diff --git a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/PlotSettings.java b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/PlotSettings.java
index 73bfa19..e0e22ce 100644
--- a/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/PlotSettings.java
+++ b/pdb-plotting/src/main/java/org/lucares/pdb/plot/api/PlotSettings.java
@@ -9,7 +9,6 @@ import java.util.Optional;
import java.util.regex.Pattern;
import org.lucares.pdb.api.DateTimeRange;
-import org.lucares.pdb.datastore.PdbIndexId;
import org.lucares.recommind.logs.GnuplotAxis;
import org.lucares.utils.Preconditions;
@@ -19,8 +18,6 @@ public class PlotSettings {
private String query;
- private PdbIndexId index;
-
private int height;
private int width;
@@ -58,14 +55,6 @@ public class PlotSettings {
this.query = query;
}
- public PdbIndexId getIndex() {
- return index;
- }
-
- public void setIndex(final PdbIndexId index) {
- this.index = index;
- }
-
public int getHeight() {
return height;
}
diff --git a/pdb-plotting/src/main/java/org/lucares/recommind/logs/Plotter.java b/pdb-plotting/src/main/java/org/lucares/recommind/logs/Plotter.java
index cec22ac..f3ad073 100644
--- a/pdb-plotting/src/main/java/org/lucares/recommind/logs/Plotter.java
+++ b/pdb-plotting/src/main/java/org/lucares/recommind/logs/Plotter.java
@@ -22,7 +22,6 @@ import org.lucares.pdb.api.GroupResult;
import org.lucares.pdb.api.Query;
import org.lucares.pdb.api.Result;
import org.lucares.pdb.api.Tags;
-import org.lucares.pdb.datastore.PdbIndexId;
import org.lucares.pdb.plot.api.AggregatorCollection;
import org.lucares.pdb.plot.api.Limit;
import org.lucares.pdb.plot.api.PlotSettings;
@@ -68,7 +67,6 @@ public class Plotter {
final List dataSeries = Collections.synchronizedList(new ArrayList<>());
final String query = plotSettings.getQuery();
- final PdbIndexId index = plotSettings.getIndex();
final List groupBy = plotSettings.getGroupBy();
final int height = plotSettings.getHeight();
final int width = plotSettings.getWidth();
@@ -76,7 +74,7 @@ public class Plotter {
final OffsetDateTime dateFrom = dateRange.getStart();
final OffsetDateTime dateTo = dateRange.getEnd();
- final Result result = db.get(new Query(query, dateRange, index.getId()), groupBy);
+ final Result result = db.get(new Query(query, dateRange), groupBy);
final long start = System.nanoTime();
final AtomicInteger idCounter = new AtomicInteger(0);
diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/CsvReaderSettings.java b/pdb-ui/src/main/java/org/lucares/pdbui/CsvReaderSettings.java
index d4f8fa6..d32a979 100644
--- a/pdb-ui/src/main/java/org/lucares/pdbui/CsvReaderSettings.java
+++ b/pdb-ui/src/main/java/org/lucares/pdbui/CsvReaderSettings.java
@@ -166,8 +166,6 @@ public final class CsvReaderSettings {
private String comment = "#";
- private String indexId = "default";
-
public CsvReaderSettings() {
this("@timestamp", "duration", ",", new ColumnDefinitions());
}
@@ -236,14 +234,6 @@ public final class CsvReaderSettings {
return bytes[0];
}
- public void setIndexId(final String indexId) {
- this.indexId = indexId;
- }
-
- public String getIndexId() {
- return indexId;
- }
-
public void putAdditionalTag(final String field, final String value) {
additionalTags.put(field, value);
}
@@ -263,4 +253,5 @@ public final class CsvReaderSettings {
public void setColumnDefinitions(final ColumnDefinitions columnDefinitions) {
this.columnDefinitions = columnDefinitions;
}
+
}
diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/CsvToEntryTransformer.java b/pdb-ui/src/main/java/org/lucares/pdbui/CsvToEntryTransformer.java
index 5a3dc98..2521781 100644
--- a/pdb-ui/src/main/java/org/lucares/pdbui/CsvToEntryTransformer.java
+++ b/pdb-ui/src/main/java/org/lucares/pdbui/CsvToEntryTransformer.java
@@ -14,12 +14,11 @@ import java.util.function.Function;
import org.lucares.collections.IntList;
import org.lucares.pdb.api.Tags;
import org.lucares.pdb.api.TagsBuilder;
+import org.lucares.pdb.datastore.Entries;
import org.lucares.pdb.datastore.Entry;
-import org.lucares.pdb.datastore.PdbIndexId;
import org.lucares.pdbui.CsvReaderSettings.ColumnDefinitions;
import org.lucares.pdbui.CsvReaderSettings.PostProcessors;
import org.lucares.pdbui.date.FastISODateParser;
-import org.lucares.performance.db.Entries;
import org.lucares.utils.CollectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -44,13 +43,11 @@ class CsvToEntryTransformer {
void readCSV(final InputStream in) throws IOException, InterruptedException, TimeoutException {
final int chunksize = 1000;
- PdbIndexId indexId = new PdbIndexId(settings.getIndexId());
- Entries entries = new Entries(indexId, chunksize);
+ Entries entries = new Entries(chunksize);
final byte newline = '\n';
final byte separator = settings.separatorByte();
final byte comment = settings.commentByte();
- final byte indexIdLinePrefix = 0x01; // Start of Heading (ASCII)
final byte[] line = new byte[64 * 1024]; // max line length
int offsetInLine = 0;
int offsetInBuffer = 0;
@@ -76,22 +73,18 @@ class CsvToEntryTransformer {
bytesInLine = offsetInLine + length;
separatorPositions.add(offsetInLine + i - offsetInBuffer);
- if (line[0] == indexIdLinePrefix) {
- queue.put(entries);
- indexId = new PdbIndexId(new String(line, 1, bytesInLine - 1, StandardCharsets.UTF_8));
- entries = new Entries(indexId, chunksize);
- } else if (line[0] == comment) {
+ if (line[0] == comment) {
// ignore
} else if (compressedHeaders != null) {
final Entry entry = handleCsvLine(line, bytesInLine, separatorPositions, keyTimestamp,
- keyDuration, dateParser, additionalTags, indexId);
+ keyDuration, dateParser, additionalTags);
if (entry != null) {
entries.add(entry);
}
if (entries.size() >= chunksize) {
queue.put(entries);
- entries = new Entries(indexId, chunksize);
+ entries = new Entries(chunksize);
}
} else {
handleCsvHeaderLine(line, bytesInLine, separatorPositions);
@@ -115,7 +108,7 @@ class CsvToEntryTransformer {
}
}
final Entry entry = handleCsvLine(line, bytesInLine, separatorPositions, keyTimestamp, keyDuration, dateParser,
- additionalTags, indexId);
+ additionalTags);
if (entry != null) {
entries.add(entry);
}
@@ -173,7 +166,7 @@ class CsvToEntryTransformer {
private Entry handleCsvLine(final byte[] line, final int bytesInLine, final IntList separatorPositions,
final int keyTimestamp, final int keyDuration, final FastISODateParser dateParser,
- final Tags additionalTags, final PdbIndexId indexId) {
+ final Tags additionalTags) {
try {
final int[] columns = compressedHeaders;
if (separatorPositions.size() != columns.length) {
diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/CsvUploadHandler.java b/pdb-ui/src/main/java/org/lucares/pdbui/CsvUploadHandler.java
index 8b61a8c..2d24272 100644
--- a/pdb-ui/src/main/java/org/lucares/pdbui/CsvUploadHandler.java
+++ b/pdb-ui/src/main/java/org/lucares/pdbui/CsvUploadHandler.java
@@ -12,7 +12,7 @@ import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
-import org.lucares.performance.db.Entries;
+import org.lucares.pdb.datastore.Entries;
import org.lucares.performance.db.PerformanceDb;
import org.lucares.utils.file.FileUtils;
import org.slf4j.Logger;
@@ -50,7 +50,6 @@ public class CsvUploadHandler implements PropertyKeys, DisposableBean {
synchronized (this) {
final CsvToEntryTransformer csvToEntryTransformer = new CsvToEntryTransformer(queue, settings);
try (InputStream in = file.getInputStream()) {
-
csvToEntryTransformer.readCSV(in);
} catch (final Exception e) {
LOGGER.error("csv ingestion failed", e);
diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/CustomExportFormatToEntryTransformer.java b/pdb-ui/src/main/java/org/lucares/pdbui/CustomExportFormatToEntryTransformer.java
new file mode 100644
index 0000000..300c44c
--- /dev/null
+++ b/pdb-ui/src/main/java/org/lucares/pdbui/CustomExportFormatToEntryTransformer.java
@@ -0,0 +1,151 @@
+package org.lucares.pdbui;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.ArrayBlockingQueue;
+import java.util.regex.Pattern;
+
+import org.lucares.pdb.api.Tags;
+import org.lucares.pdb.api.TagsBuilder;
+import org.lucares.pdb.datastore.Entries;
+import org.lucares.pdb.datastore.Entry;
+import org.lucares.performance.db.PdbExport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ *
+ * File format goals: Minimal size/ minimal repetition while also providing a
+ * file format that can be used for "normal" ingestion, not just backup/restore.
+ * It should be easy to implement in any language. It should be easy to debug.
+ *
+ * Note: Line breaks are written as {@code \n}.
+ *
+ *
+ * # // # is the magic byte for the file format used to detect this format
+ * $123:key1=value1,key2=value2\n // $ marks the beginning of a dictionary entry that says: the following number will be used to refer to the following tags.
+ * // In this case the tags key1=value1,key2=value2 will be identified by 123.
+ * // The newline is used as an end marker.
+ * 1534567890,456,123\n // Defines an entry with timestamp 1534567890, duration 456 and tags key1=value1,key2=value2.
+ * 1,789,123\n // Timestamps are encoded using delta encoding. That means this triple defines
+ * // an entry with timestamp 1534567891, duration 789 and tags key1=value1,key2=value2
+ * -2,135,123\n // Timestamp delta encoding can contain negative numbers. This triple defines an entry
+ * // with timestamp 1534567889, duration 135 and tags key1=value1,key2=value2
+ *
+ */
+
+public class CustomExportFormatToEntryTransformer {
+
+    private static final int ENTRY_BUFFER_SIZE = 100;
+
+    private static final Logger LOGGER = LoggerFactory.getLogger(CustomExportFormatToEntryTransformer.class);
+
+    private final Pattern splitByComma = Pattern.compile(",");
+
+    // maps a tags-id (defined by '$' dictionary lines) to the decoded Tags
+    private final Map<Long, Tags> tagsDictionary = new HashMap<>();
+
+    // last absolute timestamp; entry lines carry deltas relative to it
+    private long lastEpochMilli;
+
+    /**
+     * Reads the custom export format from {@code in} and feeds chunks of
+     * {@link Entries} into {@code queue}.
+     *
+     * Broken lines are logged and skipped so that a single bad line does not
+     * abort the whole ingestion.
+     */
+    public void read(final BufferedReader in, final ArrayBlockingQueue<Entries> queue) throws IOException {
+
+        Entries bufferedEntries = new Entries(ENTRY_BUFFER_SIZE);
+
+        try {
+            String line;
+            while ((line = in.readLine()) != null) {
+                try {
+                    if (line.startsWith(PdbExport.MARKER_DICT_ENTRY)) {
+                        readDictionaryEntry(line);
+                    } else {
+                        final Entry entry = readEntry(line);
+                        if (entry != null) {
+
+                            bufferedEntries.add(entry);
+
+                            if (bufferedEntries.size() == ENTRY_BUFFER_SIZE) {
+                                queue.put(bufferedEntries);
+                                bufferedEntries = new Entries(ENTRY_BUFFER_SIZE);
+                            }
+                        }
+                    }
+                } catch (final Exception e) {
+                    LOGGER.error("ignoring line '{}'", line, e);
+                }
+            }
+            // flush the last, partially filled chunk after EOF
+            queue.put(bufferedEntries);
+        } catch (final InterruptedException e) {
+            Thread.currentThread().interrupt();
+            LOGGER.info("aborting because of interruption");
+        }
+    }
+
+    /**
+     * Parses an entry line of the form "timeDelta,duration,tagsId".
+     *
+     * @return the decoded {@link Entry}, or {@code null} if the referenced
+     *         tags-id has not been defined by a dictionary line yet
+     */
+    private Entry readEntry(final String line) {
+
+        final String[] timeValueTags = splitByComma.split(line);
+
+        final long timeDelta = Long.parseLong(timeValueTags[0]);
+        final long value = Long.parseLong(timeValueTags[1]);
+        final long tagsId = Long.parseLong(timeValueTags[2]);
+
+        // timestamps are delta encoded relative to the previous entry
+        lastEpochMilli = lastEpochMilli + timeDelta;
+
+        final Tags tags = tagsDictionary.get(tagsId);
+        if (tags == null) {
+            LOGGER.info("no tags available for tagsId {}. Ignoring line '{}'", tagsId, line);
+            return null;
+        }
+
+        return new Entry(lastEpochMilli, value, tags);
+    }
+
+    /**
+     * Parses a dictionary line of the form "$123:key1=value1,key2=value2" and
+     * registers the tags under the numeric id.
+     */
+    private void readDictionaryEntry(final String line) {
+        final String[] tagsIdToSerializedTags = line.split(Pattern.quote(PdbExport.SEPARATOR_TAG_ID));
+
+        // begin index 1 skips the leading '$' marker of the dictionary line
+        final Long tagId = Long.parseLong(tagsIdToSerializedTags[0], 1, tagsIdToSerializedTags[0].length(), 10);
+        final Tags tags = tagsFromCsv(tagsIdToSerializedTags[1]);
+        tagsDictionary.put(tagId, tags);
+    }
+
+    /**
+     * Decodes serialized tags of the form "key1=value1,key2=value2".
+     */
+    public static Tags tagsFromCsv(final String line) {
+
+        final TagsBuilder tagsBuilder = new TagsBuilder();
+        final String[] tagsAsString = line.split(Pattern.quote(","));
+
+        for (final String tagAsString : tagsAsString) {
+            final String[] keyValue = tagAsString.split(Pattern.quote("="));
+
+            final int key = Tags.STRING_COMPRESSOR.put(keyValue[0]);
+            final int value = Tags.STRING_COMPRESSOR.put(keyValue[1]);
+            tagsBuilder.add(key, value);
+        }
+
+        return tagsBuilder.build();
+    }
+}
\ No newline at end of file
diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/IngestionHandler.java b/pdb-ui/src/main/java/org/lucares/pdbui/IngestionHandler.java
index 94e9d2a..385f50d 100644
--- a/pdb-ui/src/main/java/org/lucares/pdbui/IngestionHandler.java
+++ b/pdb-ui/src/main/java/org/lucares/pdbui/IngestionHandler.java
@@ -1,18 +1,26 @@
package org.lucares.pdbui;
import java.io.BufferedInputStream;
+import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
+import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.Socket;
import java.net.SocketAddress;
+import java.nio.charset.StandardCharsets;
+import java.util.Optional;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeoutException;
import java.util.zip.GZIPInputStream;
+import org.lucares.pdb.datastore.Entries;
+import org.lucares.pdb.datastore.Entry;
import org.lucares.pdbui.CsvReaderSettings.ColumnDefinitions;
-import org.lucares.performance.db.Entries;
+import org.lucares.performance.db.PdbExport;
+
+import com.fasterxml.jackson.core.JsonParseException;
public final class IngestionHandler implements Callable {
@@ -47,7 +55,12 @@ public final class IngestionHandler implements Callable {
private void handleInputStream(final InputStream in) throws IOException, InterruptedException, TimeoutException {
in.mark(1);
final byte firstByte = (byte) in.read();
- if (isGZIP(firstByte)) {
+ if (firstByte == '{') {
+ in.reset();
+ readJSON(in);
+ } else if (firstByte == PdbExport.MAGIC_BYTE) {
+ readCustomExportFormat(in);
+ } else if (isGZIP(firstByte)) {
in.reset();
final GZIPInputStream gzip = new GZIPInputStream(in);
@@ -66,4 +79,47 @@
         // I am cheap and only check the first byte
         return firstByte == 0x1f;
     }
+
+    private void readCustomExportFormat(final InputStream in) throws IOException {
+
+        final CustomExportFormatToEntryTransformer transformer = new CustomExportFormatToEntryTransformer();
+
+        // the transformer consumes the stream line-wise until EOF and fills the queue
+        final BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
+        transformer.read(reader, queue);
+
+    }
+
+    private void readJSON(final InputStream in) throws IOException, InterruptedException {
+        final int chunksize = 100;
+        Entries entries = new Entries(chunksize);
+
+        final BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
+
+        final JsonToEntryTransformer transformer = new JsonToEntryTransformer();
+
+        // every line (including the first) is parsed inside the guard, so a
+        // parse error on line one no longer aborts the stream, and an empty
+        // stream never hands a null line to the transformer
+        String line;
+        while ((line = reader.readLine()) != null) {
+
+            try {
+                final Optional<Entry> entry = transformer.toEntry(line);
+
+                if (entry.isPresent()) {
+                    TcpIngestor.LOGGER.debug("adding entry to queue: {}", entry);
+                    entries.add(entry.get());
+                }
+            } catch (final JsonParseException e) {
+                TcpIngestor.LOGGER.info("json parse error in line '" + line + "'", e);
+            }
+
+            if (entries.size() == chunksize) {
+                queue.put(entries);
+                entries = new Entries(chunksize);
+            }
+        }
+        // flush the last, partially filled chunk
+        queue.put(entries);
+
+    }
 }
\ No newline at end of file
diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/JsonToEntryTransformer.java b/pdb-ui/src/main/java/org/lucares/pdbui/JsonToEntryTransformer.java
new file mode 100644
index 0000000..81f5282
--- /dev/null
+++ b/pdb-ui/src/main/java/org/lucares/pdbui/JsonToEntryTransformer.java
@@ -0,0 +1,97 @@
+package org.lucares.pdbui;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.Optional;
+
+import org.lucares.pdb.api.Tags;
+import org.lucares.pdb.api.TagsBuilder;
+import org.lucares.pdb.datastore.Entry;
+import org.lucares.pdbui.date.FastISODateParser;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
+
+public class JsonToEntryTransformer implements LineToEntryTransformer {
+ private static final Logger LOGGER = LoggerFactory.getLogger(JsonToEntryTransformer.class);
+
+ private final TypeReference