Merge remote-tracking branch 'origin/master'

This commit is contained in:
2022-11-21 13:43:15 +01:00
99 changed files with 17240 additions and 9477 deletions

1
.gitignore vendored
View File

@@ -6,3 +6,4 @@
/build/ /build/
/target/ /target/
/test-output/ /test-output/
java_pid*

View File

@@ -3,7 +3,7 @@
## start Angular development server ## start Angular development server
``` ```
cd pdb-js cd pdb-js
build/npm/npm-v6.14.8/bin/npm run ng serve build/npm/npm-v8.1.2/bin/npm run ng serve
``` ```
or or
@@ -15,7 +15,7 @@ gradlew npm_run_ng_serve
``` ```
cd pdb-js cd pdb-js
build/npm/npm-v6.14.8/bin/npm run ng generate component component-name build/npm/npm-v8.1.2/bin/npm run ng generate component component-name
``` ```
## update JavaScript libraries ## update JavaScript libraries

View File

@@ -188,6 +188,13 @@ public class PersistentMap<K, V> implements AutoCloseable {
private long version; private long version;
/**
*
* @param path file for the index, must be child of storageBasePath
* @param storageBasePath base path
* @param keyEncoder encoder for keys
* @param valueEncoder encoder for values
*/
public PersistentMap(final Path path, final Path storageBasePath, final EncoderDecoder<K> keyEncoder, public PersistentMap(final Path path, final Path storageBasePath, final EncoderDecoder<K> keyEncoder,
final EncoderDecoder<V> valueEncoder) { final EncoderDecoder<V> valueEncoder) {
this.path = path; this.path = path;
@@ -633,6 +640,14 @@ public class PersistentMap<K, V> implements AutoCloseable {
return stats; return stats;
} }
public synchronized boolean isEmpty() {
final long rootNodeOffset = readNodeOffsetOfRootNode();
final PersistentMapDiskNode node = getNode(rootNodeOffset);
final List<NodeEntry> entries = node.getEntries();
return entries.size() == 1; // the empty map has a single NodeEntry for the PersistentMapDiskNode.MAX_KEY
}
private void swapFiles(final Path newFile) throws IOException { private void swapFiles(final Path newFile) throws IOException {
final Path backupFile = path.getParent().resolve(path.getFileName() + "." final Path backupFile = path.getParent().resolve(path.getFileName() + "."
+ DateTimeFormatter.ofPattern("yyyyMMdd-HHmmss").format(OffsetDateTime.now()) + ".backup"); + DateTimeFormatter.ofPattern("yyyyMMdd-HHmmss").format(OffsetDateTime.now()) + ".backup");

View File

@@ -4,10 +4,12 @@ import java.io.IOException;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.security.SecureRandom; import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.HashMap; import java.util.HashMap;
import java.util.LinkedHashMap; import java.util.LinkedHashMap;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.Queue; import java.util.Queue;
@@ -57,6 +59,26 @@ public class PersistentMapTest {
} }
} }
@Test
public void testUpdateValues() throws Exception {
final Path file = dataDirectory.resolve("map.db");
try (final PersistentMap<String, String> map = new PersistentMap<>(file, dataDirectory,
PersistentMap.STRING_CODER, PersistentMap.STRING_CODER)) {
map.putValue("key", "first");
Assertions.assertEquals("first", map.getValue("key"));
map.putValue("key", "second");
Assertions.assertEquals("second", map.getValue("key"));
final List<String> allValuesInMap = new ArrayList<>();
map.forAll((k, v) -> {
allValuesInMap.add(v);
});
Assertions.assertEquals(List.of("second"), allValuesInMap);
}
}
@Test @Test
public void testManyValues() throws Exception { public void testManyValues() throws Exception {
final Path file = dataDirectory.resolve("map.db"); final Path file = dataDirectory.resolve("map.db");
@@ -375,6 +397,29 @@ public class PersistentMapTest {
} }
} }
@Test
public void testIsEmpty() throws IOException {
final Path file = dataDirectory.resolve("map.db");
try (final PersistentMap<Long, Long> map = new PersistentMap<>(file, dataDirectory, PersistentMap.LONG_CODER,
PersistentMap.LONG_CODER)) {
Assertions.assertTrue(map.isEmpty(), "new created map is empty");
}
try (final PersistentMap<Long, Long> map = new PersistentMap<>(file, dataDirectory, PersistentMap.LONG_CODER,
PersistentMap.LONG_CODER)) {
Assertions.assertTrue(map.isEmpty(), "map is empty after reading an empty map from disk");
map.putValue(1L, 2L);
Assertions.assertFalse(map.isEmpty(), "map is empty after putting a value");
}
try (final PersistentMap<Long, Long> map = new PersistentMap<>(file, dataDirectory, PersistentMap.LONG_CODER,
PersistentMap.LONG_CODER)) {
Assertions.assertFalse(map.isEmpty(), "map is empty when reading a non empty map from disk");
}
}
private void assertValuesInMap(final Map<Long, Long> insertedValues, final PersistentMap<Long, Long> map) { private void assertValuesInMap(final Map<Long, Long> insertedValues, final PersistentMap<Long, Long> map) {
final AtomicInteger counter = new AtomicInteger(); final AtomicInteger counter = new AtomicInteger();
final AtomicInteger maxDepth = new AtomicInteger(); final AtomicInteger maxDepth = new AtomicInteger();

View File

@@ -4,27 +4,27 @@ import org.apache.tools.ant.filters.ReplaceTokens
plugins { plugins {
id 'java' id 'java'
id 'eclipse' id 'eclipse'
id 'com.github.ben-manes.versions' version "0.39.0" // check for dependency updates run: gradlew dependenyUpdates id 'com.github.ben-manes.versions' version "0.42.0" // check for dependency updates run: gradlew dependenyUpdates
} }
ext { ext {
javaVersion=16 javaVersion=17
version_log4j2= '2.14.1' // keep in sync with spring-boot-starter-log4j2 version_log4j2= '2.17.2' // keep in sync with spring-boot-starter-log4j2
version_spring = '2.5.4' version_spring = '2.7.4'
version_junit = '5.7.2' version_junit = '5.9.1'
version_junit_platform = '1.7.2' version_junit_platform = '1.9.1'
version_nodejs = '14.17.3' // keep in sync with npm version_nodejs = '16.17.1' // keep in sync with npm
version_npm = '6.14.13' // keep in sync with nodejs version_npm = '8.15.0' // keep in sync with nodejs
lib_antlr = "org.antlr:antlr4:4.9.2" lib_antlr = "org.antlr:antlr4:4.11.1"
lib_commons_collections4 = 'org.apache.commons:commons-collections4:4.4' lib_commons_collections4 = 'org.apache.commons:commons-collections4:4.4'
lib_commons_csv= 'org.apache.commons:commons-csv:1.9.0' lib_commons_csv= 'org.apache.commons:commons-csv:1.9.0'
lib_commons_lang3 = 'org.apache.commons:commons-lang3:3.12.0' lib_commons_lang3 = 'org.apache.commons:commons-lang3:3.12.0'
lib_jackson_databind = 'com.fasterxml.jackson.core:jackson-databind:2.12.4' lib_jackson_databind = 'com.fasterxml.jackson.core:jackson-databind:2.13.2'
lib_log4j2_core = "org.apache.logging.log4j:log4j-core:${version_log4j2}" lib_log4j2_core = "org.apache.logging.log4j:log4j-core:${version_log4j2}"
lib_log4j2_slf4j_impl = "org.apache.logging.log4j:log4j-slf4j-impl:${version_log4j2}" lib_log4j2_slf4j_impl = "org.apache.logging.log4j:log4j-slf4j-impl:${version_log4j2}"
@@ -136,5 +136,5 @@ subprojects {
} }
wrapper { wrapper {
gradleVersion = '7.2' gradleVersion = '7.5.1'
} }

View File

@@ -37,7 +37,7 @@ public class Entry {
public String toString() { public String toString() {
final OffsetDateTime date = Instant.ofEpochMilli(epochMilli).atOffset(ZoneOffset.UTC); final OffsetDateTime date = Instant.ofEpochMilli(epochMilli).atOffset(ZoneOffset.UTC);
return date.format(DateTimeFormatter.ISO_ZONED_DATE_TIME) + " = " + value + " (" + tags.asString() + ")"; return date.format(DateTimeFormatter.ISO_ZONED_DATE_TIME) + " = " + value + " (" + tags + ")";
} }
@Override @Override

View File

@@ -40,8 +40,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
public class DataStore implements AutoCloseable { public class DataStore implements AutoCloseable {
private static final String ALL_DOCS_KEY = "\ue001allDocs"; // \ue001 is the second character in the private use
// area
private static final Logger EXECUTE_QUERY_LOGGER = LoggerFactory private static final Logger EXECUTE_QUERY_LOGGER = LoggerFactory
.getLogger("org.lucares.metrics.dataStore.executeQuery"); .getLogger("org.lucares.metrics.dataStore.executeQuery");
private static final Logger MAP_DOCS_TO_DOCID = LoggerFactory private static final Logger MAP_DOCS_TO_DOCID = LoggerFactory
@@ -60,8 +59,6 @@ public class DataStore implements AutoCloseable {
// ids when getting them from the BSFiles) // ids when getting them from the BSFiles)
private static final AtomicLong NEXT_DOC_ID = new AtomicLong(System.currentTimeMillis()); private static final AtomicLong NEXT_DOC_ID = new AtomicLong(System.currentTimeMillis());
public static Tag TAG_ALL_DOCS = null;
private static final class PartitionedTagsCacheKey { private static final class PartitionedTagsCacheKey {
private final Tags tags; private final Tags tags;
private final ParititionId partitionId; private final ParititionId partitionId;
@@ -121,20 +118,18 @@ public class DataStore implements AutoCloseable {
private final PartitionDiskStore diskStorage; private final PartitionDiskStore diskStorage;
private final Path storageBasePath; private final Path storageBasePath;
private final StringCompressor stringCompressor;
public DataStore(final Path dataDirectory) throws IOException { public DataStore(final Path dataDirectory) throws IOException {
storageBasePath = storageDirectory(dataDirectory); storageBasePath = storageDirectory(dataDirectory);
Tags.STRING_COMPRESSOR = StringCompressor.create(keyCompressionFile(storageBasePath)); stringCompressor = StringCompressor.create(storageBasePath);
Tags.STRING_COMPRESSOR.put(ALL_DOCS_KEY); Tags.STRING_COMPRESSOR = stringCompressor;
Tags.STRING_COMPRESSOR.put("");
TAG_ALL_DOCS = Tags.STRING_COMPRESSOR.createTag(ALL_DOCS_KEY, ""); // Tag(String, String) uses the
// StringCompressor internally, so it
// must be initialized after the string compressor has been created
diskStorage = new PartitionDiskStore(storageBasePath, "data.bs"); diskStorage = new PartitionDiskStore(storageBasePath, "data.bs");
tagToDocsId = new PartitionPersistentMap<>(storageBasePath, "keyToValueToDocIdsIndex.bs", tagToDocsId = new PartitionPersistentMap<>(storageBasePath, "keyToValueToDocIdsIndex.bs",
new TagEncoderDecoder(), PartitionAwareWrapper.wrap(PersistentMap.LONG_CODER)); new TagEncoderDecoder(stringCompressor), PartitionAwareWrapper.wrap(PersistentMap.LONG_CODER));
tagsToDocId = new PartitionPersistentMap<>(storageBasePath, "tagsToDocIdIndex.bs", new TagsEncoderDecoder(), tagsToDocId = new PartitionPersistentMap<>(storageBasePath, "tagsToDocIdIndex.bs", new TagsEncoderDecoder(),
PartitionAwareWrapper.wrap(PersistentMap.LONG_CODER)); PartitionAwareWrapper.wrap(PersistentMap.LONG_CODER));
@@ -142,16 +137,12 @@ public class DataStore implements AutoCloseable {
docIdToDoc = new PartitionPersistentMap<>(storageBasePath, "docIdToDocIndex.bs", PersistentMap.LONG_CODER, docIdToDoc = new PartitionPersistentMap<>(storageBasePath, "docIdToDocIndex.bs", PersistentMap.LONG_CODER,
new DocEncoderDecoder()); new DocEncoderDecoder());
queryCompletionIndex = new QueryCompletionIndex(storageBasePath); queryCompletionIndex = new QueryCompletionIndex(storageBasePath, stringCompressor);
writerCache = new HotEntryCache<>(Duration.ofSeconds(10), 1000); writerCache = new HotEntryCache<>(Duration.ofSeconds(10), 1000);
writerCache.addListener((key, value) -> value.close()); writerCache.addListener((key, value) -> value.close());
} }
private Path keyCompressionFile(final Path dataDirectory) throws IOException {
return dataDirectory.resolve("keys.csv");
}
public static Path storageDirectory(final Path dataDirectory) throws IOException { public static Path storageDirectory(final Path dataDirectory) throws IOException {
return dataDirectory.resolve(SUBDIR_STORAGE); return dataDirectory.resolve(SUBDIR_STORAGE);
} }
@@ -163,11 +154,15 @@ public class DataStore implements AutoCloseable {
final long start = System.nanoTime(); final long start = System.nanoTime();
writer.write(dateAsEpochMilli, value); writer.write(dateAsEpochMilli, value);
final double duration = (System.nanoTime() - start) / 1_000_000.0; final double duration = (System.nanoTime() - start) / 1_000_000.0;
if (duration > 1) { if (duration > 10) {
System.out.println(" write took: " + duration + " ms " + tags); System.out.println(" write took: " + duration + " ms " + tags);
} }
} }
public StringCompressor getStringCompressor() {
return stringCompressor;
}
// visible for test // visible for test
QueryCompletionIndex getQueryCompletionIndex() { QueryCompletionIndex getQueryCompletionIndex() {
return queryCompletionIndex; return queryCompletionIndex;
@@ -186,7 +181,7 @@ public class DataStore implements AutoCloseable {
// store mapping from tag to docId, so that we can find all docs for a given tag // store mapping from tag to docId, so that we can find all docs for a given tag
final List<Tag> ts = new ArrayList<>(tags.toTags()); final List<Tag> ts = new ArrayList<>(tags.toTags());
ts.add(TAG_ALL_DOCS); ts.add(StringCompressor.TAG_ALL_DOCS);
for (final Tag tag : ts) { for (final Tag tag : ts) {
Long diskStoreOffsetForDocIdsOfTag = tagToDocsId.getValue(partitionId, tag); Long diskStoreOffsetForDocIdsOfTag = tagToDocsId.getValue(partitionId, tag);
@@ -270,13 +265,13 @@ public class DataStore implements AutoCloseable {
final Set<String> keys = new HashSet<>(); final Set<String> keys = new HashSet<>();
final Tag keyPrefix = Tags.STRING_COMPRESSOR.createTag("", ""); // will find everything final Tag keyPrefix = stringCompressor.createTag("", ""); // will find everything
final PartitionIdSource partitionIdSource = new DatePartitioner(dateRange); final PartitionIdSource partitionIdSource = new DatePartitioner(dateRange);
tagToDocsId.visitValues(partitionIdSource, keyPrefix, tagToDocsId.visitValues(partitionIdSource, keyPrefix,
(tag, __) -> keys.add(Tags.STRING_COMPRESSOR.getKeyAsString(tag))); (tag, __) -> keys.add(stringCompressor.getKeyAsString(tag)));
keys.remove(ALL_DOCS_KEY); keys.remove(StringCompressor.ALL_DOCS_KEY);
final List<String> result = new ArrayList<>(keys); final List<String> result = new ArrayList<>(keys);
Collections.sort(result); Collections.sort(result);
return result; return result;
@@ -286,9 +281,9 @@ public class DataStore implements AutoCloseable {
private PartitionLongList executeQuery(final Query query) { private PartitionLongList executeQuery(final Query query) {
final long start = System.nanoTime(); final long start = System.nanoTime();
synchronized (docIdToDoc) { synchronized (docIdToDoc) {
final Expression expression = QueryLanguageParser.parse(query.getQuery()); final Expression expression = QueryLanguageParser.parse(query.getQuery(), stringCompressor);
final ExpressionToDocIdVisitor visitor = new ExpressionToDocIdVisitor(query.getDateRange(), tagToDocsId, final ExpressionToDocIdVisitor visitor = new ExpressionToDocIdVisitor(query.getDateRange(), tagToDocsId,
diskStorage); diskStorage, stringCompressor);
final PartitionLongList docIdsList = expression.visit(visitor); final PartitionLongList docIdsList = expression.visit(visitor);
EXECUTE_QUERY_LOGGER.debug("executeQuery({}) took {}ms returned {} results ", query, EXECUTE_QUERY_LOGGER.debug("executeQuery({}) took {}ms returned {} results ", query,
(System.nanoTime() - start) / 1_000_000.0, docIdsList.size()); (System.nanoTime() - start) / 1_000_000.0, docIdsList.size());
@@ -372,7 +367,7 @@ public class DataStore implements AutoCloseable {
public List<Proposal> propose(final QueryWithCaretMarker query) { public List<Proposal> propose(final QueryWithCaretMarker query) {
final NewProposerParser newProposerParser = new NewProposerParser(queryCompletionIndex); final NewProposerParser newProposerParser = new NewProposerParser(queryCompletionIndex, stringCompressor);
final List<Proposal> proposals = newProposerParser.propose(query); final List<Proposal> proposals = newProposerParser.propose(query);
LOGGER.debug("Proposals for query {}: {}", query, proposals); LOGGER.debug("Proposals for query {}: {}", query, proposals);
return proposals; return proposals;
@@ -387,7 +382,7 @@ public class DataStore implements AutoCloseable {
final PartitionedTagsCacheKey cacheKey = new PartitionedTagsCacheKey(tags, partitionId); final PartitionedTagsCacheKey cacheKey = new PartitionedTagsCacheKey(tags, partitionId);
final PdbWriter result = writerCache.putIfAbsent(cacheKey, t -> getWriterInternal(partitionId, tags)); final PdbWriter result = writerCache.putIfAbsent(cacheKey, t -> getWriterInternal(partitionId, tags));
final double duration = (System.nanoTime() - start) / 1_000_000.0; final double duration = (System.nanoTime() - start) / 1_000_000.0;
if (duration > 1) { if (duration > 100) {
System.out.println(" get Writer took: " + duration + " ms " + tags); System.out.println(" get Writer took: " + duration + " ms " + tags);
} }
return result; return result;
@@ -408,7 +403,7 @@ public class DataStore implements AutoCloseable {
final PdbFile pdbFile = new PdbFile(partitionId, doc.getRootBlockNumber(), tags); final PdbFile pdbFile = new PdbFile(partitionId, doc.getRootBlockNumber(), tags);
writer = new PdbWriter(pdbFile, diskStorage.getExisting(partitionId)); writer = new PdbWriter(pdbFile, diskStorage.getExisting(partitionId));
final double duration = (System.nanoTime() - start) / 1_000_000.0; final double duration = (System.nanoTime() - start) / 1_000_000.0;
if (duration > 1) { if (duration > 100) {
System.out.println(" init existing writer took: " + duration + " ms " + tags); System.out.println(" init existing writer took: " + duration + " ms " + tags);
} }
} catch (final RuntimeException e) { } catch (final RuntimeException e) {
@@ -427,7 +422,7 @@ public class DataStore implements AutoCloseable {
final PdbWriter result = new PdbWriter(pdbFile, diskStorage.getExisting(partitionId)); final PdbWriter result = new PdbWriter(pdbFile, diskStorage.getExisting(partitionId));
final double duration = (System.nanoTime() - start) / 1_000_000.0; final double duration = (System.nanoTime() - start) / 1_000_000.0;
if (duration > 1) { if (duration > 10) {
METRICS_LOGGER_NEW_WRITER.info("newPdbWriter took {}ms tags: {}", duration, tags); METRICS_LOGGER_NEW_WRITER.info("newPdbWriter took {}ms tags: {}", duration, tags);
} }
return result; return result;

View File

@@ -9,6 +9,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
import org.lucares.collections.LongList; import org.lucares.collections.LongList;
import org.lucares.pdb.api.DateTimeRange; import org.lucares.pdb.api.DateTimeRange;
import org.lucares.pdb.api.StringCompressor;
import org.lucares.pdb.api.Tag; import org.lucares.pdb.api.Tag;
import org.lucares.pdb.api.Tags; import org.lucares.pdb.api.Tags;
import org.lucares.pdb.datastore.lang.QueryCompletionExpressionOptimizer; import org.lucares.pdb.datastore.lang.QueryCompletionExpressionOptimizer;
@@ -152,12 +153,6 @@ public class QueryCompletionIndex implements AutoCloseable {
this.tagB = tagB; this.tagB = tagB;
} }
public TwoTags(final String fieldB, final String fieldA, final String valueA, final String valueB) {
tagA = Tags.STRING_COMPRESSOR.createTag(fieldA, valueA);
tagB = Tags.STRING_COMPRESSOR.createTag(fieldB, valueB);
}
public Tag getTagA() { public Tag getTagA() {
return tagA; return tagA;
} }
@@ -275,6 +270,12 @@ public class QueryCompletionIndex implements AutoCloseable {
private static final class EncoderField implements EncoderDecoder<String> { private static final class EncoderField implements EncoderDecoder<String> {
private final StringCompressor stringCompressor;
public EncoderField(final StringCompressor stringCompressor) {
this.stringCompressor = stringCompressor;
}
@Override @Override
public byte[] encode(final String field) { public byte[] encode(final String field) {
@@ -282,13 +283,13 @@ public class QueryCompletionIndex implements AutoCloseable {
return new byte[0]; return new byte[0];
} }
return VariableByteEncoder.encode(Tags.STRING_COMPRESSOR.put(field)); return VariableByteEncoder.encode(stringCompressor.putString(field));
} }
@Override @Override
public String decode(final byte[] bytes) { public String decode(final byte[] bytes) {
final long compressedString = VariableByteEncoder.decodeFirstValue(bytes); final long compressedString = VariableByteEncoder.decodeFirstValue(bytes);
return Tags.STRING_COMPRESSOR.get((int) compressedString); return stringCompressor.getString((int) compressedString);
} }
@Override @Override
@@ -300,16 +301,18 @@ public class QueryCompletionIndex implements AutoCloseable {
private final PartitionPersistentMap<TwoTags, Empty, Empty> tagToTagIndex; private final PartitionPersistentMap<TwoTags, Empty, Empty> tagToTagIndex;
private final PartitionPersistentMap<Tag, Empty, Empty> fieldToValueIndex; private final PartitionPersistentMap<Tag, Empty, Empty> fieldToValueIndex;
private final PartitionPersistentMap<String, Empty, Empty> fieldIndex; private final PartitionPersistentMap<String, Empty, Empty> fieldIndex;
private final StringCompressor stringCompressor;
public QueryCompletionIndex(final Path basePath) throws IOException { public QueryCompletionIndex(final Path basePath, final StringCompressor stringCompressor) throws IOException {
this.stringCompressor = stringCompressor;
tagToTagIndex = new PartitionPersistentMap<>(basePath, "queryCompletionTagToTagIndex.bs", new EncoderTwoTags(), tagToTagIndex = new PartitionPersistentMap<>(basePath, "queryCompletionTagToTagIndex.bs", new EncoderTwoTags(),
PartitionAwareWrapper.wrap(PersistentMap.EMPTY_ENCODER)); PartitionAwareWrapper.wrap(PersistentMap.EMPTY_ENCODER));
fieldToValueIndex = new PartitionPersistentMap<>(basePath, "queryCompletionFieldToValueIndex.bs", fieldToValueIndex = new PartitionPersistentMap<>(basePath, "queryCompletionFieldToValueIndex.bs",
new EncoderTag(), PartitionAwareWrapper.wrap(PersistentMap.EMPTY_ENCODER)); new EncoderTag(), PartitionAwareWrapper.wrap(PersistentMap.EMPTY_ENCODER));
fieldIndex = new PartitionPersistentMap<>(basePath, "queryCompletionFieldIndex.bs", new EncoderField(), fieldIndex = new PartitionPersistentMap<>(basePath, "queryCompletionFieldIndex.bs",
PartitionAwareWrapper.wrap(PersistentMap.EMPTY_ENCODER)); new EncoderField(stringCompressor), PartitionAwareWrapper.wrap(PersistentMap.EMPTY_ENCODER));
} }
public void addTags(final ParititionId partitionId, final Tags tags) throws IOException { public void addTags(final ParititionId partitionId, final Tags tags) throws IOException {
@@ -328,10 +331,10 @@ public class QueryCompletionIndex implements AutoCloseable {
// create indices of all tags and all fields // create indices of all tags and all fields
for (final Tag tag : listOfTagsA) { for (final Tag tag : listOfTagsA) {
fieldToValueIndex.putValue(partitionId, tag, Empty.INSTANCE); fieldToValueIndex.putValue(partitionId, tag, Empty.INSTANCE);
fieldIndex.putValue(partitionId, Tags.STRING_COMPRESSOR.getKeyAsString(tag), Empty.INSTANCE); fieldIndex.putValue(partitionId, stringCompressor.getKeyAsString(tag), Empty.INSTANCE);
} }
final double d = (System.nanoTime() - start) / 1_000_000.0; final double d = (System.nanoTime() - start) / 1_000_000.0;
if (d > 1) { if (d > 10) {
System.out.println(" addTags: " + d + " ms"); System.out.println(" addTags: " + d + " ms");
} }
} }
@@ -356,15 +359,16 @@ public class QueryCompletionIndex implements AutoCloseable {
final SortedSet<String> result = new TreeSet<>(); final SortedSet<String> result = new TreeSet<>();
final TwoTags keyPrefix = new TwoTags(fieldB, fieldA, null, null); final TwoTags keyPrefix = new TwoTags(stringCompressor.createTag(fieldA, null),
stringCompressor.createTag(fieldB, null));
final PartitionIdSource partitionIdSource = new DatePartitioner(dateRange); final PartitionIdSource partitionIdSource = new DatePartitioner(dateRange);
tagToTagIndex.visitValues(partitionIdSource, keyPrefix, (k, v) -> { tagToTagIndex.visitValues(partitionIdSource, keyPrefix, (k, v) -> {
final String vA = Tags.STRING_COMPRESSOR.getValueAsString(k.getTagA()); final String vA = stringCompressor.getValueAsString(k.getTagA());
if (valueA.matches(vA)) { if (valueA.matches(vA)) {
result.add(Tags.STRING_COMPRESSOR.getValueAsString(k.getTagB())); result.add(stringCompressor.getValueAsString(k.getTagB()));
} }
}); });
@@ -383,14 +387,14 @@ public class QueryCompletionIndex implements AutoCloseable {
public SortedSet<String> find(final DateTimeRange dateRange, final Tag tag, final String field) { public SortedSet<String> find(final DateTimeRange dateRange, final Tag tag, final String field) {
final SortedSet<String> result = new TreeSet<>(); final SortedSet<String> result = new TreeSet<>();
final int tagBKey = Tags.STRING_COMPRESSOR.put(field); final int tagBKey = stringCompressor.putString(field);
final Tag tagB = new Tag(tagBKey, -1); // the value must be negative for the prefix search to work. See final Tag tagB = new Tag(tagBKey, -1); // the value must be negative for the prefix search to work. See
// EncoderTwoTags // EncoderTwoTags
final TwoTags keyPrefix = new TwoTags(tag, tagB); final TwoTags keyPrefix = new TwoTags(tag, tagB);
final PartitionIdSource partitionIdSource = new DatePartitioner(dateRange); final PartitionIdSource partitionIdSource = new DatePartitioner(dateRange);
tagToTagIndex.visitValues(partitionIdSource, keyPrefix, (k, v) -> { tagToTagIndex.visitValues(partitionIdSource, keyPrefix, (k, v) -> {
result.add(Tags.STRING_COMPRESSOR.getValueAsString(k.getTagB())); result.add(stringCompressor.getValueAsString(k.getTagB()));
}); });
return result; return result;
@@ -406,12 +410,12 @@ public class QueryCompletionIndex implements AutoCloseable {
public SortedSet<String> findAllValuesForField(final DateTimeRange dateRange, final String field) { public SortedSet<String> findAllValuesForField(final DateTimeRange dateRange, final String field) {
final SortedSet<String> result = new TreeSet<>(); final SortedSet<String> result = new TreeSet<>();
final int tagKey = Tags.STRING_COMPRESSOR.put(field); final int tagKey = stringCompressor.putString(field);
final Tag keyPrefix = new Tag(tagKey, -1); // the value must be negative for the prefix search to work. See final Tag keyPrefix = new Tag(tagKey, -1); // the value must be negative for the prefix search to work. See
final PartitionIdSource partitionIdSource = new DatePartitioner(dateRange); final PartitionIdSource partitionIdSource = new DatePartitioner(dateRange);
fieldToValueIndex.visitValues(partitionIdSource, keyPrefix, (k, v) -> { fieldToValueIndex.visitValues(partitionIdSource, keyPrefix, (k, v) -> {
result.add(Tags.STRING_COMPRESSOR.getValueAsString(k)); result.add(stringCompressor.getValueAsString(k));
}); });
return result; return result;
@@ -431,7 +435,7 @@ public class QueryCompletionIndex implements AutoCloseable {
final String field) { final String field) {
final SortedSet<String> result = new TreeSet<>(); final SortedSet<String> result = new TreeSet<>();
final TwoTags keyPrefix = new TwoTags(field, Tags.STRING_COMPRESSOR.getKeyAsString(tag), null, null); final TwoTags keyPrefix = new TwoTags(tag.unsetValue(), stringCompressor.createTag(field, null));
final int negatedValueA = tag.getValue(); final int negatedValueA = tag.getValue();
final PartitionIdSource partitionIdSource = new DatePartitioner(dateRange); final PartitionIdSource partitionIdSource = new DatePartitioner(dateRange);
@@ -439,7 +443,7 @@ public class QueryCompletionIndex implements AutoCloseable {
final int valueA = k.getTagA().getValue(); final int valueA = k.getTagA().getValue();
if (valueA != negatedValueA) { if (valueA != negatedValueA) {
result.add(Tags.STRING_COMPRESSOR.getValueAsString(k.getTagB())); result.add(stringCompressor.getValueAsString(k.getTagB()));
} }
}); });

View File

@@ -1,26 +1,32 @@
package org.lucares.pdb.datastore.internal; package org.lucares.pdb.datastore.internal;
import org.lucares.collections.LongList; import org.lucares.collections.LongList;
import org.lucares.pdb.api.StringCompressor;
import org.lucares.pdb.api.Tag; import org.lucares.pdb.api.Tag;
import org.lucares.pdb.api.Tags;
import org.lucares.pdb.map.PersistentMap.EncoderDecoder; import org.lucares.pdb.map.PersistentMap.EncoderDecoder;
import org.lucares.utils.byteencoder.VariableByteEncoder; import org.lucares.utils.byteencoder.VariableByteEncoder;
class TagEncoderDecoder implements EncoderDecoder<Tag> { class TagEncoderDecoder implements EncoderDecoder<Tag> {
private final StringCompressor stringCompressor;
public TagEncoderDecoder(final StringCompressor stringCompressor) {
this.stringCompressor = stringCompressor;
}
@Override @Override
public byte[] encode(final Tag tag) { public byte[] encode(final Tag tag) {
final LongList keyAndValueCompressed = new LongList(2); final LongList keyAndValueCompressed = new LongList(2);
final String key = Tags.STRING_COMPRESSOR.getKeyAsString(tag); final String key = stringCompressor.getKeyAsString(tag);
final byte[] result; final byte[] result;
if (!key.isEmpty()) { if (!key.isEmpty()) {
final Integer keyAsLong = Tags.STRING_COMPRESSOR.put(key); final Integer keyAsLong = stringCompressor.putString(key);
keyAndValueCompressed.add(keyAsLong); keyAndValueCompressed.add(keyAsLong);
final String value = Tags.STRING_COMPRESSOR.getValueAsString(tag); final String value = stringCompressor.getValueAsString(tag);
if (!value.isEmpty()) { if (!value.isEmpty()) {
final Integer valueAsLong = Tags.STRING_COMPRESSOR.put(value); final Integer valueAsLong = stringCompressor.putString(value);
keyAndValueCompressed.add(valueAsLong); keyAndValueCompressed.add(valueAsLong);
} }
result = VariableByteEncoder.encode(keyAndValueCompressed); result = VariableByteEncoder.encode(keyAndValueCompressed);
@@ -38,17 +44,17 @@ class TagEncoderDecoder implements EncoderDecoder<Tag> {
switch (compressedStrings.size()) { switch (compressedStrings.size()) {
case 0: case 0:
result = Tags.STRING_COMPRESSOR.createTag("", ""); result = stringCompressor.createTag("", "");
break; break;
case 1: case 1:
final String k = Tags.STRING_COMPRESSOR.get((int) compressedStrings.get(0)); final String k = stringCompressor.getString((int) compressedStrings.get(0));
result = Tags.STRING_COMPRESSOR.createTag(k, ""); result = stringCompressor.createTag(k, "");
break; break;
case 2: case 2:
final String key = Tags.STRING_COMPRESSOR.get((int) compressedStrings.get(0)); final String key = stringCompressor.getString((int) compressedStrings.get(0));
final String value = Tags.STRING_COMPRESSOR.get((int) compressedStrings.get(1)); final String value = stringCompressor.getString((int) compressedStrings.get(1));
result = Tags.STRING_COMPRESSOR.createTag(key, value); result = stringCompressor.createTag(key, value);
break; break;
default: default:
throw new IllegalStateException("too many values: " + compressedStrings); throw new IllegalStateException("too many values: " + compressedStrings);

View File

@@ -9,10 +9,9 @@ import java.util.stream.Collectors;
import org.lucares.collections.LongList; import org.lucares.collections.LongList;
import org.lucares.pdb.api.DateTimeRange; import org.lucares.pdb.api.DateTimeRange;
import org.lucares.pdb.api.StringCompressor;
import org.lucares.pdb.api.Tag; import org.lucares.pdb.api.Tag;
import org.lucares.pdb.api.Tags;
import org.lucares.pdb.blockstorage.LongStreamFile; import org.lucares.pdb.blockstorage.LongStreamFile;
import org.lucares.pdb.datastore.internal.DataStore;
import org.lucares.pdb.datastore.internal.DatePartitioner; import org.lucares.pdb.datastore.internal.DatePartitioner;
import org.lucares.pdb.datastore.internal.ParititionId; import org.lucares.pdb.datastore.internal.ParititionId;
import org.lucares.pdb.datastore.internal.PartitionDiskStore; import org.lucares.pdb.datastore.internal.PartitionDiskStore;
@@ -34,8 +33,12 @@ public class ExpressionToDocIdVisitor extends ExpressionVisitor<PartitionLongLis
private final DatePartitioner datePartitioner; private final DatePartitioner datePartitioner;
private final StringCompressor stringCompressor;
public ExpressionToDocIdVisitor(final DateTimeRange dateRange, public ExpressionToDocIdVisitor(final DateTimeRange dateRange,
final PartitionPersistentMap<Tag, Long, Long> keyToValueToDocsId, final PartitionDiskStore diskStorage) { final PartitionPersistentMap<Tag, Long, Long> keyToValueToDocsId, final PartitionDiskStore diskStorage,
final StringCompressor stringCompressor) {
this.stringCompressor = stringCompressor;
this.datePartitioner = new DatePartitioner(dateRange); this.datePartitioner = new DatePartitioner(dateRange);
this.keyToValueToDocId = keyToValueToDocsId; this.keyToValueToDocId = keyToValueToDocsId;
this.diskStorage = diskStorage; this.diskStorage = diskStorage;
@@ -131,7 +134,7 @@ public class ExpressionToDocIdVisitor extends ExpressionVisitor<PartitionLongLis
final Set<ParititionId> availablePartitionIds = keyToValueToDocId.getAvailablePartitionIds(datePartitioner); final Set<ParititionId> availablePartitionIds = keyToValueToDocId.getAvailablePartitionIds(datePartitioner);
for (final ParititionId partitionId : availablePartitionIds) { for (final ParititionId partitionId : availablePartitionIds) {
final Long blockOffset = keyToValueToDocId.getValue(partitionId, DataStore.TAG_ALL_DOCS); final Long blockOffset = keyToValueToDocId.getValue(partitionId, StringCompressor.TAG_ALL_DOCS);
if (blockOffset != null) { if (blockOffset != null) {
final LongStreamFile bsFile = diskStorage.streamExistingFile(blockOffset, partitionId); final LongStreamFile bsFile = diskStorage.streamExistingFile(blockOffset, partitionId);
@@ -149,9 +152,9 @@ public class ExpressionToDocIdVisitor extends ExpressionVisitor<PartitionLongLis
final Set<ParititionId> availablePartitionIds = keyToValueToDocId.getAvailablePartitionIds(datePartitioner); final Set<ParititionId> availablePartitionIds = keyToValueToDocId.getAvailablePartitionIds(datePartitioner);
for (final ParititionId partitionId : availablePartitionIds) { for (final ParititionId partitionId : availablePartitionIds) {
final List<LongList> docIdsForPartition = new ArrayList<>(); final List<LongList> docIdsForPartition = new ArrayList<>();
keyToValueToDocId.visitValues(partitionId, Tags.STRING_COMPRESSOR.createTag(propertyName, ""), keyToValueToDocId.visitValues(partitionId, stringCompressor.createTag(propertyName, ""),
(tag, blockOffsetToDocIds) -> { (tag, blockOffsetToDocIds) -> {
if (valuePattern.matcher(Tags.STRING_COMPRESSOR.getValueAsString(tag)).matches()) { if (valuePattern.matcher(stringCompressor.getValueAsString(tag)).matches()) {
try (final LongStreamFile bsFile = diskStorage.streamExistingFile(blockOffsetToDocIds, try (final LongStreamFile bsFile = diskStorage.streamExistingFile(blockOffsetToDocIds,
partitionId)) { partitionId)) {

View File

@@ -1,11 +1,17 @@
package org.lucares.pdb.datastore.lang; package org.lucares.pdb.datastore.lang;
import org.lucares.pdb.api.Tags; import org.lucares.pdb.api.StringCompressor;
import org.lucares.pdb.datastore.lang.Expression.InExpression; import org.lucares.pdb.datastore.lang.Expression.InExpression;
import org.lucares.pdb.datastore.lang.Expression.Property; import org.lucares.pdb.datastore.lang.Expression.Property;
public class ExpressionValidationVisitor extends IdentityExpressionVisitor { public class ExpressionValidationVisitor extends IdentityExpressionVisitor {
private final StringCompressor stringCompressor;
public ExpressionValidationVisitor(final StringCompressor stringCompressor) {
this.stringCompressor = stringCompressor;
}
@Override @Override
public Expression visit(final Property expression) { public Expression visit(final Property expression) {
@@ -27,13 +33,13 @@ public class ExpressionValidationVisitor extends IdentityExpressionVisitor {
} }
private void assertValueExists(final String value) { private void assertValueExists(final String value) {
if (Tags.STRING_COMPRESSOR.getIfPresent(value) < 0) { if (stringCompressor.getIfPresent(value) < 0) {
throw new UnkownTokenSyntaxException(value); throw new UnkownTokenSyntaxException(value);
} }
} }
public static void validate(final Expression expression) { public static void validate(final Expression expression, final StringCompressor stringCompressor) {
expression.visit(new ExpressionValidationVisitor()); expression.visit(new ExpressionValidationVisitor(stringCompressor));
} }
} }

View File

@@ -13,6 +13,7 @@ import org.lucares.pdb.api.DateTimeRange;
import org.lucares.pdb.api.QueryConstants; import org.lucares.pdb.api.QueryConstants;
import org.lucares.pdb.api.QueryWithCaretMarker; import org.lucares.pdb.api.QueryWithCaretMarker;
import org.lucares.pdb.api.QueryWithCaretMarker.ResultMode; import org.lucares.pdb.api.QueryWithCaretMarker.ResultMode;
import org.lucares.pdb.api.StringCompressor;
import org.lucares.pdb.datastore.Proposal; import org.lucares.pdb.datastore.Proposal;
import org.lucares.pdb.datastore.internal.QueryCompletionIndex; import org.lucares.pdb.datastore.internal.QueryCompletionIndex;
import org.lucares.utils.CollectionUtils; import org.lucares.utils.CollectionUtils;
@@ -34,8 +35,11 @@ public class NewProposerParser implements QueryConstants {
private final QueryCompletionIndex queryCompletionIndex; private final QueryCompletionIndex queryCompletionIndex;
public NewProposerParser(final QueryCompletionIndex queryCompletionIndex) { private final StringCompressor stringCompressor;
public NewProposerParser(final QueryCompletionIndex queryCompletionIndex, final StringCompressor stringCompressor) {
this.queryCompletionIndex = queryCompletionIndex; this.queryCompletionIndex = queryCompletionIndex;
this.stringCompressor = stringCompressor;
} }
public List<Proposal> propose(final QueryWithCaretMarker query) { public List<Proposal> propose(final QueryWithCaretMarker query) {
@@ -159,7 +163,7 @@ public class NewProposerParser implements QueryConstants {
final String queryWithCaretMarker = query.getQueryWithCaretMarker(); final String queryWithCaretMarker = query.getQueryWithCaretMarker();
// parse the query // parse the query
final Expression expression = QueryLanguageParser.parse(queryWithCaretMarker); final Expression expression = QueryLanguageParser.parse(queryWithCaretMarker, stringCompressor);
// normalize it, so that we can use the queryCompletionIndex to search for // normalize it, so that we can use the queryCompletionIndex to search for
// candidate values // candidate values

View File

@@ -1,9 +1,10 @@
package org.lucares.pdb.datastore.lang; package org.lucares.pdb.datastore.lang;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.lucares.pdb.api.StringCompressor;
public class QueryLanguageParser { public class QueryLanguageParser {
public static Expression parse(final String query) { public static Expression parse(final String query, final StringCompressor stringCompressor) {
final Expression result; final Expression result;
if (StringUtils.isEmpty(query)) { if (StringUtils.isEmpty(query)) {
@@ -12,7 +13,7 @@ public class QueryLanguageParser {
final QueryLanguage lang = new QueryLanguage(); final QueryLanguage lang = new QueryLanguage();
result = lang.parse(query); result = lang.parse(query);
} }
ExpressionValidationVisitor.validate(result); ExpressionValidationVisitor.validate(result, stringCompressor);
return result; return result;
} }
} }

View File

@@ -23,6 +23,7 @@ import javax.swing.JTextArea;
import javax.swing.JTextField; import javax.swing.JTextField;
import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.ParameterizedTest;
@@ -36,7 +37,6 @@ import org.lucares.pdb.api.Tags;
import org.lucares.pdb.blockstorage.BSFile; import org.lucares.pdb.blockstorage.BSFile;
import org.lucares.pdb.datastore.Doc; import org.lucares.pdb.datastore.Doc;
import org.lucares.pdb.datastore.Proposal; import org.lucares.pdb.datastore.Proposal;
import org.junit.jupiter.api.Assertions;
import org.lucares.utils.CollectionUtils; import org.lucares.utils.CollectionUtils;
import org.lucares.utils.DateUtils; import org.lucares.utils.DateUtils;
import org.lucares.utils.file.FileUtils; import org.lucares.utils.file.FileUtils;
@@ -66,11 +66,13 @@ public class DataStoreTest {
final DateTimeRange dateRange = DateTimeRange.relativeHours(1); final DateTimeRange dateRange = DateTimeRange.relativeHours(1);
final ParititionId partitionId = DateIndexExtension.toPartitionIds(dateRange).get(0); final ParititionId partitionId = DateIndexExtension.toPartitionIds(dateRange).get(0);
final Tags eagleTim = Tags.createAndAddToDictionary("bird", "eagle", "name", "Tim"); final Tags eagleTim = Tags.STRING_COMPRESSOR.createAndAddToDictionary("bird", "eagle", "name", "Tim");
final Tags pigeonJennifer = Tags.createAndAddToDictionary("bird", "pigeon", "name", "Jennifer"); final Tags pigeonJennifer = Tags.STRING_COMPRESSOR.createAndAddToDictionary("bird", "pigeon", "name",
final Tags flamingoJennifer = Tags.createAndAddToDictionary("bird", "flamingo", "name", "Jennifer"); "Jennifer");
final Tags labradorJenny = Tags.createAndAddToDictionary("dog", "labrador", "name", "Jenny"); final Tags flamingoJennifer = Tags.STRING_COMPRESSOR.createAndAddToDictionary("bird", "flamingo", "name",
final Tags labradorTim = Tags.createAndAddToDictionary("dog", "labrador", "name", "Tim"); "Jennifer");
final Tags labradorJenny = Tags.STRING_COMPRESSOR.createAndAddToDictionary("dog", "labrador", "name", "Jenny");
final Tags labradorTim = Tags.STRING_COMPRESSOR.createAndAddToDictionary("dog", "labrador", "name", "Tim");
tagsToBlockStorageRootBlockNumber = new HashMap<>(); tagsToBlockStorageRootBlockNumber = new HashMap<>();
tagsToBlockStorageRootBlockNumber.put(eagleTim, dataStore.createNewFile(partitionId, eagleTim)); tagsToBlockStorageRootBlockNumber.put(eagleTim, dataStore.createNewFile(partitionId, eagleTim));
@@ -115,8 +117,10 @@ public class DataStoreTest {
dataStore = new DataStore(dataDirectory); dataStore = new DataStore(dataDirectory);
tagsToBlockStorageRootBlockNumber = new LinkedHashMap<>(); tagsToBlockStorageRootBlockNumber = new LinkedHashMap<>();
final Tags pigeonJennifer = Tags.createAndAddToDictionary("bird", "pigeon", "name", "Jennifer"); final Tags pigeonJennifer = Tags.STRING_COMPRESSOR.createAndAddToDictionary("bird", "pigeon", "name",
final Tags flamingoJennifer = Tags.createAndAddToDictionary("bird", "flamingo", "name", "Jennifer"); "Jennifer");
final Tags flamingoJennifer = Tags.STRING_COMPRESSOR.createAndAddToDictionary("bird", "flamingo", "name",
"Jennifer");
final ParititionId partitionId = new ParititionId("partitionA"); final ParititionId partitionId = new ParititionId("partitionA");
tagsToBlockStorageRootBlockNumber.put(pigeonJennifer, dataStore.createNewFile(partitionId, pigeonJennifer)); tagsToBlockStorageRootBlockNumber.put(pigeonJennifer, dataStore.createNewFile(partitionId, pigeonJennifer));
@@ -130,7 +134,7 @@ public class DataStoreTest {
public void testBlockAlignment() throws IOException { public void testBlockAlignment() throws IOException {
dataStore = new DataStore(dataDirectory); dataStore = new DataStore(dataDirectory);
final Tags eagleTim = Tags.createAndAddToDictionary("bird", "eagle", "name", "Tim"); final Tags eagleTim = Tags.STRING_COMPRESSOR.createAndAddToDictionary("bird", "eagle", "name", "Tim");
final long eagleTimBlockOffset = dataStore.createNewFile(new ParititionId("partitionA"), eagleTim); final long eagleTimBlockOffset = dataStore.createNewFile(new ParititionId("partitionA"), eagleTim);
Assertions.assertEquals(0, eagleTimBlockOffset % BSFile.BLOCK_SIZE); Assertions.assertEquals(0, eagleTimBlockOffset % BSFile.BLOCK_SIZE);
} }
@@ -183,18 +187,28 @@ public class DataStoreTest {
final DateTimeRange dateRange = DateTimeRange.relativeHours(1); final DateTimeRange dateRange = DateTimeRange.relativeHours(1);
final List<Tags> tags = Arrays.asList( final List<Tags> tags = Arrays.asList(
Tags.createAndAddToDictionary("type", "bird", "subtype", "eagle", "age", "three", "name", "Tim"), Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "bird", "subtype", "eagle", "age", "three",
Tags.createAndAddToDictionary("type", "bird", "subtype", "pigeon", "age", "two", "name", "Jennifer"), "name", "Tim"),
Tags.createAndAddToDictionary("type", "bird", "subtype", "flamingo", "age", "one", "name", "Jennifer"), Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "bird", "subtype", "pigeon", "age", "two",
"name", "Jennifer"),
Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "bird", "subtype", "flamingo", "age", "one",
"name", "Jennifer"),
Tags.createAndAddToDictionary("type", "dog", "subtype", "labrador", "age", "three", "name", "Jenny"), Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "dog", "subtype", "labrador", "age", "three",
Tags.createAndAddToDictionary("type", "dog", "subtype", "labrador", "age", "three", "name", "Tim"), "name", "Jenny"),
Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "dog", "subtype", "labrador", "age", "three",
"name", "Tim"),
Tags.createAndAddToDictionary("type", "cat", "subtype", "tiger", "age", "one", "name", "Timothy"), Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "cat", "subtype", "tiger", "age", "one", "name",
Tags.createAndAddToDictionary("type", "cat", "subtype", "tiger", "age", "two", "name", "Paul"), "Timothy"),
Tags.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "three", "name", "Jane"), Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "cat", "subtype", "tiger", "age", "two", "name",
Tags.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "four", "name", "Sam"), "Paul"),
Tags.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "four", "name", "John")); Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "three",
"name", "Jane"),
Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "four", "name",
"Sam"),
Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "four", "name",
"John"));
tags.forEach(t -> dataStore.createNewFile(partitionId, t)); tags.forEach(t -> dataStore.createNewFile(partitionId, t));
@@ -208,7 +222,7 @@ public class DataStoreTest {
final long timestamp = DateUtils.getDate(2016, 1, 1, 13, 1, 1).toInstant().toEpochMilli(); final long timestamp = DateUtils.getDate(2016, 1, 1, 13, 1, 1).toInstant().toEpochMilli();
final Tags tags = Tags.createAndAddToDictionary("myKey", "myValue"); final Tags tags = Tags.STRING_COMPRESSOR.createAndAddToDictionary("myKey", "myValue");
dataStore.write(timestamp, tags, 1); dataStore.write(timestamp, tags, 1);
dataStore.write(timestamp, tags, 2); dataStore.write(timestamp, tags, 2);
@@ -222,21 +236,28 @@ public class DataStoreTest {
try (final DataStore dataStore = new DataStore(dir)) { try (final DataStore dataStore = new DataStore(dir)) {
final List<Tags> tags = Arrays.asList( final List<Tags> tags = Arrays.asList(
Tags.createAndAddToDictionary("type", "bird", "subtype", "eagle", "age", "three", "name", "Tim"), Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "bird", "subtype", "eagle", "age", "three",
Tags.createAndAddToDictionary("type", "bird", "subtype", "pigeon", "age", "two", "name", "name", "Tim"),
"Jennifer"), Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "bird", "subtype", "pigeon", "age", "two",
Tags.createAndAddToDictionary("type", "bird", "subtype", "flamingo", "age", "one", "name", "name", "Jennifer"),
"Jennifer"), Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "bird", "subtype", "flamingo", "age", "one",
"name", "Jennifer"),
Tags.createAndAddToDictionary("type", "dog", "subtype", "labrador", "age", "three", "name", Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "dog", "subtype", "labrador", "age",
"Jenny"), "three", "name", "Jenny"),
Tags.createAndAddToDictionary("type", "dog", "subtype", "labrador", "age", "three", "name", "Tim"), Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "dog", "subtype", "labrador", "age",
"three", "name", "Tim"),
Tags.createAndAddToDictionary("type", "cat", "subtype", "tiger", "age", "one", "name", "Timothy"), Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "cat", "subtype", "tiger", "age", "one",
Tags.createAndAddToDictionary("type", "cat", "subtype", "tiger", "age", "two", "name", "Paul"), "name", "Timothy"),
Tags.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "three", "name", "Jane"), Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "cat", "subtype", "tiger", "age", "two",
Tags.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "four", "name", "Sam"), "name", "Paul"),
Tags.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "four", "name", "John")); Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "three",
"name", "Jane"),
Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "four",
"name", "Sam"),
Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "four",
"name", "John"));
final DateTimeRange dateRange = DateTimeRange.relativeMillis(0); final DateTimeRange dateRange = DateTimeRange.relativeMillis(0);
final ParititionId partitionId = DateIndexExtension.toPartitionIds(dateRange).get(0); final ParititionId partitionId = DateIndexExtension.toPartitionIds(dateRange).get(0);

View File

@@ -8,6 +8,7 @@ import java.util.Collections;
import java.util.List; import java.util.List;
import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.lucares.pdb.api.DateTimeRange; import org.lucares.pdb.api.DateTimeRange;
@@ -15,7 +16,6 @@ import org.lucares.pdb.api.QueryWithCaretMarker;
import org.lucares.pdb.api.QueryWithCaretMarker.ResultMode; import org.lucares.pdb.api.QueryWithCaretMarker.ResultMode;
import org.lucares.pdb.api.Tags; import org.lucares.pdb.api.Tags;
import org.lucares.pdb.datastore.Proposal; import org.lucares.pdb.datastore.Proposal;
import org.junit.jupiter.api.Assertions;
import org.lucares.utils.CollectionUtils; import org.lucares.utils.CollectionUtils;
import org.lucares.utils.file.FileUtils; import org.lucares.utils.file.FileUtils;
@@ -44,18 +44,23 @@ public class ProposerTest {
dateRange = DateTimeRange.now(); dateRange = DateTimeRange.now();
final ParititionId now = DateIndexExtension.toPartitionIds(dateRange).get(0); final ParititionId now = DateIndexExtension.toPartitionIds(dateRange).get(0);
final Tags eagleTim = Tags.createAndAddToDictionary("bird", "eagle", "name", "Tim"); final Tags eagleTim = Tags.STRING_COMPRESSOR.createAndAddToDictionary("bird", "eagle", "name", "Tim");
final Tags eagleTimothy = Tags.createAndAddToDictionary("bird", "eagle", "name", "Timothy"); final Tags eagleTimothy = Tags.STRING_COMPRESSOR.createAndAddToDictionary("bird", "eagle", "name", "Timothy");
final Tags pigeonJennifer = Tags.createAndAddToDictionary("bird", "pigeon", "name", "Jennifer"); final Tags pigeonJennifer = Tags.STRING_COMPRESSOR.createAndAddToDictionary("bird", "pigeon", "name",
final Tags flamingoJennifer = Tags.createAndAddToDictionary("bird", "flamingo", "name", "Jennifer"); "Jennifer");
final Tags labradorJenny = Tags.createAndAddToDictionary("dog", "labrador", "name", "Jenny"); final Tags flamingoJennifer = Tags.STRING_COMPRESSOR.createAndAddToDictionary("bird", "flamingo", "name",
final Tags labradorTim = Tags.createAndAddToDictionary("dog", "labrador", "name", "Tim"); "Jennifer");
final Tags labradorJenny = Tags.STRING_COMPRESSOR.createAndAddToDictionary("dog", "labrador", "name", "Jenny");
final Tags labradorTim = Tags.STRING_COMPRESSOR.createAndAddToDictionary("dog", "labrador", "name", "Tim");
final Tags methodA = Tags.createAndAddToDictionary("method", "FooController.doImportantStuff", "source", "web"); final Tags methodA = Tags.STRING_COMPRESSOR.createAndAddToDictionary("method", "FooController.doImportantStuff",
final Tags methodB = Tags.createAndAddToDictionary("method", "FooService.doImportantStuff", "source", "source", "web");
"service"); final Tags methodB = Tags.STRING_COMPRESSOR.createAndAddToDictionary("method", "FooService.doImportantStuff",
final Tags methodC = Tags.createAndAddToDictionary("method", "BarController.doBoringStuff", "source", "web"); "source", "service");
final Tags methodD = Tags.createAndAddToDictionary("method", "FooBarService.doOtherStuff", "source", "service"); final Tags methodC = Tags.STRING_COMPRESSOR.createAndAddToDictionary("method", "BarController.doBoringStuff",
"source", "web");
final Tags methodD = Tags.STRING_COMPRESSOR.createAndAddToDictionary("method", "FooBarService.doOtherStuff",
"source", "service");
dataStore.createNewFile(now, eagleTim); dataStore.createNewFile(now, eagleTim);
dataStore.createNewFile(now, eagleTimothy); dataStore.createNewFile(now, eagleTimothy);

View File

@@ -35,18 +35,22 @@ public class QueryCompletionIndexTest {
@Test @Test
public void test() throws Exception { public void test() throws Exception {
Tags.STRING_COMPRESSOR = new StringCompressor(new UniqueStringIntegerPairs()); final StringCompressor stringCompressor = new StringCompressor(new UniqueStringIntegerPairs());
Tags.STRING_COMPRESSOR = stringCompressor;
final List<Tags> tags = Arrays.asList(// final List<Tags> tags = Arrays.asList(//
Tags.createAndAddToDictionary("firstname", "John", "lastname", "Doe", "country", "Atlantis"), // A stringCompressor.createAndAddToDictionary("firstname", "John", "lastname", "Doe", "country",
Tags.createAndAddToDictionary("firstname", "Jane", "lastname", "Doe", "country", "ElDorado"), // B "Atlantis"), // A
Tags.createAndAddToDictionary("firstname", "John", "lastname", "Miller", "country", "Atlantis")// C stringCompressor.createAndAddToDictionary("firstname", "Jane", "lastname", "Doe", "country",
"ElDorado"), // B
stringCompressor.createAndAddToDictionary("firstname", "John", "lastname", "Miller", "country",
"Atlantis")// C
); );
final DateTimeRange dateRange = DateTimeRange.relativeMillis(1); final DateTimeRange dateRange = DateTimeRange.relativeMillis(1);
final ParititionId partitionId = DateIndexExtension.toPartitionIds(dateRange).get(0); final ParititionId partitionId = DateIndexExtension.toPartitionIds(dateRange).get(0);
try (QueryCompletionIndex index = new QueryCompletionIndex(dataDirectory)) { try (QueryCompletionIndex index = new QueryCompletionIndex(dataDirectory, stringCompressor)) {
for (final Tags t : tags) { for (final Tags t : tags) {
index.addTags(partitionId, t); index.addTags(partitionId, t);
} }
@@ -54,13 +58,13 @@ public class QueryCompletionIndexTest {
// all firstnames where lastname=Doe are returned sorted alphabetically. // all firstnames where lastname=Doe are returned sorted alphabetically.
// tags A and B match // tags A and B match
final SortedSet<String> firstnamesWithLastnameDoe = index.find(dateRange, final SortedSet<String> firstnamesWithLastnameDoe = index.find(dateRange,
Tags.STRING_COMPRESSOR.createTag("lastname", "Doe"), "firstname"); stringCompressor.createTag("lastname", "Doe"), "firstname");
Assertions.assertEquals(new TreeSet<>(Set.of("Jane", "John")), firstnamesWithLastnameDoe); Assertions.assertEquals(new TreeSet<>(Set.of("Jane", "John")), firstnamesWithLastnameDoe);
// no duplicates are returned: // no duplicates are returned:
// tags A and C match firstname=John, but both have country=Atlantis // tags A and C match firstname=John, but both have country=Atlantis
final SortedSet<String> countryWithFirstnameJohn = index.find(dateRange, final SortedSet<String> countryWithFirstnameJohn = index.find(dateRange,
Tags.STRING_COMPRESSOR.createTag("firstname", "John"), "country"); stringCompressor.createTag("firstname", "John"), "country");
Assertions.assertEquals(new TreeSet<>(Arrays.asList("Atlantis")), countryWithFirstnameJohn); Assertions.assertEquals(new TreeSet<>(Arrays.asList("Atlantis")), countryWithFirstnameJohn);
// findAllValuesForField sorts alphabetically // findAllValuesForField sorts alphabetically

Binary file not shown.

View File

@@ -1,5 +1,5 @@
distributionBase=GRADLE_USER_HOME distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-7.2-bin.zip distributionUrl=https\://services.gradle.org/distributions/gradle-7.5.1-bin.zip
zipStoreBase=GRADLE_USER_HOME zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists zipStorePath=wrapper/dists

6
gradlew vendored
View File

@@ -205,6 +205,12 @@ set -- \
org.gradle.wrapper.GradleWrapperMain \ org.gradle.wrapper.GradleWrapperMain \
"$@" "$@"
# Stop when "xargs" is not available.
if ! command -v xargs >/dev/null 2>&1
then
die "xargs is not available"
fi
# Use "xargs" to parse quoted args. # Use "xargs" to parse quoted args.
# #
# With -n1 it outputs one arg per line, with the quotes and backslashes removed. # With -n1 it outputs one arg per line, with the quotes and backslashes removed.

10
gradlew.bat vendored
View File

@@ -40,7 +40,7 @@ if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1 %JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto execute if %ERRORLEVEL% equ 0 goto execute
echo. echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
@@ -75,13 +75,15 @@ set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
:end :end
@rem End local scope for the variables with windows NT shell @rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd if %ERRORLEVEL% equ 0 goto mainEnd
:fail :fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code! rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 set EXIT_CODE=%ERRORLEVEL%
exit /b 1 if %EXIT_CODE% equ 0 set EXIT_CODE=1
if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
exit /b %EXIT_CODE%
:mainEnd :mainEnd
if "%OS%"=="Windows_NT" endlocal if "%OS%"=="Windows_NT" endlocal

View File

@@ -1,5 +1,6 @@
dependencies { dependencies {
implementation project(':pdb-utils') implementation project(':pdb-utils')
implementation project(':block-storage')
implementation lib_primitive_collections implementation lib_primitive_collections
} }

View File

@@ -1,8 +1,5 @@
package org.lucares.pdb.api; package org.lucares.pdb.api;
import java.util.ArrayList;
import java.util.List;
public class Query { public class Query {
private final String query; private final String query;
@@ -42,25 +39,6 @@ public class Query {
return new Query(query, dateRange); return new Query(query, dateRange);
} }
public static Query createQuery(final Tags tags, final DateTimeRange dateRange) {
final List<String> terms = new ArrayList<>();
for (final String key : tags.getKeys()) {
final String value = tags.getValue(key);
final StringBuilder term = new StringBuilder();
term.append(key);
term.append("=");
term.append(value);
term.append(" ");
terms.add(term.toString());
}
return new Query(String.join(" and ", terms), dateRange);
}
public String getQuery() { public String getQuery() {
return query; return query;
} }

View File

@@ -8,39 +8,52 @@ import java.util.function.Function;
*/ */
public class StringCompressor { public class StringCompressor {
public static final String ALL_DOCS_KEY = "\ue001allDocs"; // \ue001 is the second character in the private use
// area
private static final String DEFAULT_GROUP = "<none>";
public static Tag TAG_ALL_DOCS;
private final UniqueStringIntegerPairs usip; private final UniqueStringIntegerPairs usip;
public StringCompressor(final UniqueStringIntegerPairs usip) throws RuntimeIOException { public StringCompressor(final UniqueStringIntegerPairs usip) throws RuntimeIOException {
this.usip = usip; this.usip = usip;
} }
public static StringCompressor create(final Path path) { public static StringCompressor create(final Path storageBasePath) {
final UniqueStringIntegerPairs mapsi = new UniqueStringIntegerPairs(path); final UniqueStringIntegerPairs mapsi = new UniqueStringIntegerPairs(storageBasePath);
return new StringCompressor(mapsi); final StringCompressor result = new StringCompressor(mapsi);
result.putString(ALL_DOCS_KEY);
result.putString("");
TAG_ALL_DOCS = result.createTag(ALL_DOCS_KEY, "");
return result;
} }
public int put(final String string) { public int putString(final String string) {
return usip.computeIfAbsent(string, s -> usip.getHighestInteger() + 1); return usip.computeIfAbsent(string);
} }
public int put(final byte[] bytes, final int start, final int endExclusive, public int putStringFromBytes(final byte[] bytes, final int start, final int endExclusive,
final Function<String, String> postProcess) { final Function<String, String> postProcess) {
return usip.computeIfAbsent(bytes, start, endExclusive, postProcess); return usip.computeIfAbsent(bytes, start, endExclusive, postProcess);
} }
public int put(final String value, final Function<String, String> postProcess) { public int putString(final String value, final Function<String, String> postProcess) {
final String processedValue = postProcess.apply(value); final String processedValue = postProcess.apply(value);
return usip.computeIfAbsentWithPostprocess(processedValue, postProcess); return usip.computeIfAbsent(processedValue);
} }
public String get(final int integer) { public String getString(final int integer) {
return usip.getKey(integer); return usip.getString(integer);
} }
public int getIfPresent(final String string) { public int getIfPresent(final String string) {
final Integer integer = usip.get(string); final Integer integer = usip.getInt(string);
return integer != null ? integer : -1; return integer != null ? integer : -1;
} }
@@ -51,17 +64,104 @@ public class StringCompressor {
* @param value the value * @param value the value
*/ */
public Tag createTag(final String field, final String value) { public Tag createTag(final String field, final String value) {
final int f = field != null ? Tags.STRING_COMPRESSOR.getIfPresent(field) : -1; final int f = field != null ? getIfPresent(field) : -1;
final int v = value != null ? Tags.STRING_COMPRESSOR.getIfPresent(value) : -1; final int v = value != null ? getIfPresent(value) : -1;
return new Tag(f, v); return new Tag(f, v);
} }
public String getKeyAsString(final Tag tag) { public String getKeyAsString(final Tag tag) {
return get(tag.getKey()); return getString(tag.getKey());
} }
public String getValueAsString(final Tag tag) { public String getValueAsString(final Tag tag) {
return get(tag.getValue()); return getString(tag.getValue());
} }
public Tags createAndAddToDictionary(final String key, final String value) {
final int keyAsInt = putString(key);
final int valueAsInt = putString(value);
return TagsBuilder.create().add(keyAsInt, valueAsInt).build();
}
public Tags createAndAddToDictionary(final String key1, final String value1, final String key2,
final String value2) {
final int key1AsInt = putString(key1);
final int value1AsInt = putString(value1);
final int key2AsInt = putString(key2);
final int value2AsInt = putString(value2);
final Tags result = TagsBuilder.create().add(key1AsInt, value1AsInt).add(key2AsInt, value2AsInt).build();
return result;
}
public Tags createAndAddToDictionary(final String key1, final String value1, final String key2, final String value2,
final String key3, final String value3) {
final int key1AsInt = putString(key1);
final int value1AsInt = putString(value1);
final int key2AsInt = putString(key2);
final int value2AsInt = putString(value2);
final int key3AsInt = putString(key3);
final int value3AsInt = putString(value3);
final Tags result = TagsBuilder.create().add(key1AsInt, value1AsInt).add(key2AsInt, value2AsInt)
.add(key3AsInt, value3AsInt).build();
return result;
}
public Tags createAndAddToDictionary(final String key1, final String value1, final String key2, final String value2,
final String key3, final String value3, final String key4, final String value4) {
final int key1AsInt = putString(key1);
final int value1AsInt = putString(value1);
final int key2AsInt = putString(key2);
final int value2AsInt = putString(value2);
final int key3AsInt = putString(key3);
final int value3AsInt = putString(value3);
final int key4AsInt = putString(key4);
final int value4AsInt = putString(value4);
final Tags result = TagsBuilder.create().add(key1AsInt, value1AsInt).add(key2AsInt, value2AsInt)
.add(key3AsInt, value3AsInt).add(key4AsInt, value4AsInt).build();
return result;
}
public String asValueString(final Tags tags) {
final StringBuilder result = new StringBuilder();
if (tags.isEmpty()) {
result.append(DEFAULT_GROUP);
} else {
for (final Tag tag : tags.toTags()) {
final String value = getString(tag.getValue());
if (result.length() > 0) {
result.append(" / ");
}
result.append(value);
}
}
return result.toString();
}
public String asString(final Tags tags) {
final StringBuilder result = new StringBuilder();
for (final Tag tag : tags.toTags()) {
if (result.length() > 0) {
result.append(", ");
}
result.append(getString(tag.getKey()));
result.append("=");
result.append(getString(tag.getValue()));
}
return result.toString();
}
} }

View File

@@ -6,13 +6,14 @@ package org.lucares.pdb.api;
* 'Sam' is the value. * 'Sam' is the value.
*/ */
public class Tag implements Comparable<Tag> { public class Tag implements Comparable<Tag> {
private final int field; private final int field;
private final int value; private final int value;
/** /**
* Create a new tag with field and value specified as int. See * Create a new tag with field and value specified as int. See
* {@link Tags#STRING_COMPRESSOR} for the mapping between Strings and ints. * {@link StringCompressor} for the mapping between Strings and ints.
* *
* @param field the field as int * @param field the field as int
* @param value the value as int * @param value the value as int
@@ -42,6 +43,10 @@ public class Tag implements Comparable<Tag> {
return value; return value;
} }
public Tag unsetValue() {
return new Tag(field, -1);
}
@Override @Override
public String toString() { public String toString() {
return field + "=" + value; return field + "=" + value;

View File

@@ -7,7 +7,6 @@ import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import java.util.TreeSet; import java.util.TreeSet;
import java.util.function.BiConsumer;
import java.util.function.Function; import java.util.function.Function;
import org.lucares.collections.IntList; import org.lucares.collections.IntList;
@@ -16,7 +15,6 @@ import org.lucares.utils.byteencoder.VariableByteEncoder;
public class Tags implements Comparable<Tags> { public class Tags implements Comparable<Tags> {
private static final String DEFAULT_GROUP = "<none>";
public static StringCompressor STRING_COMPRESSOR = null; public static StringCompressor STRING_COMPRESSOR = null;
private static final byte[] EMPTY_BYTES = new byte[0]; private static final byte[] EMPTY_BYTES = new byte[0];
public static final Tags EMPTY = new Tags(); public static final Tags EMPTY = new Tags();
@@ -60,33 +58,6 @@ public class Tags implements Comparable<Tags> {
return result; return result;
} }
public static Tags createAndAddToDictionary(final String key, final String value) {
return TagsBuilder.create().addAndAddToDictionary(key, value).build();
}
public static Tags createAndAddToDictionary(final String key1, final String value1, final String key2,
final String value2) {
final Tags result = TagsBuilder.create().addAndAddToDictionary(key1, value1).addAndAddToDictionary(key2, value2)
.build();
return result;
}
public static Tags createAndAddToDictionary(final String key1, final String value1, final String key2,
final String value2, final String key3, final String value3) {
final Tags result = TagsBuilder.create().addAndAddToDictionary(key1, value1).addAndAddToDictionary(key2, value2)
.addAndAddToDictionary(key3, value3).build();
return result;
}
public static Tags createAndAddToDictionary(final String key1, final String value1, final String key2,
final String value2, final String key3, final String value3, final String key4, final String value4) {
final Tags result = TagsBuilder.create().addAndAddToDictionary(key1, value1).addAndAddToDictionary(key2, value2)
.addAndAddToDictionary(key3, value3).addAndAddToDictionary(key4, value4).build();
return result;
}
public static Tags fromBytes(final byte[] bytes) { public static Tags fromBytes(final byte[] bytes) {
final List<Tag> result = new ArrayList<>(); final List<Tag> result = new ArrayList<>();
@@ -157,18 +128,18 @@ public class Tags implements Comparable<Tags> {
} }
public String getValue(final String key) { public String getValue(final String key) {
final Tag needle = new Tag(STRING_COMPRESSOR.put(key), 0); final Tag needle = new Tag(STRING_COMPRESSOR.putString(key), 0);
final int index = Collections.binarySearch(tags, needle, TagByKeyComparator.INSTANCE); final int index = Collections.binarySearch(tags, needle, TagByKeyComparator.INSTANCE);
if (index >= 0) { if (index >= 0) {
final Tag tag = tags.get(index); final Tag tag = tags.get(index);
return STRING_COMPRESSOR.get(tag.getValue()); return STRING_COMPRESSOR.getString(tag.getValue());
} }
return null; return null;
} }
public int getValueAsInt(final String key) { public int getValueAsInt(final String key) {
final Tag needle = new Tag(STRING_COMPRESSOR.put(key), 0); final Tag needle = new Tag(STRING_COMPRESSOR.putString(key), 0);
final int index = Collections.binarySearch(tags, needle, TagByKeyComparator.INSTANCE); final int index = Collections.binarySearch(tags, needle, TagByKeyComparator.INSTANCE);
if (index >= 0) { if (index >= 0) {
@@ -181,7 +152,7 @@ public class Tags implements Comparable<Tags> {
public Set<String> getKeys() { public Set<String> getKeys() {
final TreeSet<String> result = new TreeSet<>(); final TreeSet<String> result = new TreeSet<>();
for (final Tag tag : tags) { for (final Tag tag : tags) {
result.add(STRING_COMPRESSOR.get(tag.getKey())); result.add(STRING_COMPRESSOR.getString(tag.getKey()));
} }
return result; return result;
} }
@@ -202,15 +173,6 @@ public class Tags implements Comparable<Tags> {
return tags; return tags;
} }
public void forEach(final BiConsumer<String, String> keyValueConsumer) {
for (final Tag tag : tags) {
final String key = STRING_COMPRESSOR.get(tag.getKey());
final String value = STRING_COMPRESSOR.get(tag.getValue());
keyValueConsumer.accept(key, value);
}
}
public Tags mapTags(final Function<Tag, Tag> tagMapFuntion) { public Tags mapTags(final Function<Tag, Tag> tagMapFuntion) {
final List<Tag> mappedTags = new ArrayList<>(tags.size()); final List<Tag> mappedTags = new ArrayList<>(tags.size());
for (final Tag tag : tags) { for (final Tag tag : tags) {
@@ -270,39 +232,4 @@ public class Tags implements Comparable<Tags> {
public boolean isEmpty() { public boolean isEmpty() {
return tags.isEmpty(); return tags.isEmpty();
} }
/**
* @return User facing readable representation
*/
public String asString() {
final StringBuilder result = new StringBuilder();
for (final Tag tag : tags) {
if (result.length() > 0) {
result.append(", ");
}
result.append(STRING_COMPRESSOR.get(tag.getKey()));
result.append("=");
result.append(STRING_COMPRESSOR.get(tag.getValue()));
}
return result.toString();
}
public String asValueString() {
final StringBuilder result = new StringBuilder();
if (isEmpty()) {
result.append(DEFAULT_GROUP);
} else {
forEach((k, v) -> {
if (result.length() > 0) {
result.append(" / ");
}
result.append(v);
});
}
return result.toString();
}
} }

View File

@@ -25,16 +25,9 @@ public class TagsBuilder {
return this; return this;
} }
public TagsBuilder add(final String key, final String value) { public TagsBuilder add(final Tag tag) {
final int keyAsInt = Tags.STRING_COMPRESSOR.getIfPresent(key); tags.add(tag);
final int valueAsInt = Tags.STRING_COMPRESSOR.getIfPresent(value); return this;
return add(keyAsInt, valueAsInt);
}
public TagsBuilder addAndAddToDictionary(final String key, final String value) {
final int keyAsInt = Tags.STRING_COMPRESSOR.put(key);
final int valueAsInt = Tags.STRING_COMPRESSOR.put(value);
return add(keyAsInt, valueAsInt);
} }
public Tags build() { public Tags build() {

View File

@@ -2,11 +2,8 @@ package org.lucares.pdb.api;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.FileInputStream; import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException; import java.io.IOException;
import java.io.InputStreamReader; import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
@@ -19,6 +16,8 @@ import java.util.Set;
import java.util.function.Function; import java.util.function.Function;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.lucares.pdb.map.PersistentMap;
/** /**
* A very simple {@link Set}-like or {@link Map}-like data structure that stores * A very simple {@link Set}-like or {@link Map}-like data structure that stores
* unique&sup1; pairs of Strings and integers persistently. * unique&sup1; pairs of Strings and integers persistently.
@@ -33,8 +32,6 @@ import java.util.regex.Pattern;
public class UniqueStringIntegerPairs { public class UniqueStringIntegerPairs {
private static final String SEPARATOR = "\t"; private static final String SEPARATOR = "\t";
private static final boolean APPEND = true;
private static final class ByteArray implements Comparable<ByteArray> { private static final class ByteArray implements Comparable<ByteArray> {
private final byte[] array; private final byte[] array;
private final int start; private final int start;
@@ -97,33 +94,50 @@ public class UniqueStringIntegerPairs {
*/ */
private final List<String> intToString = new ArrayList<>(); private final List<String> intToString = new ArrayList<>();
private final Path file; final PersistentMap<String, Long> persistentMap;
public UniqueStringIntegerPairs() { public UniqueStringIntegerPairs() {
this(null); this(null);
} }
public UniqueStringIntegerPairs(final Path file) { public UniqueStringIntegerPairs(final Path storageBasePath) {
this.file = file;
if (file != null) { if (storageBasePath != null) {
init(file); persistentMap = new PersistentMap<>(storageBasePath.resolve("keys.bs"), storageBasePath,
PersistentMap.STRING_CODER, PersistentMap.LONG_CODER);
final Path oldKeysCsvFile = keyCompressionFile(storageBasePath);
if (persistentMap.isEmpty() && Files.exists(oldKeysCsvFile)) {
upgradeFromCsvFile(oldKeysCsvFile);
} else {
init();
}
} else {
// some unit tests disable the persistence and use this class memory only
persistentMap = null;
} }
} }
private void init(final Path file) throws RuntimeIOException { private void init() {
persistentMap.forAll((string, integer) -> {
intToStringPut(integer.intValue(), string);
stringToInt.put(string, integer.intValue());
bytesToInt.put(new ByteArray(string), integer.intValue());
});
}
private Path keyCompressionFile(final Path dataDirectory) {
return dataDirectory.resolve("keys.csv");
}
private void upgradeFromCsvFile(final Path file) throws RuntimeIOException {
try { try {
Files.createDirectories(file.getParent());
if (!Files.exists(file)) {
Files.createFile(file);
}
try (final BufferedReader reader = new BufferedReader( try (final BufferedReader reader = new BufferedReader(
new InputStreamReader(new FileInputStream(file.toFile()), StandardCharsets.UTF_8))) { new InputStreamReader(new FileInputStream(file.toFile()), StandardCharsets.UTF_8))) {
String line; String line;
while ((line = reader.readLine()) != null) { while ((line = reader.readLine()) != null) {
// TODO use more efficient code to read the CSV -> improves startup time
final String[] tokens = line.split(Pattern.quote(SEPARATOR)); final String[] tokens = line.split(Pattern.quote(SEPARATOR));
if (tokens.length == 2) { if (tokens.length == 2) {
@@ -132,6 +146,7 @@ public class UniqueStringIntegerPairs {
intToStringPut(integer, string); intToStringPut(integer, string);
stringToInt.put(string, integer); stringToInt.put(string, integer);
bytesToInt.put(new ByteArray(string), integer); bytesToInt.put(new ByteArray(string), integer);
persistentMap.putValue(string, (long) integer);
} }
} }
} }
@@ -150,20 +165,13 @@ public class UniqueStringIntegerPairs {
intToString.set(value, string); intToString.set(value, string);
} }
void put(final String string, final int integer) { void putStringAndInteger(final String string, final int integer) {
if (stringToInt.containsKey(string) || (intToString.size() > integer && intToString.get(integer) != null)) { if (stringToInt.containsKey(string) || (intToString.size() > integer && intToString.get(integer) != null)) {
throw new IllegalArgumentException("Unique key constraint violation for (" + string + ", " + integer + ")"); throw new IllegalArgumentException("Unique key constraint violation for (" + string + ", " + integer + ")");
} }
if (file != null) { if (persistentMap != null) {
try (final Writer writer = new OutputStreamWriter(new FileOutputStream(file.toFile(), APPEND), persistentMap.putValue(string, (long) integer);
StandardCharsets.UTF_8)) {
writer.write(string + SEPARATOR + integer + "\n");
} catch (final IOException e) {
throw new RuntimeIOException(e);
}
} }
intToStringPut(integer, string); intToStringPut(integer, string);
@@ -171,16 +179,16 @@ public class UniqueStringIntegerPairs {
bytesToInt.put(new ByteArray(string), integer); bytesToInt.put(new ByteArray(string), integer);
} }
public Integer get(final String string) { public Integer getInt(final String string) {
return stringToInt.get(string); return stringToInt.get(string);
} }
public String getKey(final int second) { public String getString(final int second) {
return intToString.get(second); return intToString.get(second);
} }
public Integer getHighestInteger() { Integer getHighestInteger() {
return intToString.size() == 0 ? -1 : intToString.size() - 1; return intToString.size() == 0 ? -1 : intToString.size() - 1;
} }
@@ -189,7 +197,7 @@ public class UniqueStringIntegerPairs {
synchronized (stringToInt) { synchronized (stringToInt) {
if (!stringToInt.containsKey(string)) { if (!stringToInt.containsKey(string)) {
final Integer second = mappingFunction.apply(string); final Integer second = mappingFunction.apply(string);
put(string, second); putStringAndInteger(string, second);
} }
} }
} }
@@ -197,6 +205,17 @@ public class UniqueStringIntegerPairs {
return stringToInt.get(string); return stringToInt.get(string);
} }
public Integer computeIfAbsent(final String string) {
if (!stringToInt.containsKey(string)) {
synchronized (stringToInt) {
final Integer integer = intToString.size();
putStringAndInteger(string, integer);
}
}
return stringToInt.get(string);
}
public Integer computeIfAbsent(final byte[] bytes, final int start, final int endExclusive, public Integer computeIfAbsent(final byte[] bytes, final int start, final int endExclusive,
final Function<String, String> postProcess) { final Function<String, String> postProcess) {
@@ -204,33 +223,9 @@ public class UniqueStringIntegerPairs {
Integer result = bytesToInt.get(byteArray); Integer result = bytesToInt.get(byteArray);
if (result == null) { if (result == null) {
final String string = new String(bytes, start, endExclusive - start, StandardCharsets.UTF_8); final String string = new String(bytes, start, endExclusive - start, StandardCharsets.UTF_8);
result = computeIfAbsentWithPostprocess(string, postProcess); final String postProcessed = postProcess.apply(string);
result = computeIfAbsent(postProcessed);
} }
return result; return result;
} }
public Integer computeIfAbsentWithPostprocess(final String string, final Function<String, String> postProcess) {
final ByteArray byteArray = new ByteArray(string);
Integer result = bytesToInt.get(byteArray);
if (result == null) {
synchronized (stringToInt) {
if (!bytesToInt.containsKey(byteArray)) {
final String normalizedString = postProcess.apply(string);
result = get(normalizedString);
if (result != null) {
return result;
}
final Integer integer = intToString.size();
put(normalizedString, integer); // adds the normalized String to stringToInt and bytesToInt
bytesToInt.put(byteArray, integer); // also add the original String to bytesToInt, because it is
// used as cache
}
result = bytesToInt.get(byteArray);
}
}
return result;
}
} }

View File

@@ -7,6 +7,7 @@ import java.util.LinkedHashMap;
import java.util.Map; import java.util.Map;
import org.lucares.pdb.api.StringCompressor; import org.lucares.pdb.api.StringCompressor;
import org.lucares.pdb.api.Tag;
import org.lucares.pdb.api.Tags; import org.lucares.pdb.api.Tags;
import org.lucares.pdb.api.TagsBuilder; import org.lucares.pdb.api.TagsBuilder;
import org.lucares.pdb.api.UniqueStringIntegerPairs; import org.lucares.pdb.api.UniqueStringIntegerPairs;
@@ -72,30 +73,31 @@ public class MemoryScale {
} }
} }
private static Object createTag() { private static Tag createTag() {
return Tags.STRING_COMPRESSOR.createTag("", ""); return Tags.STRING_COMPRESSOR.createTag("", "");
} }
private static Object createTags0() { private static Tags createTags0() {
return new Tags(); return new Tags();
} }
private static Object createTags1() { private static Tags createTags1() {
return Tags.createAndAddToDictionary("k1", "v1"); return Tags.STRING_COMPRESSOR.createAndAddToDictionary("k1", "v1");
} }
private static Object createTags2() { private static Tags createTags2() {
return Tags.createAndAddToDictionary("k1", "v1", "k2", "v2"); return Tags.STRING_COMPRESSOR.createAndAddToDictionary("k1", "v1", "k2", "v2");
} }
private static Object createTags6() { private static Tags createTags6() {
TagsBuilder result = TagsBuilder.create(); final TagsBuilder result = TagsBuilder.create();
result = result.add("k1", "v1");
result = result.add("k2", "v2"); result.add(Tags.STRING_COMPRESSOR.createTag("k1", "v1"));
result = result.add("k3", "v3"); result.add(Tags.STRING_COMPRESSOR.createTag("k2", "v2"));
result = result.add("k4", "v4"); result.add(Tags.STRING_COMPRESSOR.createTag("k3", "v3"));
result = result.add("k5", "v5"); result.add(Tags.STRING_COMPRESSOR.createTag("k4", "v4"));
result = result.add("k6", "v6"); result.add(Tags.STRING_COMPRESSOR.createTag("k5", "v5"));
result.add(Tags.STRING_COMPRESSOR.createTag("k6", "v6"));
return result.build(); return result.build();
} }

View File

@@ -11,9 +11,9 @@ import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Assertions;
import org.lucares.utils.file.FileUtils; import org.lucares.utils.file.FileUtils;
public class StringCompressorTest { public class StringCompressorTest {
@@ -34,8 +34,8 @@ public class StringCompressorTest {
final StringCompressor keyValueCompressor = StringCompressor.create(dataDirectory.resolve("key.csv")); final StringCompressor keyValueCompressor = StringCompressor.create(dataDirectory.resolve("key.csv"));
final String value = "foo"; final String value = "foo";
final Integer intFoo = keyValueCompressor.put(value); final Integer intFoo = keyValueCompressor.putString(value);
final String actual = keyValueCompressor.get(intFoo); final String actual = keyValueCompressor.getString(intFoo);
Assertions.assertEquals(value, actual); Assertions.assertEquals(value, actual);
} }
@@ -47,12 +47,12 @@ public class StringCompressorTest {
{ {
final StringCompressor keyValueCompressor = StringCompressor.create(database); final StringCompressor keyValueCompressor = StringCompressor.create(database);
keyValueCompressor.put(value); keyValueCompressor.putString(value);
} }
{ {
final StringCompressor keyValueCompressor = StringCompressor.create(database); final StringCompressor keyValueCompressor = StringCompressor.create(database);
keyValueCompressor.get(0); keyValueCompressor.getString(0);
} }
} }

View File

@@ -21,7 +21,7 @@ final class StringInserter implements Callable<List<String>> {
final List<String> result = new ArrayList<>(); final List<String> result = new ArrayList<>();
for (int i = 0; i < numEntries; i++) { for (int i = 0; i < numEntries; i++) {
final String s = UUID.randomUUID().toString(); final String s = UUID.randomUUID().toString();
stringCompressor.put(s); stringCompressor.putString(s);
result.add(s); result.add(s);
} }
return result; return result;

View File

@@ -5,9 +5,9 @@ import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Assertions;
import org.lucares.utils.file.FileUtils; import org.lucares.utils.file.FileUtils;
public class UniqueStringIntegerPairsTest { public class UniqueStringIntegerPairsTest {
@@ -33,16 +33,16 @@ public class UniqueStringIntegerPairsTest {
{ {
final UniqueStringIntegerPairs usip = new UniqueStringIntegerPairs(database); final UniqueStringIntegerPairs usip = new UniqueStringIntegerPairs(database);
usip.put(first, second); usip.putStringAndInteger(first, second);
Assertions.assertEquals(second, usip.get(first)); Assertions.assertEquals(second, usip.getInt(first));
Assertions.assertEquals(first, usip.getKey(second)); Assertions.assertEquals(first, usip.getString(second));
} }
{ {
final UniqueStringIntegerPairs usip = new UniqueStringIntegerPairs(database); final UniqueStringIntegerPairs usip = new UniqueStringIntegerPairs(database);
Assertions.assertEquals(second, usip.get(first)); Assertions.assertEquals(second, usip.getInt(first));
Assertions.assertEquals(first, usip.getKey(second)); Assertions.assertEquals(first, usip.getString(second));
} }
} }
@@ -53,11 +53,11 @@ public class UniqueStringIntegerPairsTest {
final Integer second = 1; final Integer second = 1;
final UniqueStringIntegerPairs usip = new UniqueStringIntegerPairs(database); final UniqueStringIntegerPairs usip = new UniqueStringIntegerPairs(database);
usip.put(first, second); usip.putStringAndInteger(first, second);
try { try {
// cannot add another pair with the first key // cannot add another pair with the first key
final int another = second + 1; final int another = second + 1;
usip.put(first, another); usip.putStringAndInteger(first, another);
Assertions.fail("expected an IllegalArgumentException"); Assertions.fail("expected an IllegalArgumentException");
} catch (final IllegalArgumentException e) { } catch (final IllegalArgumentException e) {
// expected // expected
@@ -66,7 +66,7 @@ public class UniqueStringIntegerPairsTest {
try { try {
// cannot add another pair with the same second value // cannot add another pair with the same second value
final String another = first + 1; final String another = first + 1;
usip.put(another, second); usip.putStringAndInteger(another, second);
Assertions.fail("expected an IllegalArgumentException"); Assertions.fail("expected an IllegalArgumentException");
} catch (final IllegalArgumentException e) { } catch (final IllegalArgumentException e) {
// expected // expected

View File

@@ -2,11 +2,15 @@
# For additional information regarding the format and rule options, please see: # For additional information regarding the format and rule options, please see:
# https://github.com/browserslist/browserslist#queries # https://github.com/browserslist/browserslist#queries
# For the full list of supported browsers by the Angular framework, please see:
# https://angular.io/guide/browser-support
# You can see what browsers were selected by your queries by running: # You can see what browsers were selected by your queries by running:
# npx browserslist # npx browserslist
> 0.5% last 1 Chrome version
last 2 versions last 1 Firefox version
last 2 Edge major versions
last 2 Safari major versions
last 2 iOS major versions
Firefox ESR Firefox ESR
not dead
not IE 9-11 # For IE 9-11 support, remove 'not'.

View File

@@ -8,6 +8,9 @@ indent_size = 2
insert_final_newline = true insert_final_newline = true
trim_trailing_whitespace = true trim_trailing_whitespace = true
[*.ts]
quote_type = single
[*.md] [*.md]
max_line_length = off max_line_length = off
trim_trailing_whitespace = false trim_trailing_whitespace = false

5
pdb-js/.gitignore vendored
View File

@@ -1,6 +1,7 @@
# See http://help.github.com/ignore-files/ for more about ignoring files. # See http://help.github.com/ignore-files/ for more about ignoring files.
# compiled output # compiled output
/dist
/build /build
/bin /bin
/tmp /tmp
@@ -14,7 +15,6 @@
# profiling files # profiling files
chrome-profiler-events*.json chrome-profiler-events*.json
speed-measure-plugin*.json
# IDEs and editors # IDEs and editors
/.idea /.idea
@@ -34,6 +34,7 @@ speed-measure-plugin*.json
.history/* .history/*
# misc # misc
/.angular/cache
/.sass-cache /.sass-cache
/connect.lock /connect.lock
/coverage /coverage
@@ -46,4 +47,6 @@ testem.log
# System Files # System Files
.DS_Store .DS_Store
Thumbs.db Thumbs.db
#
/env.sh /env.sh

4
pdb-js/.vscode/extensions.json vendored Normal file
View File

@@ -0,0 +1,4 @@
{
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=827846
"recommendations": ["angular.ng-template"]
}

20
pdb-js/.vscode/launch.json vendored Normal file
View File

@@ -0,0 +1,20 @@
{
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "ng serve",
"type": "pwa-chrome",
"request": "launch",
"preLaunchTask": "npm: start",
"url": "http://localhost:4200/"
},
{
"name": "ng test",
"type": "chrome",
"request": "launch",
"preLaunchTask": "npm: test",
"url": "http://localhost:9876/debug.html"
}
]
}

42
pdb-js/.vscode/tasks.json vendored Normal file
View File

@@ -0,0 +1,42 @@
{
// For more information, visit: https://go.microsoft.com/fwlink/?LinkId=733558
"version": "2.0.0",
"tasks": [
{
"type": "npm",
"script": "start",
"isBackground": true,
"problemMatcher": {
"owner": "typescript",
"pattern": "$tsc",
"background": {
"activeOnStart": true,
"beginsPattern": {
"regexp": "(.*?)"
},
"endsPattern": {
"regexp": "bundle generation complete"
}
}
}
},
{
"type": "npm",
"script": "test",
"isBackground": true,
"problemMatcher": {
"owner": "typescript",
"pattern": "$tsc",
"background": {
"activeOnStart": true,
"beginsPattern": {
"regexp": "(.*?)"
},
"endsPattern": {
"regexp": "bundle generation complete"
}
}
}
}
]
}

View File

@@ -3,16 +3,19 @@
"version": 1, "version": 1,
"newProjectRoot": "projects", "newProjectRoot": "projects",
"projects": { "projects": {
"pdb": { "pdb-js": {
"projectType": "application", "projectType": "application",
"schematics": { "schematics": {
"@schematics/angular:component": { "@schematics/angular:component": {
"style": "scss" "style": "scss"
},
"@schematics/angular:application": {
"strict": true
} }
}, },
"root": "", "root": "",
"sourceRoot": "src", "sourceRoot": "src",
"prefix": "pdb", "prefix": "app",
"architect": { "architect": {
"build": { "build": {
"builder": "@angular-devkit/build-angular:browser", "builder": "@angular-devkit/build-angular:browser",
@@ -22,69 +25,68 @@
"main": "src/main.ts", "main": "src/main.ts",
"polyfills": "src/polyfills.ts", "polyfills": "src/polyfills.ts",
"tsConfig": "tsconfig.app.json", "tsConfig": "tsconfig.app.json",
"inlineStyleLanguage": "scss",
"assets": [ "assets": [
"src/favicon.ico", "src/favicon.ico",
"src/assets" "src/assets"
], ],
"styles": [ "styles": [
"src/styles.scss", "src/styles.scss"
"src/custom-theme.scss"
], ],
"scripts": [], "scripts": []
"vendorChunk": true,
"extractLicenses": false,
"buildOptimizer": false,
"sourceMap": true,
"optimization": false,
"namedChunks": true
}, },
"configurations": { "configurations": {
"production": { "production": {
"budgets": [
{
"type": "initial",
"maximumWarning": "500kb",
"maximumError": "2mb"
},
{
"type": "anyComponentStyle",
"maximumWarning": "2kb",
"maximumError": "4kb"
}
],
"fileReplacements": [ "fileReplacements": [
{ {
"replace": "src/environments/environment.ts", "replace": "src/environments/environment.ts",
"with": "src/environments/environment.prod.ts" "with": "src/environments/environment.prod.ts"
} }
], ],
"optimization": true, "outputHashing": "all"
"outputHashing": "all",
"sourceMap": false,
"namedChunks": false,
"extractLicenses": true,
"vendorChunk": false,
"buildOptimizer": true,
"budgets": [
{
"type": "initial",
"maximumWarning": "2mb",
"maximumError": "5mb"
}, },
{ "development": {
"type": "anyComponentStyle", "buildOptimizer": false,
"maximumWarning": "6kb", "optimization": false,
"maximumError": "10kb" "vendorChunk": true,
} "extractLicenses": false,
] "sourceMap": true,
"namedChunks": true
} }
}, },
"defaultConfiguration": "" "defaultConfiguration": "production"
}, },
"serve": { "serve": {
"builder": "@angular-devkit/build-angular:dev-server", "builder": "@angular-devkit/build-angular:dev-server",
"options": { "options": {
"browserTarget": "pdb:build",
"proxyConfig": "proxy.conf.json" "proxyConfig": "proxy.conf.json"
}, },
"configurations": { "configurations": {
"production": { "production": {
"browserTarget": "pdb:build:production" "browserTarget": "pdb-js:build:production"
} },
"development": {
"browserTarget": "pdb-js:build:development"
} }
}, },
"defaultConfiguration": "development"
},
"extract-i18n": { "extract-i18n": {
"builder": "@angular-devkit/build-angular:extract-i18n", "builder": "@angular-devkit/build-angular:extract-i18n",
"options": { "options": {
"browserTarget": "pdb:build" "browserTarget": "pdb-js:build"
} }
}, },
"test": { "test": {
@@ -94,6 +96,7 @@
"polyfills": "src/polyfills.ts", "polyfills": "src/polyfills.ts",
"tsConfig": "tsconfig.spec.json", "tsConfig": "tsconfig.spec.json",
"karmaConfig": "karma.conf.js", "karmaConfig": "karma.conf.js",
"inlineStyleLanguage": "scss",
"assets": [ "assets": [
"src/favicon.ico", "src/favicon.ico",
"src/assets" "src/assets"
@@ -103,33 +106,8 @@
], ],
"scripts": [] "scripts": []
} }
},
"lint": {
"builder": "@angular-devkit/build-angular:tslint",
"options": {
"tsConfig": [
"tsconfig.app.json",
"tsconfig.spec.json",
"e2e/tsconfig.json"
],
"exclude": [
"**/node_modules/**"
]
}
},
"e2e": {
"builder": "@angular-devkit/build-angular:protractor",
"options": {
"protractorConfig": "e2e/protractor.conf.js",
"devServerTarget": "pdb:serve"
},
"configurations": {
"production": {
"devServerTarget": "pdb:serve:production"
} }
} }
} }
} }
}},
"defaultProject": "pdb"
} }

View File

@@ -2,7 +2,7 @@ import java.nio.file.Files
import java.nio.file.Paths import java.nio.file.Paths
plugins { plugins {
id("com.github.node-gradle.node") version "3.1.0" id("com.github.node-gradle.node") version "3.4.0"
} }

View File

@@ -1,32 +0,0 @@
// @ts-check
// Protractor configuration file, see link for more information
// https://github.com/angular/protractor/blob/master/lib/config.ts
const { SpecReporter } = require('jasmine-spec-reporter');
/**
* @type { import("protractor").Config }
*/
exports.config = {
allScriptsTimeout: 11000,
specs: [
'./src/**/*.e2e-spec.ts'
],
capabilities: {
'browserName': 'chrome'
},
directConnect: true,
baseUrl: 'http://localhost:4200/',
framework: 'jasmine',
jasmineNodeOpts: {
showColors: true,
defaultTimeoutInterval: 30000,
print: function() {}
},
onPrepare() {
require('ts-node').register({
project: require('path').join(__dirname, './tsconfig.json')
});
jasmine.getEnv().addReporter(new SpecReporter({ spec: { displayStacktrace: true } }));
}
};

View File

@@ -1,23 +0,0 @@
import { AppPage } from './app.po';
import { browser, logging } from 'protractor';
describe('workspace-project App', () => {
let page: AppPage;
beforeEach(() => {
page = new AppPage();
});
it('should display welcome message', () => {
page.navigateTo();
expect(page.getTitleText()).toEqual('pdb app is running!');
});
afterEach(async () => {
// Assert that there are no errors emitted from the browser
const logs = await browser.manage().logs().get(logging.Type.BROWSER);
expect(logs).not.toContain(jasmine.objectContaining({
level: logging.Level.SEVERE,
} as logging.Entry));
});
});

View File

@@ -1,11 +0,0 @@
import { browser, by, element } from 'protractor';
export class AppPage {
navigateTo() {
return browser.get(browser.baseUrl) as Promise<any>;
}
getTitleText() {
return element(by.css('app-root .content span')).getText() as Promise<string>;
}
}

View File

@@ -1,13 +0,0 @@
{
"extends": "../tsconfig.base.json",
"compilerOptions": {
"outDir": "../out-tsc/e2e",
"module": "commonjs",
"target": "es2018",
"types": [
"jasmine",
"jasminewd2",
"node"
]
}
}

View File

@@ -9,16 +9,28 @@ module.exports = function (config) {
require('karma-jasmine'), require('karma-jasmine'),
require('karma-chrome-launcher'), require('karma-chrome-launcher'),
require('karma-jasmine-html-reporter'), require('karma-jasmine-html-reporter'),
require('karma-coverage-istanbul-reporter'), require('karma-coverage'),
require('@angular-devkit/build-angular/plugins/karma') require('@angular-devkit/build-angular/plugins/karma')
], ],
client: { client: {
jasmine: {
// you can add configuration options for Jasmine here
// the possible options are listed at https://jasmine.github.io/api/edge/Configuration.html
// for example, you can disable the random execution with `random: false`
// or set a specific seed with `seed: 4321`
},
clearContext: false // leave Jasmine Spec Runner output visible in browser clearContext: false // leave Jasmine Spec Runner output visible in browser
}, },
coverageIstanbulReporter: { jasmineHtmlReporter: {
dir: require('path').join(__dirname, './coverage/pdb'), suppressAll: true // removes the duplicated traces
reports: ['html', 'lcovonly', 'text-summary'], },
fixWebpackSourcePaths: true coverageReporter: {
dir: require('path').join(__dirname, './coverage/pdb-js'),
subdir: '.',
reporters: [
{ type: 'html' },
{ type: 'text-summary' }
]
}, },
reporters: ['progress', 'kjhtml'], reporters: ['progress', 'kjhtml'],
port: 9876, port: 9876,

24014
pdb-js/package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,52 +1,46 @@
{ {
"name": "pdb", "name": "pdb-js",
"version": "0.0.0", "version": "0.0.0",
"scripts": { "scripts": {
"ng": "ng", "ng": "ng",
"start": "ng serve", "start": "ng serve",
"build": "ng build", "build": "ng build",
"releasebuild": "ng build --configuration production", "watch": "ng build --watch --configuration development",
"test": "ng test", "test": "ng test",
"lint": "ng lint", "lint": "ng lint",
"e2e": "ng e2e" "e2e": "ng e2e",
"releasebuild": "ng build --configuration production"
}, },
"private": true, "private": true,
"dependencies": { "dependencies": {
"@angular/animations": "^12.1.1", "@angular/animations": "^14.2.5",
"@angular/cdk": "^12.1.1", "@angular/cdk": "^14.2.4",
"@angular/common": "^12.1.1", "@angular/common": "^14.2.5",
"@angular/compiler": "^12.1.1", "@angular/compiler": "^14.2.5",
"@angular/core": "^12.1.1", "@angular/core": "^14.2.5",
"@angular/forms": "^12.1.1", "@angular/forms": "^14.2.5",
"@angular/material": "^12.1.1", "@angular/material": "^14.2.4",
"@angular/platform-browser": "^12.1.1", "@angular/platform-browser": "^14.2.5",
"@angular/platform-browser-dynamic": "^12.1.1", "@angular/platform-browser-dynamic": "^14.2.5",
"@angular/router": "^12.1.1", "@angular/router": "^14.2.5",
"moment": "^2.29.1", "moment": "^2.29.1",
"rxjs": "^6.6.7", "rxjs": "~7.5.0",
"rxjs-compat": "^6.6.7", "rxjs-compat": "^6.6.7",
"tslib": "^2.3.0", "tslib": "^2.3.0",
"zone.js": "~0.11.4" "zone.js": "~0.11.4"
}, },
"devDependencies": { "devDependencies": {
"@angular-devkit/build-angular": "^12.1.1", "@angular-devkit/build-angular": "^14.2.5",
"@angular/cli": "^12.1.1", "@angular/cli": "^14.2.5",
"@angular/compiler-cli": "^12.1.1", "@angular/compiler-cli": "^14.2.5",
"@angular/language-service": "^12.1.1", "@types/jasmine": "~3.10.0",
"@types/jasmine": "^3.8.1", "@types/node": "^12.11.1",
"@types/jasminewd2": "^2.0.10", "jasmine-core": "~3.10.0",
"@types/node": "^12.20.16", "karma": "~6.3.0",
"codelyzer": "^6.0.2",
"jasmine-core": "~3.6.0",
"jasmine-spec-reporter": "~5.0.0",
"karma": "~6.3.4",
"karma-chrome-launcher": "~3.1.0", "karma-chrome-launcher": "~3.1.0",
"karma-coverage-istanbul-reporter": "~3.0.2", "karma-coverage": "~2.1.0",
"karma-jasmine": "~4.0.0", "karma-jasmine": "~4.0.0",
"karma-jasmine-html-reporter": "^1.7.0", "karma-jasmine-html-reporter": "~1.7.0",
"protractor": "~7.0.0", "typescript": "~4.6.2"
"ts-node": "~8.5.4",
"tslint": "^6.1.0",
"typescript": "4.3.5"
} }
} }

View File

@@ -10,7 +10,7 @@
</mat-select> </mat-select>
</mat-form-field> </mat-form-field>
<pdb-image-toggle *ngIf="filterBy" images="{{compareImages}}" (valueChanged)="comparatorChanged($event)"></pdb-image-toggle> <pdb-image-toggle *ngIf="filterBy" images="{{compareImages}}" (valueChanged)="comparatorChanged()"></pdb-image-toggle>
<mat-form-field *ngIf="filterBy" class="pdb-form-number-long"> <mat-form-field *ngIf="filterBy" class="pdb-form-number-long">
<input matInput type="number" placeholder="" min="0" [(ngModel)]="value"> <input matInput type="number" placeholder="" min="0" [(ngModel)]="value">

View File

@@ -15,10 +15,10 @@
</mat-form-field> </mat-form-field>
<pdb-image-toggle images="{{ascDescImages}}" (valueChanged)="sortOrderChanged($event)"></pdb-image-toggle> <pdb-image-toggle images="{{ascDescImages}}" (valueChanged)="sortOrderChanged()"></pdb-image-toggle>
<pdb-gallery-filter-view (valueChanged)="filterChanged($event)"></pdb-gallery-filter-view> <pdb-gallery-filter-view (valueChanged)="filterChanged()"></pdb-gallery-filter-view>
<mat-checkbox [(ngModel)]="showDetails">Show Details</mat-checkbox> <mat-checkbox [(ngModel)]="showDetails">Show Details</mat-checkbox>
</div> </div>

View File

@@ -46,8 +46,7 @@ export class GalleryFilterView {
@Output() @Output()
valueChanged : EventEmitter<GalleryFilterData> = new EventEmitter<GalleryFilterData>(); valueChanged : EventEmitter<GalleryFilterData> = new EventEmitter<GalleryFilterData>();
comparatorChanged(newComparator: string){ comparatorChanged(){
this._comparator = newComparator;
this.valueChanged.emit(undefined); this.valueChanged.emit(undefined);
} }
@@ -115,7 +114,7 @@ export class GalleryViewComponent implements OnInit {
showDetails = false; showDetails = false;
@ViewChild(GalleryFilterView) @ViewChild(GalleryFilterView)
filter : GalleryFilterView; filter! : GalleryFilterView;
ascDescImages = JSON.stringify([ ascDescImages = JSON.stringify([
{ {
@@ -133,7 +132,7 @@ export class GalleryViewComponent implements OnInit {
constructor(private plotService: PlotService, private snackBar: MatSnackBar) { constructor(private plotService: PlotService, private snackBar: MatSnackBar) {
} }
showError(message) { showError(message: string) {
this.snackBar.open(message, "", { this.snackBar.open(message, "", {
duration: 5000, duration: 5000,
verticalPosition: 'top' verticalPosition: 'top'
@@ -193,8 +192,8 @@ export class GalleryViewComponent implements OnInit {
filterPredicate(galleryItem: GalleryItem){ filterPredicate(galleryItem: GalleryItem){
const predicate = this.filter.comparator == 'LESS_EQUAL' const predicate = this.filter.comparator == 'LESS_EQUAL'
? function(a, b) { return a <= b; } ? function(a: number, b: number) { return a <= b; }
: function(a, b) { return a >= b; }; : function(a: number, b: number) { return a >= b; };
const millis = this.timeUnitToMillis(this.filter.value, this.filter.unit); const millis = this.timeUnitToMillis(this.filter.value, this.filter.unit);
switch(this.filter.filterBy){ switch(this.filter.filterBy){
case 'NONE': case 'NONE':
@@ -214,7 +213,7 @@ export class GalleryViewComponent implements OnInit {
throw "unhandled option: " + this.filter.filterBy; throw "unhandled option: " + this.filter.filterBy;
} }
timeUnitToMillis(value, unit) timeUnitToMillis(value: number, unit: string)
{ {
switch(unit){ switch(unit){
case 'NO_UNIT': case 'NO_UNIT':
@@ -260,7 +259,7 @@ export class GalleryViewComponent implements OnInit {
return; return;
} }
const splitByValue = this.splitByValuesQueue.pop(); const splitByValue = <string>this.splitByValuesQueue.pop();
let request = masterRequest.copy(); let request = masterRequest.copy();
request.query = "("+request.query+") and " + splitByField+"="+ splitByValue; request.query = "("+request.query+") and " + splitByField+"="+ splitByValue;
@@ -283,7 +282,7 @@ export class GalleryViewComponent implements OnInit {
that.sortAndFilterGallery(); that.sortAndFilterGallery();
that.renderGalleryRecursively(masterRequest, splitByField); that.renderGalleryRecursively(masterRequest, splitByField);
}, },
error => { (error:any) => {
that.showError(error.error.message); that.showError(error.error.message);
}); });
} }
@@ -295,12 +294,11 @@ export class GalleryViewComponent implements OnInit {
get sortBy(): string { return this._sortBy; } get sortBy(): string { return this._sortBy; }
sortOrderChanged(event){ sortOrderChanged(){
this.sortOrder = event;
this.sortAndFilterGallery(); this.sortAndFilterGallery();
} }
filterChanged(event){ filterChanged(){
this.sortAndFilterGallery(); this.sortAndFilterGallery();
} }
@@ -313,7 +311,7 @@ export class GalleryViewComponent implements OnInit {
}) })
export class GalleryItemView { export class GalleryItemView {
@Input() @Input()
data: GalleryItem; data!: GalleryItem;
@Input() @Input()
showDetails: boolean = false; showDetails: boolean = false;
@@ -338,7 +336,7 @@ export class GalleryItem {
imageUrl: string; imageUrl: string;
stats: PlotResponseStats; stats: PlotResponseStats;
splitByValue : string; splitByValue : string;
show : boolean; show : boolean = false;
constructor(splitByValue: string, plotResponse: PlotResponse){ constructor(splitByValue: string, plotResponse: PlotResponse){
this.thumbnailUrl = plotResponse.thumbnailUrl; this.thumbnailUrl = plotResponse.thumbnailUrl;

View File

@@ -17,7 +17,7 @@ export class ImageToggleComponent implements OnInit {
text = undefined; text = undefined;
_states : Array<any>; _states : Array<any> = [];
constructor() { } constructor() { }
@@ -34,7 +34,7 @@ export class ImageToggleComponent implements OnInit {
return this._states[this.index].title; return this._states[this.index].title;
} }
toggle(event){ toggle(event: any){
this.index = (this.index+1) % this._states.length; this.index = (this.index+1) % this._states.length;
this.text = this._states[this.index].text; this.text = this._states[this.index].text;
this.valueChanged.emit(this._states[this.index].value); this.valueChanged.emit(this._states[this.index].value);

View File

@@ -17,7 +17,7 @@
<td><div class="{{ pointTypeClass(stat.dashTypeAndColor) }}" title="{{ stat.name }}"></div></td> <td><div class="{{ pointTypeClass(stat.dashTypeAndColor) }}" title="{{ stat.name }}"></div></td>
<td>{{ stat.values }}</td> <td>{{ stat.values }}</td>
<td>{{ utils.format(stat.average, valueFormat) }}</td> <td>{{ utils.format(stat.average, valueFormat) }}</td>
<td *ngFor="let key of percentilesToPlot.keys()">{{utils.format(stat.percentiles[percentilesToPlot.get(key)], valueFormat)}}</td> <td *ngFor="let key of percentilesToPlot.keys()">{{percentileStat(key, stat)}}</td>
<td>{{ utils.format(stat.maxValue, valueFormat)}}</td> <td>{{ utils.format(stat.maxValue, valueFormat)}}</td>
</tr> </tr>
</table> </table>
@@ -51,7 +51,7 @@
<tr *ngFor="let statsRow of stats.dataSeriesStats"> <tr *ngFor="let statsRow of stats.dataSeriesStats">
<td><div class="{{ pointTypeClass(statsRow.dashTypeAndColor) }}" title="{{ statsRow.name }}"></div></td> <td><div class="{{ pointTypeClass(statsRow.dashTypeAndColor) }}" title="{{ statsRow.name }}"></div></td>
<td *ngFor="let statsCol of stats.dataSeriesStats"> <td *ngFor="let statsCol of stats.dataSeriesStats">
{{ utils.toPercent(statsRow.percentiles[percentilesToPlot.get(p)] / statsCol.percentiles[percentilesToPlot.get(p)]) }} {{ toPercent(statsRow, statsCol, p) }}
</td> </td>
</tr> </tr>
</table> </table>

View File

@@ -1,5 +1,5 @@
import { Component, OnInit, Input, Output, ViewChild, EventEmitter, ɵpublishDefaultGlobalUtils } from '@angular/core'; import { Component, OnInit, Input, Output, ViewChild, EventEmitter, ɵpublishDefaultGlobalUtils } from '@angular/core';
import { DashTypeAndColor, PlotResponseStats } from '../plot.service'; import { DashTypeAndColor, PlotResponseStats, DataSeriesStats } from '../plot.service';
import { UtilService } from '../utils.service'; import { UtilService } from '../utils.service';
@Component({ @Component({
@@ -10,7 +10,7 @@ import { UtilService } from '../utils.service';
export class PlotDetailsComponent { export class PlotDetailsComponent {
@Input() @Input()
stats: PlotResponseStats; stats!: PlotResponseStats;
hasPercentiles = false; hasPercentiles = false;
@@ -25,6 +25,9 @@ export class PlotDetailsComponent {
ngOnInit() { ngOnInit() {
this.hasPercentiles = false; this.hasPercentiles = false;
this.percentilesToPlot.clear(); this.percentilesToPlot.clear();
console.log("plotdetails.stats: " + JSON.stringify(this.stats));
if (this.stats) {
for (let i = 0; i < this.stats.dataSeriesStats.length; i++) for (let i = 0; i < this.stats.dataSeriesStats.length; i++)
{ {
const stat = this.stats.dataSeriesStats[i]; const stat = this.stats.dataSeriesStats[i];
@@ -39,6 +42,7 @@ export class PlotDetailsComponent {
} }
} }
} }
}
percentile(value: number): string { percentile(value: number): string {
return this.utils.format(value, this.valueFormat); return this.utils.format(value, this.valueFormat);
@@ -49,4 +53,28 @@ export class PlotDetailsComponent {
+" plot-details-plotType_"+typeAndColor.pointType +" plot-details-plotType_"+typeAndColor.pointType
+" plot-details-plotType_"+typeAndColor.color.toLocaleLowerCase(); +" plot-details-plotType_"+typeAndColor.color.toLocaleLowerCase();
} }
toPercent(statsRow: DataSeriesStats, statsCol: DataSeriesStats, key: string){
const percentile = this.percentilesToPlot.get(key);
if (percentile) {
const rowValue = (<any>statsRow.percentiles)[percentile];
const columnValue = (<any>statsCol.percentiles)[percentile];
if (rowValue !== undefined && columnValue !== undefined) {
return this.utils.toPercent(rowValue / columnValue);
}
}
return "?%"
}
percentileStat(key: string, stat: DataSeriesStats): string{
const plotKey = this.percentilesToPlot.get(key);
if (plotKey !== undefined){
console.log("stat.percentiles: ", stat.percentiles);
const value = (<any>stat.percentiles)[plotKey];
if (value !== undefined){
return this.utils.format(value, this.valueFormat);
}
}
return "no value";
}
} }

View File

@@ -29,7 +29,7 @@
[style.width]="zoomInSliderStyleWidth" [style.width]="zoomInSliderStyleWidth"
></div> ></div>
</div> </div>
<div *ngIf="showStats" class="plot-view-overlay"> <div *ngIf="showStats && stats != null" class="plot-view-overlay">
<pdb-plot-details [stats]="stats"></pdb-plot-details> <pdb-plot-details [stats]="stats"></pdb-plot-details>
<div class="top-right"> <div class="top-right">
<img <img

View File

@@ -15,10 +15,10 @@ export class PlotViewComponent implements OnInit {
readonly gnuplotBMargin = 76; // The bottom margin configured for gnuplot readonly gnuplotBMargin = 76; // The bottom margin configured for gnuplot
imageUrl : string; imageUrl! : string;
stats : PlotResponseStats; stats: PlotResponseStats | null = null;
axes: AxesTypes; axes!: AxesTypes;
@Output() @Output()
zoomRange : EventEmitter<SelectionRange> = new EventEmitter<SelectionRange>(); zoomRange : EventEmitter<SelectionRange> = new EventEmitter<SelectionRange>();
@@ -49,20 +49,20 @@ export class PlotViewComponent implements OnInit {
hideZoomInSlider() { hideZoomInSlider() {
this.zoomInSliderStyleDisplay = "none"; this.zoomInSliderStyleDisplay = "none";
} }
update_cursor(event){ update_cursor(event: MouseEvent){
//$('#result-image').css('cursor', this.isInPlot(event) ? 'crosshair' : 'default'); //$('#result-image').css('cursor', this.isInPlot(event) ? 'crosshair' : 'default');
this.imageCursor = this.isInPlot(event) ? 'crosshair' : 'default'; this.imageCursor = this.isInPlot(event) ? 'crosshair' : 'default';
} }
imageWidth() { imageWidth() {
return Math.floor(document.getElementById('result-image').offsetWidth); return Math.floor(document.getElementById('result-image')!.offsetWidth);
} }
imageHeight() { imageHeight() {
return Math.floor(document.getElementById('result-image').offsetHeight); return Math.floor(document.getElementById('result-image')!.offsetHeight);
} }
positionInImage(event) : any { positionInImage(event: MouseEvent) : any {
const rect = (<HTMLImageElement>document.getElementById('result-image')).getBoundingClientRect(); const rect = (<HTMLImageElement>document.getElementById('result-image')).getBoundingClientRect();
const x= event.clientX - rect.left; const x= event.clientX - rect.left;
const y= event.clientY - rect.top; const y= event.clientY - rect.top;
@@ -73,7 +73,7 @@ export class PlotViewComponent implements OnInit {
return {x: x, y: y}; return {x: x, y: y};
} }
isInPlot(event) : boolean{ isInPlot(event: MouseEvent) : boolean{
const pos = this.positionInImage(event); const pos = this.positionInImage(event);
return pos.x > this.gnuplotLMargin return pos.x > this.gnuplotLMargin
@@ -82,7 +82,7 @@ export class PlotViewComponent implements OnInit {
&& pos.y < this.imageHeight()- this.gnuplotBMargin; && pos.y < this.imageHeight()- this.gnuplotBMargin;
} }
isInImage(event) : boolean{ isInImage(event: MouseEvent) : boolean{
const pos = this.positionInImage(event); const pos = this.positionInImage(event);
return pos.x > 0 return pos.x > 0
@@ -91,7 +91,7 @@ export class PlotViewComponent implements OnInit {
&& pos.y < this.imageHeight(); && pos.y < this.imageHeight();
} }
dragStart(event) { dragStart(event: MouseEvent) {
//console.log("dragStart inPlot: " + this.isInPlot(event)); //console.log("dragStart inPlot: " + this.isInPlot(event));
event.preventDefault(); event.preventDefault();
@@ -104,7 +104,7 @@ export class PlotViewComponent implements OnInit {
} }
} }
dragging(event) { dragging(event: MouseEvent) {
//console.log("dragging " + this.isInPlot(event)); //console.log("dragging " + this.isInPlot(event));
this.update_cursor(event); this.update_cursor(event);
@@ -128,7 +128,7 @@ export class PlotViewComponent implements OnInit {
} }
} }
dragStop(event) { dragStop(event: MouseEvent) {
if (this.in_drag_mode){ if (this.in_drag_mode){
this.in_drag_mode = false; this.in_drag_mode = false;
this.hideZoomInSlider(); this.hideZoomInSlider();
@@ -152,7 +152,7 @@ export class PlotViewComponent implements OnInit {
} }
} }
dragAbort(event) { dragAbort(event: MouseEvent) {
//console.log("drag_abort"); //console.log("drag_abort");
if (this.in_drag_mode && !this.isInImage(event)) { if (this.in_drag_mode && !this.isInImage(event)) {
this.in_drag_mode = false; this.in_drag_mode = false;
@@ -162,7 +162,7 @@ export class PlotViewComponent implements OnInit {
} }
} }
zoomByScroll(event) { zoomByScroll(event: WheelEvent) {
if (this.isInImage(event) && event.deltaY != 0 && this.axes.hasXAxis(DataType.Time)) { if (this.isInImage(event) && event.deltaY != 0 && this.axes.hasXAxis(DataType.Time)) {
this.in_drag_mode = false; this.in_drag_mode = false;
this.hideZoomInSlider(); this.hideZoomInSlider();

View File

@@ -1,6 +1,6 @@
import { Injectable, OnInit } from '@angular/core'; import { Injectable, OnInit } from '@angular/core';
import { HttpClient, HttpParams } from '@angular/common/http'; import { HttpClient, HttpParams } from '@angular/common/http';
import { Observable } from 'rxjs/Observable'; import { Observable } from 'rxjs';
import { map } from 'rxjs/operators'; import { map } from 'rxjs/operators';
@@ -17,7 +17,8 @@ export class PlotService {
this.plotTypes.push(new PlotType("CUM_DISTRIBUTION", "Cumulative Distribution", "cumulative-distribution-chart", true, DataType.Percent, DataType.Duration)); this.plotTypes.push(new PlotType("CUM_DISTRIBUTION", "Cumulative Distribution", "cumulative-distribution-chart", true, DataType.Percent, DataType.Duration));
this.plotTypes.push(new PlotType("HISTOGRAM", "Histogram", "histogram", true, DataType.HistogramBin, DataType.HistogramCount)); this.plotTypes.push(new PlotType("HISTOGRAM", "Histogram", "histogram", true, DataType.HistogramBin, DataType.HistogramCount));
this.plotTypes.push(new PlotType("PARALLEL", "Parallel Requests", "parallel-requests-chart", true, DataType.Time, DataType.Count)); this.plotTypes.push(new PlotType("PARALLEL", "Parallel Requests", "parallel-requests-chart", true, DataType.Time, DataType.Count));
this.plotTypes.push(new PlotType("BAR", "Bar", "bar-chart", true, DataType.Group, DataType.Count)); this.plotTypes.push(new PlotType("BAR", "Bar (number of requests)", "bar-chart", true, DataType.Group, DataType.Count));
this.plotTypes.push(new PlotType("BOX", "Box", "box-plot", true, DataType.Time, DataType.Duration));
this.plotTypes.push(new PlotType("HEATMAP", "Heatmap", "heatmap", false, DataType.Other, DataType.Other)); this.plotTypes.push(new PlotType("HEATMAP", "Heatmap", "heatmap", false, DataType.Other, DataType.Other));
this.plotTypes.push(new PlotType("CONTOUR", "Contour", "contour-chart", false, DataType.Time, DataType.Duration)); this.plotTypes.push(new PlotType("CONTOUR", "Contour", "contour-chart", false, DataType.Time, DataType.Duration));
@@ -80,7 +81,7 @@ export class PlotService {
const q = "("+query+") and "+splitBy+"="; const q = "("+query+") and "+splitBy+"=";
return this.autocomplete(q, q.length+1, ResultMode.FULL_VALUES).pipe( return this.autocomplete(q, q.length+1, ResultMode.FULL_VALUES).pipe(
map( map(
autocompleteResult => autocompleteResult.proposals.map(suggestion => suggestion.value) (autocompleteResult: AutocompleteResult) => autocompleteResult.proposals.map((suggestion:Suggestion) => suggestion.value)
) )
); );
} }
@@ -181,39 +182,44 @@ export class AxesTypes {
const x2 = this.getXAxisDataType(2); const x2 = this.getXAxisDataType(2);
const y2 = this.getYAxisDataType(2); const y2 = this.getYAxisDataType(2);
return "x1:"+DataType[x1]+ " y1:"+DataType[y1]+ " x2:"+DataType[x2]+ " y2:"+DataType[y2]; return (x1 ? "x1:"+DataType[x1] : "")
+ (y1 ? " y1:"+DataType[y1] : "")
+ (x2 ? " x2:"+DataType[x2] : "")
+ (y2 ? " y2:"+DataType[y2] : "");
} }
} }
export class Suggestion { export class Suggestion {
value: string; constructor(
newQuery: string; public value: string,
newCaretPosition: number; public newQuery: string,
public newCaretPosition: number){}
} }
export class AutocompleteResult{ export class AutocompleteResult{
proposals: Array<Suggestion>; constructor(public proposals: Array<Suggestion>){}
} }
export class PlotRequest { export class PlotRequest {
query : string; constructor(
height : number; public query : string,
width : number; public height : number,
thumbnailMaxWidth : number = 300; public width : number,
thumbnailMaxHeight : number = 200; public thumbnailMaxWidth : number = 300,
groupBy : Array<string>; public thumbnailMaxHeight : number = 200,
limitBy : string; public groupBy : Array<string>,
limit : number; public limitBy : string,
y1:YAxisDefinition; public limit : number,
y2:YAxisDefinition; public y1:YAxisDefinition,
dateRange : string; public y2:YAxisDefinition|undefined,
aggregates : Array<string>; public dateRange : string,
keyOutside : boolean = false; public aggregates : Array<string>,
generateThumbnail : boolean; public keyOutside : boolean = false,
intervalUnit: string; public generateThumbnail : boolean,
intervalValue: number; public intervalUnit: string,
renderBarChartTickLabels: boolean = false; public intervalValue: number,
public renderBarChartTickLabels: boolean = false){}
copy(): PlotRequest { copy(): PlotRequest {
return JSON.parse(JSON.stringify(this)); return JSON.parse(JSON.stringify(this));
@@ -221,46 +227,52 @@ export class PlotRequest {
} }
export class YAxisDefinition { export class YAxisDefinition {
axisScale : string; constructor(
rangeMin : number; public axisScale : string,
rangeMax : number; public rangeMin : number,
rangeUnit : string; public rangeMax : number,
public rangeUnit : string){}
} }
export class PlotResponse { export class PlotResponse {
imageUrl : string; constructor(
stats : PlotResponseStats; public imageUrl : string,
thumbnailUrl : string; public stats : PlotResponseStats,
public thumbnailUrl : string){}
} }
export class PlotResponseStats { export class PlotResponseStats {
maxValue : number; constructor(
values : number; public maxValue : number,
average : number ; public values : number,
plottedValues : number; public average : number,
maxAvgRatio: number; public plottedValues : number,
dataSeriesStats : Array<DataSeriesStats>; public maxAvgRatio: number,
public dataSeriesStats : Array<DataSeriesStats>){}
} }
export class DataSeriesStats { export class DataSeriesStats {
name: string; constructor(
values : number; public name: string,
maxValue : number; public values : number,
average : number; public maxValue : number,
plottedValues : number; public average : number ,
dashTypeAndColor: DashTypeAndColor; public plottedValues : number,
percentiles: Map<string, number> public dashTypeAndColor: DashTypeAndColor,
public percentiles: Map<string, number>){}
} }
export class DashTypeAndColor { export class DashTypeAndColor {
color: string; constructor(
pointType: number; public color: string,
public pointType: number) {}
} }
export class FilterDefaults { export class FilterDefaults {
groupBy: Array<string>; constructor(
fields: Array<string>; public groupBy: Array<string>,
splitBy: string; public fields: Array<string>,
public splitBy: string){}
} }
export enum ResultMode { export enum ResultMode {

View File

@@ -1,5 +1,5 @@
import { Component, OnInit, Input, ViewChild } from '@angular/core'; import { Component, OnInit, Input, ViewChild } from '@angular/core';
import {FormControl} from '@angular/forms'; import {UntypedFormControl} from '@angular/forms';
import {Observable} from 'rxjs'; import {Observable} from 'rxjs';
import {startWith, map} from 'rxjs/operators'; import {startWith, map} from 'rxjs/operators';
import {MatAutocompleteTrigger } from '@angular/material/autocomplete'; import {MatAutocompleteTrigger } from '@angular/material/autocomplete';
@@ -12,16 +12,16 @@ import { PlotService, PlotType, AutocompleteResult, Suggestion, ResultMode } fro
}) })
export class QueryAutocompleteComponent implements OnInit { export class QueryAutocompleteComponent implements OnInit {
queryField = new FormControl(''); queryField = new UntypedFormControl('');
suggestions = new FormControl(); suggestions = new UntypedFormControl();
filteredSuggestions: Observable<Suggestion[]>; filteredSuggestions!: Observable<Suggestion[]>;
query : string; query : string = "";
@ViewChild(MatAutocompleteTrigger) @ViewChild(MatAutocompleteTrigger)
autocomplete: MatAutocompleteTrigger; autocomplete!: MatAutocompleteTrigger;
constructor(private plotService: PlotService) {} constructor(private plotService: PlotService) {}
@@ -72,12 +72,12 @@ export class QueryAutocompleteComponent implements OnInit {
that.autocomplete.openPanel(); that.autocomplete.openPanel();
}, },
error => console.log(error) (error:any) => console.log(error)
); );
} }
displaySuggestion(suggestion?: Suggestion): string | undefined { displaySuggestion(suggestion?: Suggestion): string {
//console.log("suggestion: "+JSON.stringify(suggestion)); //console.log("suggestion: "+JSON.stringify(suggestion));
return suggestion ? suggestion.newQuery : undefined; return suggestion ? suggestion.newQuery : '';
} }
} }

View File

@@ -17,7 +17,7 @@ export class UtilService {
} }
} }
formatMs(valueInMs):string { formatMs(valueInMs: number):string {
const ms = Math.floor(valueInMs % 1000); const ms = Math.floor(valueInMs % 1000);
const s = Math.floor((valueInMs / 1000) % 60); const s = Math.floor((valueInMs / 1000) % 60);
const m = Math.floor((valueInMs / (60*1000)) % 60); const m = Math.floor((valueInMs / (60*1000)) % 60);

View File

@@ -31,7 +31,7 @@
</mat-form-field> </mat-form-field>
<pdb-limit-by #limitbycomponent></pdb-limit-by> <pdb-limit-by #limitbycomponent></pdb-limit-by>
<div [hidden]="!selectedPlotTypesContains('BAR')"> <div [hidden]="!selectedPlotTypesContains(['BAR', 'BOX'])">
<mat-form-field > <mat-form-field >
<mat-label>Intervals (only bar chart):</mat-label> <mat-label>Intervals (only bar chart):</mat-label>
<mat-select [(value)]="intervalUnit"> <mat-select [(value)]="intervalUnit">
@@ -46,7 +46,7 @@
</mat-select> </mat-select>
</mat-form-field> </mat-form-field>
</div> </div>
<div [hidden]="!selectedPlotTypesContains('BAR')"> <div [hidden]="!selectedPlotTypesContains(['BAR', 'BOX'])">
<mat-checkbox [(ngModel)]="renderBarChartTickLabels">Show Tic Labels (bar chart)</mat-checkbox> <mat-checkbox [(ngModel)]="renderBarChartTickLabels">Show Tic Labels (bar chart)</mat-checkbox>
</div> </div>
<pdb-y-axis-definition #y1AxisDefinitionComponent yIndex="1"></pdb-y-axis-definition> <pdb-y-axis-definition #y1AxisDefinitionComponent yIndex="1"></pdb-y-axis-definition>

View File

@@ -1,7 +1,6 @@
import { Component, OnInit, ViewChild } from '@angular/core'; import { Component, OnInit, ViewChild } from '@angular/core';
import { PlotService, PlotType, PlotRequest, PlotResponse, TagField, FilterDefaults, DataType, YAxisDefinition, AxesTypes } from '../plot.service'; import { PlotService, PlotType, PlotRequest, PlotResponse, TagField, FilterDefaults, DataType, YAxisDefinition, AxesTypes } from '../plot.service';
import { Observable } from 'rxjs/Observable'; import { UntypedFormControl, Validators } from '@angular/forms';
import { FormControl, Validators } from '@angular/forms';
import { MatSnackBar } from '@angular/material/snack-bar'; import { MatSnackBar } from '@angular/material/snack-bar';
import { LimitByComponent } from '../limit-by/limit-by.component'; import { LimitByComponent } from '../limit-by/limit-by.component';
import { YAxisDefinitionComponent } from '../y-axis-definition/y-axis-definition.component'; import { YAxisDefinitionComponent } from '../y-axis-definition/y-axis-definition.component';
@@ -19,36 +18,36 @@ export class VisualizationPageComponent implements OnInit {
readonly DATE_PATTERN = "YYYY-MM-DD HH:mm:ss"; // for moment-JS readonly DATE_PATTERN = "YYYY-MM-DD HH:mm:ss"; // for moment-JS
dateRange = new FormControl('2019-10-05 00:00:00 - 2019-10-11 23:59:59'); dateRange = new UntypedFormControl('2019-10-05 00:00:00 - 2019-10-11 23:59:59');
selectedPlotType = []; selectedPlotType = new Array<PlotType>();
plotTypes: Array<any>; plotTypes: Array<any> = [];
tagFields: Array<TagField> = new Array<TagField>(); tagFields: Array<TagField> = new Array<TagField>();
groupBy = new Array<TagField>(); groupBy = new Array<TagField>();
@ViewChild('limitbycomponent') @ViewChild('limitbycomponent')
private limitbycomponent : LimitByComponent; private limitbycomponent! : LimitByComponent;
@ViewChild('y1AxisDefinitionComponent', { read: YAxisDefinitionComponent }) @ViewChild('y1AxisDefinitionComponent', { read: YAxisDefinitionComponent })
private y1AxisDefinitionComponent : YAxisDefinitionComponent; private y1AxisDefinitionComponent! : YAxisDefinitionComponent;
@ViewChild('y2AxisDefinitionComponent', { read: YAxisDefinitionComponent }) @ViewChild('y2AxisDefinitionComponent', { read: YAxisDefinitionComponent })
private y2AxisDefinitionComponent : YAxisDefinitionComponent; private y2AxisDefinitionComponent! : YAxisDefinitionComponent;
@ViewChild('query') @ViewChild('query')
query: QueryAutocompleteComponent; query!: QueryAutocompleteComponent;
@ViewChild('plotView') @ViewChild('plotView')
plotView: PlotViewComponent; plotView!: PlotViewComponent;
@ViewChild('galleryView') @ViewChild('galleryView')
galleryView: GalleryViewComponent; galleryView!: GalleryViewComponent;
enableGallery = false; enableGallery = false;
splitBy = null; splitBy : TagField | undefined = undefined;
y2AxisAvailable = false; y2AxisAvailable = false;
intervalUnit = 'NO_INTERVAL'; intervalUnit = 'NO_INTERVAL';
@@ -58,7 +57,7 @@ export class VisualizationPageComponent implements OnInit {
constructor(private plotService: PlotService, private snackBar: MatSnackBar) { constructor(private plotService: PlotService, private snackBar: MatSnackBar) {
} }
showError(message) { showError(message:string) {
this.snackBar.open(message, "", { this.snackBar.open(message, "", {
duration: 5000, duration: 5000,
verticalPosition: 'top' verticalPosition: 'top'
@@ -70,12 +69,12 @@ export class VisualizationPageComponent implements OnInit {
this.plotTypes = this.plotService.getPlotTypes(); this.plotTypes = this.plotService.getPlotTypes();
this.selectedPlotType.push(this.plotTypes[0]); this.selectedPlotType.push(this.plotTypes[0]);
that.plotService.getFilterDefaults().subscribe(function(filterDefaults) { that.plotService.getFilterDefaults().subscribe(function(filterDefaults: FilterDefaults) {
filterDefaults.fields.forEach(function(name) { filterDefaults.fields.forEach(function(name:string) {
that.tagFields.push(new TagField(name)); that.tagFields.push(new TagField(name));
}, },
error => { (error: any) => {
that.showError(error.error.message); that.showError(error.error.message);
}); });
@@ -93,8 +92,8 @@ export class VisualizationPageComponent implements OnInit {
this.y2AxisAvailable = axesTypes.y.length == 2; this.y2AxisAvailable = axesTypes.y.length == 2;
} }
selectedPlotTypesContains(plotTypeId: string){ selectedPlotTypesContains(plotTypeIds: Array<string>){
return this.selectedPlotType.filter(pt => pt.id == plotTypeId).length > 0; return this.selectedPlotType.filter(pt => plotTypeIds.includes(pt.id)).length > 0;
} }
@@ -103,12 +102,16 @@ export class VisualizationPageComponent implements OnInit {
} }
gallery(){ gallery(){
if (this.splitBy != null){
const that = this; const that = this;
this.plotView.imageUrl = ''; this.plotView.imageUrl = '';
that.plotView.stats = null; that.plotView.stats = null;
that.galleryView.show=true; that.galleryView.show=true;
const request = this.createPlotRequest(); const request = this.createPlotRequest();
this.galleryView.renderGallery(request, this.splitBy.name); this.galleryView.renderGallery(request, this.splitBy.name);
} else {
console.error("variable splitBy was null when rendering gallery");
}
} }
getAxes() : AxesTypes { getAxes() : AxesTypes {
@@ -141,43 +144,47 @@ export class VisualizationPageComponent implements OnInit {
const request = this.createPlotRequest(); const request = this.createPlotRequest();
this.plotService.sendPlotRequest(request).subscribe(function(plotResponse){ this.plotService.sendPlotRequest(request).subscribe({
console.log("response: " + JSON.stringify(plotResponse)); next: (plotResponse: PlotResponse) => {
that.plotView.imageUrl = "http://"+window.location.hostname+':'+window.location.port+'/'+plotResponse.imageUrl; this.plotView.imageUrl = "http://"+window.location.hostname+':'+window.location.port+'/'+plotResponse.imageUrl;
that.plotView.stats = plotResponse.stats; this.plotView.stats = plotResponse.stats;
document.dispatchEvent(new Event("invadersPause", {})); document.dispatchEvent(new Event("invadersPause", {}));
}, },
error => { error: (error:any) => {
that.plotView.imageUrl = ''; this.plotView.imageUrl = '';
that.plotView.stats = null; this.plotView.stats = null;
that.showError(error.error.message); this.showError(error.error.message);
document.dispatchEvent(new Event("invadersPause", {})); document.dispatchEvent(new Event("invadersPause", {}));
}
}); });
} }
createPlotRequest(): PlotRequest { createPlotRequest(): PlotRequest {
const aggregates = []; const aggregates = new Array<string>();
this.selectedPlotType.forEach(a => aggregates.push(a.id)); this.selectedPlotType.forEach(a => aggregates.push(a.id));
const y1 = this.y1AxisDefinitionComponent.getAxisDefinition(); const y1 = this.y1AxisDefinitionComponent.getAxisDefinition();
const y2 = this.y2AxisDefinitionComponent ? this.y2AxisDefinitionComponent.getAxisDefinition() : undefined; const y2 = this.y2AxisDefinitionComponent ? this.y2AxisDefinitionComponent.getAxisDefinition() : undefined;
const results = document.getElementById("results");
const request = new PlotRequest(); const request = new PlotRequest(
request.query = this.query.query; this.query.query,
request.height = document.getElementById("results").offsetHeight-1; results != null ? results.offsetHeight-1: 1024,
request.width = document.getElementById("results").offsetWidth-1; results != null ? results.offsetWidth-1 : 1024,
request.groupBy = this.groupBy.map(o => o.name); 300, // thumbnailMaxWidth
request.limitBy = this.limitbycomponent.limitBy; 200, // thumbnailMaxHeight
request.limit = this.limitbycomponent.limit; this.groupBy.map(o => o.name),
request.y1 = y1; this.limitbycomponent.limitBy,
request.y2 = y2; this.limitbycomponent.limit,
request.dateRange = this.dateRangeAsString(); y1,
request.aggregates = aggregates; y2,
request.keyOutside = false; this.dateRangeAsString(), // dateRange
request.generateThumbnail = this.enableGallery; aggregates, // aggregates
request.intervalUnit = this.intervalUnit; false, // keyOutside
request.intervalValue = this.intervalValue; this.enableGallery, // generateThumbnail
request.renderBarChartTickLabels = this.renderBarChartTickLabels; this.intervalUnit,
this.intervalValue,
this.renderBarChartTickLabels);
return request; return request;
} }
@@ -222,7 +229,6 @@ export class VisualizationPageComponent implements OnInit {
} }
parseDateRange(dateRangeAsString : string) : DateRange { parseDateRange(dateRangeAsString : string) : DateRange {
if (dateRangeAsString) {
const startDate = moment(dateRangeAsString.slice(0, 19)); const startDate = moment(dateRangeAsString.slice(0, 19));
const endDate = moment(dateRangeAsString.slice(22, 41)); const endDate = moment(dateRangeAsString.slice(22, 41));
@@ -232,7 +238,6 @@ export class VisualizationPageComponent implements OnInit {
duration: moment.duration(endDate.diff(startDate)) duration: moment.duration(endDate.diff(startDate))
}; };
} }
}
setDateRange(startDate: any, endDate: any) { setDateRange(startDate: any, endDate: any) {
const formattedStartDate = startDate.format(this.DATE_PATTERN); const formattedStartDate = startDate.format(this.DATE_PATTERN);
@@ -260,9 +265,11 @@ export class DateRange {
duration: any; duration: any;
} }
/*
export class AxesUsed { export class AxesUsed {
x1: DataType; x1: DataType;
y1: DataType; y1: DataType;
x2: DataType; x2: DataType;
y2: DataType; y2: DataType;
} }
*/

View File

@@ -20,11 +20,6 @@ export class YAxisDefinitionComponent {
} }
getAxisDefinition() { getAxisDefinition() {
const result = new YAxisDefinition(); return new YAxisDefinition(this.yAxisScale,this.minYValue,this.maxYValue,this.yAxisUnit);
result.axisScale = this.yAxisScale;
result.rangeMin = this.minYValue;
result.rangeMax = this.maxYValue;
result.rangeUnit = this.yAxisUnit;
return result;
} }
} }

View File

@@ -0,0 +1,25 @@
<svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 32 32">
<g transform="translate(0.5,0.5)">
<rect x="5" y="8" width="8" height="15" style="fill: none; stroke: black;stroke-width:2;" />
<line x1="6" y1="3" x2="12" y2="3" style="stroke:black;stroke-width:2;"/>
<line x1="9" y1="8" x2="9" y2="3" style="stroke:black;stroke-width:2;"/>
<line x1="5" y1="15" x2="13" y2="15" style="stroke:black;stroke-width:2;"/>
<line x1="9" y1="23" x2="9" y2="28" style="stroke:black;stroke-width:2;"/>
<line x1="6" y1="28" x2="12" y2="28" style="stroke:black;stroke-width:2;"/>
<rect x="18" y="6" width="8" height="13" style="fill: none; stroke: black; stroke-width:2;" />
<line x1="19" y1="2" x2="25" y2="2" style="stroke:black;stroke-width:2;"/>
<line x1="22" y1="6" x2="22" y2="2" style="stroke:black;stroke-width:2;"/>
<line x1="18" y1="13" x2="26" y2="13" style="stroke:black;stroke-width:2;"/>
<line x1="22" y1="19" x2="22" y2="26" style="stroke:black;stroke-width:2;"/>
<line x1="19" y1="26" x2="25" y2="26" style="stroke:black;stroke-width:2;"/>
<path d="M1,0
L1,30
L32,30"
style="stroke:black; stroke-width: 3px; fill:none;"/>
</g>
</svg>

After

Width:  |  Height:  |  Size: 1.2 KiB

View File

@@ -1,24 +0,0 @@
@use '~@angular/material' as mat;
// Plus imports for other components in your app.
// Include the common styles for Angular Material. We include this here so that you only
// have to load a single css file for Angular Material in your app.
// Be sure that you only ever include this mixin once!
@include mat.core();
// Define the palettes for your theme using the Material Design palettes available in palette.scss
// (imported above). For each palette, you can optionally specify a default, lighter, and darker
// hue. Available color palettes: https://material.io/design/color/
$candy-app-primary: mat.define-palette(mat.$blue-palette);
$candy-app-accent: mat.define-palette(mat.$blue-palette, A200, A100, A400);
// The warn palette is optional (defaults to red).
$candy-app-warn: mat.define-palette(mat.$red-palette);
// Create the theme object (a Sass map containing all of the palettes).
$candy-app-theme: mat.define-light-theme($candy-app-primary, $candy-app-accent, $candy-app-warn);
// Include theme styles for core and each component used in your app.
// Alternatively, you can import and @include the theme mixins for each component
// that you are using.
@include mat.all-component-themes($candy-app-theme);

View File

@@ -8,8 +8,8 @@
* file. * file.
* *
* The current setup is for so-called "evergreen" browsers; the last versions of browsers that * The current setup is for so-called "evergreen" browsers; the last versions of browsers that
* automatically update themselves. This includes Safari >= 10, Chrome >= 55 (including Opera), * automatically update themselves. This includes recent versions of Safari, Chrome (including
* Edge >= 13 on the desktop, and iOS 10 and Chrome on mobile. * Opera), Edge on the desktop, and iOS and Chrome on mobile.
* *
* Learn more in https://angular.io/guide/browser-support * Learn more in https://angular.io/guide/browser-support
*/ */
@@ -18,16 +18,6 @@
* BROWSER POLYFILLS * BROWSER POLYFILLS
*/ */
/** IE10 and IE11 requires the following for NgClass support on SVG elements */
// import 'classlist.js'; // Run `npm install --save classlist.js`.
/**
* Web Animations `@angular/platform-browser/animations`
* Only required if AnimationBuilder is used within the application and using IE/Edge or Safari.
* Standard animation support in Angular DOES NOT require any polyfills (as of Angular 6.0).
*/
// import 'web-animations-js'; // Run `npm install --save web-animations-js`.
/** /**
* By default, zone.js will patch all possible macroTask and DomEvents * By default, zone.js will patch all possible macroTask and DomEvents
* user can disable parts of macroTask/DomEvents patch by setting following flags * user can disable parts of macroTask/DomEvents patch by setting following flags
@@ -35,7 +25,7 @@
* will put import in the top of bundle, so user need to create a separate file * will put import in the top of bundle, so user need to create a separate file
* in this directory (for example: zone-flags.ts), and put the following flags * in this directory (for example: zone-flags.ts), and put the following flags
* into that file, and then add the following code before importing zone.js. * into that file, and then add the following code before importing zone.js.
* import './zone-flags.ts'; * import './zone-flags';
* *
* The flags allowed in zone-flags.ts are listed here. * The flags allowed in zone-flags.ts are listed here.
* *

View File

@@ -1,5 +1,39 @@
/* You can add global styles to this file, and also import other style files */ // Custom Theming for Angular Material
@use '~@angular/material' as mat; // For more information: https://material.angular.io/guide/theming
@use '@angular/material' as mat;
// Plus imports for other components in your app.
// Include the common styles for Angular Material. We include this here so that you only
// have to load a single css file for Angular Material in your app.
// Be sure that you only ever include this mixin once!
@include mat.core();
// Define the palettes for your theme using the Material Design palettes available in palette.scss
// (imported above). For each palette, you can optionally specify a default, lighter, and darker
// hue. Available color palettes: https://material.io/design/color/
$candy-app-primary: mat.define-palette(mat.$blue-palette);
$candy-app-accent: mat.define-palette(mat.$blue-palette, A200, A100, A400);
// The warn palette is optional (defaults to red).
$candy-app-warn: mat.define-palette(mat.$red-palette);
// Create the theme object. A theme consists of configurations for individual
// theming systems such as "color" or "typography".
$candy-app-theme: mat.define-light-theme((
color: (
primary: $candy-app-primary,
accent: $candy-app-accent,
warn: $candy-app-warn,
)
));
// Include theme styles for core and each component used in your app.
// Alternatively, you can import and @include the theme mixins for each component
// that you are using.
@include mat.all-component-themes($candy-app-theme);
/* /*
blue blue
#CBD7F4 #CBD7F4
@@ -20,11 +54,6 @@ grey
*/ */
$background-color: #CBD7F4; $background-color: #CBD7F4;
//@import '~@angular/material/prebuilt-themes/deeppurple-amber.css';
@import 'custom-theme.scss';
*, body { *, body {
font-family: Arial; font-family: Arial;
font-size: 14px; font-size: 14px;

View File

@@ -7,13 +7,19 @@ import {
platformBrowserDynamicTesting platformBrowserDynamicTesting
} from '@angular/platform-browser-dynamic/testing'; } from '@angular/platform-browser-dynamic/testing';
declare const require: any; declare const require: {
context(path: string, deep?: boolean, filter?: RegExp): {
<T>(id: string): T;
keys(): string[];
};
};
// First, initialize the Angular testing environment. // First, initialize the Angular testing environment.
getTestBed().initTestEnvironment( getTestBed().initTestEnvironment(
BrowserDynamicTestingModule, BrowserDynamicTestingModule,
platformBrowserDynamicTesting() platformBrowserDynamicTesting(),
); );
// Then we find all the tests. // Then we find all the tests.
const context = require.context('./', true, /\.spec\.ts$/); const context = require.context('./', true, /\.spec\.ts$/);
// And load the modules. // And load the modules.

View File

@@ -1,5 +1,6 @@
/* To learn more about this file see: https://angular.io/config/tsconfig. */
{ {
"extends": "./tsconfig.base.json", "extends": "./tsconfig.json",
"compilerOptions": { "compilerOptions": {
"outDir": "./out-tsc/app", "outDir": "./out-tsc/app",
"types": [] "types": []
@@ -10,9 +11,5 @@
], ],
"include": [ "include": [
"src/**/*.d.ts" "src/**/*.d.ts"
],
"exclude": [
"src/test.ts",
"src/**/*.spec.ts"
] ]
} }

View File

@@ -1,26 +0,0 @@
{
"compileOnSave": false,
"compilerOptions": {
"baseUrl": "./",
"outDir": "./dist/out-tsc",
"sourceMap": true,
"declaration": false,
"downlevelIteration": true,
"experimentalDecorators": true,
"module": "es2020",
"moduleResolution": "node",
"importHelpers": true,
"target": "es2015",
"typeRoots": [
"node_modules/@types"
],
"lib": [
"es2018",
"dom"
]
},
"angularCompilerOptions": {
"fullTemplateTypeCheck": true,
"strictInjectionParameters": true
}
}

View File

@@ -1,17 +1,32 @@
/* /* To learn more about this file see: https://angular.io/config/tsconfig. */
This is a "Solution Style" tsconfig.json file, and is used by editors and TypeScripts language server to improve development experience.
It is not intended to be used to perform a compilation.
To learn more about this file see: https://angular.io/config/solution-tsconfig.
*/
{ {
"files": [], "compileOnSave": false,
"references": [ "compilerOptions": {
{ "baseUrl": "./",
"path": "./tsconfig.app.json" "outDir": "./dist/out-tsc",
}, "forceConsistentCasingInFileNames": true,
{ "strict": true,
"path": "./tsconfig.spec.json" "noImplicitOverride": true,
} "noPropertyAccessFromIndexSignature": true,
"noImplicitReturns": true,
"noFallthroughCasesInSwitch": true,
"sourceMap": true,
"declaration": false,
"downlevelIteration": true,
"experimentalDecorators": true,
"moduleResolution": "node",
"importHelpers": true,
"target": "es2020",
"module": "es2020",
"lib": [
"es2020",
"dom"
] ]
},
"angularCompilerOptions": {
"enableI18nLegacyMessageIdFormat": false,
"strictInjectionParameters": true,
"strictInputAccessModifiers": true,
"strictTemplates": true
}
} }

View File

@@ -1,10 +1,10 @@
/* To learn more about this file see: https://angular.io/config/tsconfig. */
{ {
"extends": "./tsconfig.base.json", "extends": "./tsconfig.json",
"compilerOptions": { "compilerOptions": {
"outDir": "./out-tsc/spec", "outDir": "./out-tsc/spec",
"types": [ "types": [
"jasmine", "jasmine"
"node"
] ]
}, },
"files": [ "files": [

View File

@@ -1,155 +0,0 @@
{
"extends": "tslint:recommended",
"rules": {
"align": {
"options": [
"parameters",
"statements"
]
},
"array-type": false,
"arrow-parens": false,
"arrow-return-shorthand": true,
"curly": true,
"deprecation": {
"severity": "warning"
},
"component-class-suffix": true,
"contextual-lifecycle": true,
"directive-class-suffix": true,
"directive-selector": [
true,
"attribute",
"app",
"camelCase"
],
"component-selector": [
true,
"element",
"app",
"kebab-case"
],
"eofline": true,
"import-blacklist": [
true,
"rxjs/Rx"
],
"import-spacing": true,
"indent": {
"options": [
"spaces"
]
},
"interface-name": false,
"max-classes-per-file": false,
"max-line-length": [
true,
140
],
"member-access": false,
"member-ordering": [
true,
{
"order": [
"static-field",
"instance-field",
"static-method",
"instance-method"
]
}
],
"no-consecutive-blank-lines": false,
"no-console": [
true,
"debug",
"info",
"time",
"timeEnd",
"trace"
],
"no-empty": false,
"no-inferrable-types": [
true,
"ignore-params"
],
"no-non-null-assertion": true,
"no-redundant-jsdoc": true,
"no-switch-case-fall-through": true,
"no-var-requires": false,
"object-literal-key-quotes": [
true,
"as-needed"
],
"object-literal-sort-keys": false,
"ordered-imports": false,
"quotemark": [
true,
"single"
],
"semicolon": {
"options": [
"always"
]
},
"space-before-function-paren": {
"options": {
"anonymous": "never",
"asyncArrow": "always",
"constructor": "never",
"method": "never",
"named": "never"
}
},
"trailing-comma": false,
"no-conflicting-lifecycle": true,
"no-host-metadata-property": true,
"no-input-rename": true,
"no-inputs-metadata-property": true,
"no-output-native": true,
"no-output-on-prefix": true,
"no-output-rename": true,
"no-outputs-metadata-property": true,
"template-banana-in-box": true,
"template-no-negated-async": true,
"typedef-whitespace": {
"options": [
{
"call-signature": "nospace",
"index-signature": "nospace",
"parameter": "nospace",
"property-declaration": "nospace",
"variable-declaration": "nospace"
},
{
"call-signature": "onespace",
"index-signature": "onespace",
"parameter": "onespace",
"property-declaration": "onespace",
"variable-declaration": "onespace"
}
]
},
"use-lifecycle-interface": true,
"use-pipe-transform-interface": true
, "variable-name": {
"options": [
"ban-keywords",
"check-format",
"allow-pascal-case"
]
},
"whitespace": {
"options": [
"check-branch",
"check-decl",
"check-operator",
"check-separator",
"check-type",
"check-typecast"
]
}
},
"rulesDirectory": [
"codelyzer"
]
}

View File

@@ -18,7 +18,9 @@ public enum Aggregate {
*/ */
CUM_DISTRIBUTION("Cumulative Distribution"), CUM_DISTRIBUTION("Cumulative Distribution"),
HISTOGRAM("Histogram"); HISTOGRAM("Histogram"),
BOX("Box");
private final String axisLabel; private final String axisLabel;

View File

@@ -30,7 +30,7 @@ public class BarChartAggregatorForIntervals implements CustomAggregator, Indexed
public BarChartAggregatorForIntervals(final PlotSettings settings) { public BarChartAggregatorForIntervals(final PlotSettings settings) {
this.settings = settings; this.settings = settings;
this.interval = settings.getInterval().get(); this.interval = settings.getInterval().get();
buckets = interval.getBuckets(); buckets = interval.getBuckets(AtomicLong::new);
} }
@Override @Override

View File

@@ -0,0 +1,76 @@
package org.lucares.pdb.plot.api;
import java.util.Locale;
import java.util.UUID;
import org.lucares.collections.LongObjHashMap;
import org.lucares.recommind.logs.GnuplotAxis;
/**
 * Aggregates values into per-interval percentile buckets and renders them as
 * inline gnuplot data for box (candlestick) charts.
 */
public class BoxAggregator implements CustomAggregator {

    /** Unique gnuplot datablock name ("$data" + UUID without dashes) for inline data. */
    private final String dataName = "$data" + UUID.randomUUID().toString().replace("-", "");

    private final Interval interval;

    /** Percentile aggregators keyed by the middle timestamp (epoch millis) of each interval bucket. */
    private final LongObjHashMap<PercentilesAggregator> buckets;

    public BoxAggregator(final PlotSettings settings) {
        this.interval = settings.getInterval().get();
        this.buckets = interval.getMiddleTimeBuckets(PercentilesAggregator::new);
    }

    @Override
    public void addValue(final long epochMilli, final long value) {
        final long bucketId = interval.toBucketMiddleTime(epochMilli);
        PercentilesAggregator bucket = buckets.get(bucketId);
        if (bucket == null) {
            // Defensive: a timestamp just outside the pre-created bucket range
            // would otherwise cause a NullPointerException.
            bucket = new PercentilesAggregator();
            buckets.put(bucketId, bucket);
        }
        bucket.addValue(epochMilli, value);
    }

    @Override
    public AggregatedData getAggregatedData() {
        // not needed - usually this method is used to write the data to file, but box
        // charts use inline data
        return null;
    }

    @Override
    public Aggregate getType() {
        return Aggregate.BOX;
    }

    /**
     * @return the gnuplot datablock name (covariant narrowing from {@code Object}
     *         keeps existing callers source-compatible)
     */
    public String getDataName() {
        return dataName;
    }

    public Interval getInterval() {
        return interval;
    }

    /**
     * Renders the aggregated data as CSV rows of the form
     * {@code epochSeconds,min,p25,median,p75,max} — one row per bucket that has data.
     *
     * @param renderLabels currently unused — TODO confirm whether labels should
     *                     affect the CSV output
     */
    public String asCsv(final boolean renderLabels) {
        final StringBuilder csv = new StringBuilder();
        buckets.forEachOrdered((final long bucketId, final PercentilesAggregator percentilesAggregator) -> {
            final Percentiles percentiles = percentilesAggregator.getPercentiles();
            // buckets without any values yield no percentiles; skip them
            if (percentiles.get("25.000") != null) {
                csv.append(String.format(Locale.US, "%d,%d,%d,%d,%d,%d", //
                        bucketId / 1000, // gnuplot time column expects seconds
                        percentiles.get("0.000"), //
                        percentiles.get("25.000"), //
                        percentiles.get("50.000"), //
                        percentiles.get("75.000"), //
                        percentiles.get("100.000")//
                ));
                csv.append("\n");
            }
        });
        return csv.toString();
    }

    /**
     * @return gnuplot commands for tick labels; not implemented yet — always the
     *         empty string
     */
    public String renderLabels(final GnuplotAxis xAxis) {
        return "";
    }
}

View File

@@ -0,0 +1,99 @@
package org.lucares.pdb.plot.api;
import java.nio.file.Path;
import java.util.Collection;
import java.util.Optional;
import org.lucares.recommind.logs.AxisSettings;
import org.lucares.recommind.logs.AxisTime;
import org.lucares.recommind.logs.DataSeries;
import org.lucares.recommind.logs.GnuplotAxis;
import org.lucares.recommind.logs.GnuplotLineType;
import org.lucares.recommind.logs.GnuplotSettings;
import org.lucares.recommind.logs.LineStyle;
import org.lucares.recommind.logs.Type;
/**
 * Renders box (candlestick) charts: one candlestick per interval bucket showing
 * min/p25/median/p75/max, plus a bold median line, using gnuplot's
 * candlesticks style with inline data supplied by {@link BoxAggregator}.
 */
public class BoxChartHandler extends AggregateHandler {

    @Override
    Type getAxisType(final GnuplotAxis axis) {
        switch (axis) {
        case X1:
        case X2:
            return Type.Time;
        case Y1:
        case Y2:
            return Type.Duration;
        default:
            throw new IllegalArgumentException("Unexpected value: " + axis);
        }
    }

    @Override
    Aggregate getAggregateType() {
        return Aggregate.BOX;
    }

    @Override
    AxisSettings createXAxisSettings(final GnuplotSettings settings, final Collection<DataSeries> dataSeries) {
        final AxisSettings result = AxisTime.createXAxis(settings);
        result.setAxis(getxAxis());
        // only the primary x-axis draws the grid, to avoid doubled grid lines
        result.setShowGrid(getxAxis() == GnuplotAxis.X1);
        return result;
    }

    @Override
    String beforePlot(final CustomAggregator aggregator, final GnuplotSettings settings) {
        final StringBuilder result = new StringBuilder();

        final BoxAggregator boxAggregator = (BoxAggregator) aggregator;
        // emit the inline data as a gnuplot heredoc datablock
        appendfln(result, "%s <<EOD", boxAggregator.getDataName());
        appendln(result, boxAggregator.asCsv(settings.isRenderLabels()));
        appendln(result, "EOD");

        if (settings.isRenderLabels() && settings.isRenderBarChartTickLabels()) {
            // use appendln, not appendfln: label text is data, not a format
            // string, and could contain '%' characters
            appendln(result, boxAggregator.renderLabels(getxAxis()));
        }

        return result.toString();
    }

    @Override
    String addPlot(final CustomAggregator aggregator, final LineStyle lineStyle, final Optional<String> title) {

        final BoxAggregator boxAggregator = (BoxAggregator) aggregator;

        // columns: 1=time 3=open(p25) 2=low(min) 6=high(max) 5=close(p75)
        final String candlestick = formatln(
                "'%s' using 1:3:2:6:5:(%.1f) %s axes %s with %s whiskerbars 0.5 fs empty %s linewidth 1, \\", //
                boxAggregator.getDataName(), //
                width(boxAggregator.getInterval().getIntervalTimeUnit()), //
                gnuplotTitle(title), //
                gnuplotXYAxis(), //
                GnuplotLineType.BOX, //
                lineStyle.asGnuplotLineStyle()//
        );

        // degenerate candlestick (open=low=high=close=median) draws the median line
        final String median = formatln(
                "'%s' using 1:4:4:4:4:(%.1f) axes %s with candlesticks notitle fs empty %s linewidth 2, \\", //
                boxAggregator.getDataName(), //
                width(boxAggregator.getInterval().getIntervalTimeUnit()), //
                gnuplotXYAxis(), //
                lineStyle.asGnuplotLineStyle());
        return candlestick + median;
    }

    /**
     * Box width in seconds (gnuplot time axes use seconds). Floating-point
     * division avoids silent truncation should a sub-second unit ever be added.
     */
    private double width(final IntervalTimeUnit intervalTimeUnit) {
        return intervalTimeUnit.toMillis() / 1000.0;
    }

    @Override
    CustomAggregator createCustomAggregator(final Path tmpDir, final PlotSettings plotSettings,
            final long fromEpochMilli, final long toEpochMilli) {
        if (plotSettings.getInterval().isPresent()) {
            return new BoxAggregator(plotSettings);
        } else {
            // box charts require an interval; caller treats null as "not applicable"
            return null;
        }
    }
}

View File

@@ -8,90 +8,27 @@ import java.io.OutputStreamWriter;
import java.io.Writer; import java.io.Writer;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.nio.file.Path; import java.nio.file.Path;
import java.util.Locale;
import org.lucares.collections.LongLongConsumer;
import org.lucares.collections.LongLongHashMap;
import org.lucares.pdb.api.RuntimeIOException; import org.lucares.pdb.api.RuntimeIOException;
public class CumulativeDistributionCustomAggregator implements CustomAggregator { public class CumulativeDistributionCustomAggregator implements CustomAggregator {
private final static int POINTS = 500;
private static final class ToPercentiles implements LongLongConsumer {
private long cumulativeCount = 0;
private long maxValue = 0;
private final Percentiles percentiles = new Percentiles(POINTS);
private final double stepSize;
private double lastPercentile;
private double nextPercentile;
private final long totalValues;
public ToPercentiles(final long totalValues) {
this.totalValues = totalValues;
stepSize = 100.0 / POINTS;
nextPercentile = stepSize;
}
@Override
public void accept(final long duration, final long count) {
maxValue = duration;
cumulativeCount += count;
final double newPercentile = cumulativeCount * 100.0 / totalValues;
if (newPercentile >= nextPercentile) {
double currentPercentile = lastPercentile + stepSize;
while (currentPercentile <= newPercentile) {
final String percentile = String.format(Locale.US, "%.3f", currentPercentile);
percentiles.put(percentile, duration);
currentPercentile += stepSize;
}
nextPercentile = currentPercentile;
lastPercentile = currentPercentile - stepSize;
}
}
public Percentiles getPercentiles() {
return percentiles;
}
public void collect(final LongLongHashMap map) {
map.forEachOrdered(this);
percentiles.put("100.000", maxValue);
}
}
// the rather large initial capacity should prevent too many grow&re-hash phases
private final LongLongHashMap map = new LongLongHashMap(5_000, 0.75);
private long totalValues = 0;
private final Path tmpDir; private final Path tmpDir;
private final PercentilesAggregator percentilesAggregator;
public CumulativeDistributionCustomAggregator(final Path tmpDir) { public CumulativeDistributionCustomAggregator(final Path tmpDir) {
this.tmpDir = tmpDir; this.tmpDir = tmpDir;
percentilesAggregator = new PercentilesAggregator();
} }
@Override @Override
public void addValue(final long epochMilli, final long value) { public void addValue(final long epochMilli, final long value) {
map.compute(value, 0, (__, l) -> l + 1); percentilesAggregator.addValue(epochMilli, value);
totalValues++;
} }
public Percentiles getPercentiles() { public Percentiles getPercentiles() {
final ToPercentiles toPercentiles = new ToPercentiles(totalValues); return percentilesAggregator.getPercentiles();
toPercentiles.collect(map);
final Percentiles result = toPercentiles.getPercentiles();
return result;
} }
@Override @Override
@@ -100,17 +37,14 @@ public class CumulativeDistributionCustomAggregator implements CustomAggregator
final char separator = ','; final char separator = ',';
final char newline = '\n'; final char newline = '\n';
final ToPercentiles toPercentiles = new ToPercentiles(totalValues);
toPercentiles.collect(map);
final File dataFile = File.createTempFile("data", ".dat", tmpDir.toFile()); final File dataFile = File.createTempFile("data", ".dat", tmpDir.toFile());
try (final Writer output = new BufferedWriter( try (final Writer output = new BufferedWriter(
new OutputStreamWriter(new FileOutputStream(dataFile), StandardCharsets.US_ASCII));) { new OutputStreamWriter(new FileOutputStream(dataFile), StandardCharsets.US_ASCII));) {
final StringBuilder data = new StringBuilder(); final StringBuilder data = new StringBuilder();
if (map.size() > 0) { if (percentilesAggregator.hasValues()) {
// compute the percentiles // compute the percentiles
toPercentiles.getPercentiles().forEach((percentile, value) -> { percentilesAggregator.getPercentiles().forEach((percentile, value) -> {
data.append(percentile); data.append(percentile);
data.append(separator); data.append(separator);

View File

@@ -3,8 +3,9 @@ package org.lucares.pdb.plot.api;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.concurrent.atomic.AtomicLong; import java.util.function.Supplier;
import org.lucares.collections.LongObjHashMap;
import org.lucares.pdb.api.DateTimeRange; import org.lucares.pdb.api.DateTimeRange;
import org.lucares.pdb.datastore.internal.LongToDateBucket; import org.lucares.pdb.datastore.internal.LongToDateBucket;
@@ -51,6 +52,30 @@ public class Interval {
return bucketer.toPartitionId(epochMilli); return bucketer.toPartitionId(epochMilli);
} }
/**
 * Maps a timestamp to the middle timestamp of its interval bucket.
 * <p>
 * Buckets are fixed-width spans aligned to the Unix epoch. Equivalent to the
 * previous per-unit switch: {@code t - t % unit + unit/2} for every unit,
 * delegating the unit width to {@link IntervalTimeUnit#toMillis()} instead of
 * duplicating the constants here.
 * <p>
 * NOTE(review): WEEK/MONTH/YEAR use fixed approximate lengths (7/30/365 days),
 * so bucket edges do not coincide with calendar week/month/year boundaries —
 * TODO confirm this approximation is acceptable for intervals crossing a year
 * boundary.
 *
 * @param epochMilli timestamp in epoch milliseconds
 * @return the middle of the containing bucket, in epoch milliseconds
 * @throws IllegalArgumentException for an unsupported interval unit
 */
public long toBucketMiddleTime(final long epochMilli) {
    final long bucketMillis = intervalTimeUnit.toMillis();
    return epochMilli - epochMilli % bucketMillis + bucketMillis / 2;
}
public IntervalTimeUnit getIntervalTimeUnit() { public IntervalTimeUnit getIntervalTimeUnit() {
return intervalTimeUnit; return intervalTimeUnit;
} }
@@ -72,13 +97,30 @@ public class Interval {
return null; return null;
} }
public Map<String, AtomicLong> getBuckets() { public <T> Map<String, T> getBuckets(final Supplier<T> initialValueSupplier) {
final Map<String, AtomicLong> result = new HashMap<>(); final Map<String, T> result = new HashMap<>();
final List<String> bucketIds = bucketer.toPartitionIds(dateTimeRange.getStart(), dateTimeRange.getEnd(), final List<String> bucketIds = bucketer.toPartitionIds(dateTimeRange.getStart(), dateTimeRange.getEnd(),
intervalTimeUnit.toChronoUnit()); intervalTimeUnit.toChronoUnit());
for (final String bucketId : bucketIds) { for (final String bucketId : bucketIds) {
result.put(bucketId, new AtomicLong(0)); result.put(bucketId, initialValueSupplier.get());
}
return result;
}
public <T> LongObjHashMap<T> getMiddleTimeBuckets(final Supplier<T> initialValueSupplier) {
final LongObjHashMap<T> result = new LongObjHashMap<>();
long current = dateTimeRange.getStart().toInstant().toEpochMilli();
final long end = dateTimeRange.getEnd().toInstant().toEpochMilli() + intervalTimeUnit.toMillis();
while (current <= end) {
final long id = toBucketMiddleTime(current);
System.out.println("add bucket: " + id);
result.put(id, initialValueSupplier.get());
current += intervalTimeUnit.toMillis();
} }
return result; return result;

View File

@@ -34,4 +34,25 @@ public enum IntervalTimeUnit {
throw new IllegalArgumentException("Unexpected value: " + this); throw new IllegalArgumentException("Unexpected value: " + this);
} }
} }
public long toMillis() {
switch (this) {
case SECOND:
return 1000;
case MINUTE:
return 60 * 1000;
case HOUR:
return 3600 * 1000;
case DAY:
return 24 * 3600 * 1000;
case WEEK:
return 7 * 24 * 3600 * 1000;
case MONTH:
return 30 * 24 * 3600 * 1000L;
case YEAR:
return 365 * 24 * 3600 * 1000L;
default:
throw new IllegalArgumentException("Unexpected value: " + this);
}
}
} }

View File

@@ -0,0 +1,91 @@
package org.lucares.pdb.plot.api;
import java.util.Locale;
import org.lucares.collections.LongLongConsumer;
import org.lucares.collections.LongLongHashMap;
public class PercentilesAggregator {
private final static int POINTS = 500;
private static final class ToPercentiles implements LongLongConsumer {
private long cumulativeCount = 0;
private long minValue = Long.MAX_VALUE;
private long maxValue = 0;
private final Percentiles percentiles = new Percentiles(POINTS);
private final double stepSize;
private double lastPercentile;
private double nextPercentile;
private final long totalValues;
public ToPercentiles(final long totalValues) {
this.totalValues = totalValues;
stepSize = 100.0 / POINTS;
nextPercentile = 0;
}
@Override
public void accept(final long duration, final long count) {
minValue = Math.min(minValue, duration);
maxValue = duration;
cumulativeCount += count;
final double newPercentile = cumulativeCount * 100.0 / totalValues;
if (newPercentile >= nextPercentile) {
double currentPercentile = lastPercentile + stepSize;
while (currentPercentile <= newPercentile) {
final String percentile = String.format(Locale.US, "%.3f", currentPercentile);
percentiles.put(percentile, duration);
currentPercentile += stepSize;
}
nextPercentile = currentPercentile;
lastPercentile = currentPercentile - stepSize;
}
}
public Percentiles getPercentiles() {
return percentiles;
}
public void collect(final LongLongHashMap map) {
percentiles.put("0.000", 0L); // make sure "0.000" is the first element in the sorted percentiles. Will be
// overwritten with the correct value later
map.forEachOrdered(this);
percentiles.put("0.000", minValue);
percentiles.put("100.000", maxValue);
}
}
// the rather large initial capacity should prevent too many grow&re-hash phases
private final LongLongHashMap map = new LongLongHashMap(5_000, 0.75);
private long totalValues = 0;
public PercentilesAggregator() {
}
public void addValue(final long epochMilli, final long value) {
map.compute(value, 0, (__, l) -> l + 1);
totalValues++;
}
public Percentiles getPercentiles() {
final ToPercentiles toPercentiles = new ToPercentiles(totalValues);
toPercentiles.collect(map);
final Percentiles result = toPercentiles.getPercentiles();
return result;
}
public boolean hasValues() {
return map.size() > 0;
}
}

View File

@@ -66,6 +66,7 @@ public class GnuplotFileGenerator implements Appender {
// appendfln(result, "set xrange [-1:1]"); // appendfln(result, "set xrange [-1:1]");
appendfln(result, "set boxwidth 0.5"); appendfln(result, "set boxwidth 0.5");
// appendfln(result, "set boxwidth 3600");
appendfln(result, "set style fill transparent solid 0.5"); appendfln(result, "set style fill transparent solid 0.5");
@@ -75,7 +76,7 @@ public class GnuplotFileGenerator implements Appender {
// render images when there are not data points on it. // render images when there are not data points on it.
appendf(result, "-1 with lines notitle"); appendf(result, "-1 with lines notitle");
LOGGER.debug("{}", result); LOGGER.info("{}", result);
return result.toString(); return result.toString();
} }

View File

@@ -5,6 +5,8 @@ public enum GnuplotLineType {
Bar("boxes"), Bar("boxes"),
BOX("candlesticks"),
Points("points"); Points("points");
private String gnuplotLineType; private String gnuplotLineType;

View File

@@ -13,13 +13,7 @@ public class LineStyle {
} }
private String asGnuplotLineStyle(final String colorHex) { private String asGnuplotLineStyle(final String colorHex) {
// TODO revert return String.format("linetype rgb \"#%s\" ", //
// return String.format("lt rgb \"#%s\" dt %s ", //
// colorHex, //
// dashType.toGnuplotDashType()//
// );
return String.format("lt rgb \"#%s\" ", //
colorHex// colorHex//
); );
} }

View File

@@ -195,7 +195,7 @@ public class Plotter {
METRICS_LOGGER.debug("wrote {} values to csv in: {}ms (ignored {} values) use millis: {}, grouping={}", METRICS_LOGGER.debug("wrote {} values to csv in: {}ms (ignored {} values) use millis: {}, grouping={}",
plottedValues, (System.nanoTime() - start) / 1_000_000.0, ignoredValues, Boolean.toString(useMillis), plottedValues, (System.nanoTime() - start) / 1_000_000.0, ignoredValues, Boolean.toString(useMillis),
groupResult.getGroupedBy().asString()); Tags.STRING_COMPRESSOR.asString(groupResult.getGroupedBy()));
return new CsvSummary(count, statsMaxValue, statsCurrentAverage, aggregator); return new CsvSummary(count, statsMaxValue, statsCurrentAverage, aggregator);
} }
@@ -208,7 +208,7 @@ public class Plotter {
static String title(final Tags tags, final CsvSummary csvSummary) { static String title(final Tags tags, final CsvSummary csvSummary) {
// TODO title must be computed by the AggregateHandler, because it is the only // TODO title must be computed by the AggregateHandler, because it is the only
// one knowing how many values are plotted // one knowing how many values are plotted
final StringBuilder result = new StringBuilder(tags.asValueString()); final StringBuilder result = new StringBuilder(Tags.STRING_COMPRESSOR.asValueString(tags));
final int values = csvSummary.getValues(); final int values = csvSummary.getValues();
result.append(" ("); result.append(" (");

View File

@@ -49,8 +49,8 @@ public class CsvReaderCsvToEntryTransformer implements CsvToEntryTransformer {
final int chunksize = 1000; final int chunksize = 1000;
Entries entries = new Entries(chunksize); Entries entries = new Entries(chunksize);
final int keyTimestamp = Tags.STRING_COMPRESSOR.put(settings.getTimeColumn()); final int keyTimestamp = Tags.STRING_COMPRESSOR.putString(settings.getTimeColumn());
final int keyDuration = Tags.STRING_COMPRESSOR.put(settings.getValueColumn()); final int keyDuration = Tags.STRING_COMPRESSOR.putString(settings.getValueColumn());
final DateTimeFormatter dateParser = createDateParser(settings.getDateTimePattern()); final DateTimeFormatter dateParser = createDateParser(settings.getDateTimePattern());
final Tags additionalTags = initAdditionalTags(settings); final Tags additionalTags = initAdditionalTags(settings);
@@ -101,7 +101,7 @@ public class CsvReaderCsvToEntryTransformer implements CsvToEntryTransformer {
final String renameTo = settings.getColumnDefinitions().getRenameTo(columnName); final String renameTo = settings.getColumnDefinitions().getRenameTo(columnName);
final String renamedColumn = renameTo != null ? renameTo : columnName; final String renamedColumn = renameTo != null ? renameTo : columnName;
compressedHeaders[i] = Tags.STRING_COMPRESSOR.put(renamedColumn); compressedHeaders[i] = Tags.STRING_COMPRESSOR.putString(renamedColumn);
final EnumSet<PostProcessors> postProcessors = settings.getColumnDefinitions() final EnumSet<PostProcessors> postProcessors = settings.getColumnDefinitions()
.getPostProcessors(columnName); .getPostProcessors(columnName);
final Function<String, String> postProcessFunction = PostProcessors.toFunction(postProcessors); final Function<String, String> postProcessFunction = PostProcessors.toFunction(postProcessors);
@@ -134,7 +134,7 @@ public class CsvReaderCsvToEntryTransformer implements CsvToEntryTransformer {
duration = Long.parseLong(val); duration = Long.parseLong(val);
} else if (!val.isEmpty()) { } else if (!val.isEmpty()) {
final Function<String, String> postProcess = postProcessersForColumns.get(i); final Function<String, String> postProcess = postProcessersForColumns.get(i);
final int value = Tags.STRING_COMPRESSOR.put(val, postProcess); final int value = Tags.STRING_COMPRESSOR.putString(val, postProcess);
tagsBuilder.add(key, value); tagsBuilder.add(key, value);
} }

View File

@@ -21,8 +21,8 @@ public interface CsvToEntryTransformer {
default Tags initAdditionalTags(final CsvReaderSettings settings) { default Tags initAdditionalTags(final CsvReaderSettings settings) {
final TagsBuilder tags = new TagsBuilder(); final TagsBuilder tags = new TagsBuilder();
for (final java.util.Map.Entry<String, String> entry : settings.getAdditionalTags().entrySet()) { for (final java.util.Map.Entry<String, String> entry : settings.getAdditionalTags().entrySet()) {
final int field = Tags.STRING_COMPRESSOR.put(entry.getKey()); final int field = Tags.STRING_COMPRESSOR.putString(entry.getKey());
final int value = Tags.STRING_COMPRESSOR.put(entry.getValue()); final int value = Tags.STRING_COMPRESSOR.putString(entry.getValue());
tags.add(field, value); tags.add(field, value);
} }
return tags.build(); return tags.build();

View File

@@ -3,16 +3,17 @@ package org.lucares.pdbui;
import java.util.Objects; import java.util.Objects;
import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.ArrayBlockingQueue;
import org.lucares.pdb.api.StringCompressor;
import org.lucares.pdb.datastore.Entries; import org.lucares.pdb.datastore.Entries;
public class CsvToEntryTransformerFactory { public class CsvToEntryTransformerFactory {
public static CsvToEntryTransformer createCsvToEntryTransformer(final ArrayBlockingQueue<Entries> queue, public static CsvToEntryTransformer createCsvToEntryTransformer(final ArrayBlockingQueue<Entries> queue,
final CsvReaderSettings settings) { final CsvReaderSettings settings, final StringCompressor stringCompressor) {
if (settings.getQuoteCharacter() == null if (settings.getQuoteCharacter() == null
&& Objects.equals(settings.getDateTimePattern(), CsvReaderSettings.ISO_8601)) { && Objects.equals(settings.getDateTimePattern(), CsvReaderSettings.ISO_8601)) {
return new NoCopyCsvToEntryTransformer(queue, settings); return new NoCopyCsvToEntryTransformer(queue, settings, stringCompressor);
} else { } else {
return new CsvReaderCsvToEntryTransformer(queue, settings); return new CsvReaderCsvToEntryTransformer(queue, settings);
} }

View File

@@ -12,6 +12,7 @@ import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException; import java.util.concurrent.TimeoutException;
import org.lucares.pdb.api.StringCompressor;
import org.lucares.pdb.datastore.Entries; import org.lucares.pdb.datastore.Entries;
import org.lucares.performance.db.PerformanceDb; import org.lucares.performance.db.PerformanceDb;
import org.lucares.utils.file.FileUtils; import org.lucares.utils.file.FileUtils;
@@ -30,8 +31,11 @@ public class CsvUploadHandler implements PropertyKeys, DisposableBean {
private final PerformanceDb performanceDb; private final PerformanceDb performanceDb;
public CsvUploadHandler(final PerformanceDb performanceDb) { private final StringCompressor stringCompressor;
public CsvUploadHandler(final PerformanceDb performanceDb, final StringCompressor stringCompressor) {
this.performanceDb = performanceDb; this.performanceDb = performanceDb;
this.stringCompressor = stringCompressor;
} }
public void ingest(final List<MultipartFile> files, final CsvReaderSettings settings) public void ingest(final List<MultipartFile> files, final CsvReaderSettings settings)
@@ -48,7 +52,8 @@ public class CsvUploadHandler implements PropertyKeys, DisposableBean {
// improved the // improved the
// ingestion performance fom 1.1m to 1.55m values per second on average // ingestion performance fom 1.1m to 1.55m values per second on average
synchronized (this) { synchronized (this) {
final NoCopyCsvToEntryTransformer csvToEntryTransformer = new NoCopyCsvToEntryTransformer(queue, settings); final NoCopyCsvToEntryTransformer csvToEntryTransformer = new NoCopyCsvToEntryTransformer(queue,
settings, stringCompressor);
try (InputStream in = file.getInputStream()) { try (InputStream in = file.getInputStream()) {
csvToEntryTransformer.readCSV(in); csvToEntryTransformer.readCSV(in);
} catch (final Exception e) { } catch (final Exception e) {

View File

@@ -10,6 +10,7 @@ import java.util.concurrent.ArrayBlockingQueue;
import java.util.zip.ZipEntry; import java.util.zip.ZipEntry;
import java.util.zip.ZipFile; import java.util.zip.ZipFile;
import org.lucares.pdb.api.StringCompressor;
import org.lucares.pdb.datastore.Entries; import org.lucares.pdb.datastore.Entries;
import org.lucares.pdb.datastore.RuntimeTimeoutException; import org.lucares.pdb.datastore.RuntimeTimeoutException;
import org.lucares.performance.db.PerformanceDb; import org.lucares.performance.db.PerformanceDb;
@@ -21,12 +22,15 @@ public class FileDropZipHandler implements FileDropFileTypeHandler {
private final PerformanceDb performanceDb; private final PerformanceDb performanceDb;
private final FileDropConfigProvider configProvider; private final FileDropConfigProvider configProvider;
private final StringCompressor stringCompressor;
@Autowired @Autowired
public FileDropZipHandler(final PerformanceDb performanceDb, final FileDropConfigProvider configProvider) { public FileDropZipHandler(final PerformanceDb performanceDb, final FileDropConfigProvider configProvider,
final StringCompressor stringCompressor) {
super(); super();
this.performanceDb = performanceDb; this.performanceDb = performanceDb;
this.configProvider = configProvider; this.configProvider = configProvider;
this.stringCompressor = stringCompressor;
} }
@Override @Override
@@ -54,7 +58,7 @@ public class FileDropZipHandler implements FileDropFileTypeHandler {
final CsvReaderSettings csvReaderSettings = csvSettings.get(); final CsvReaderSettings csvReaderSettings = csvSettings.get();
final CsvToEntryTransformer csvToEntryTransformer = CsvToEntryTransformerFactory final CsvToEntryTransformer csvToEntryTransformer = CsvToEntryTransformerFactory
.createCsvToEntryTransformer(queue, csvReaderSettings); .createCsvToEntryTransformer(queue, csvReaderSettings, stringCompressor);
try (final InputStream inputStream = new BufferedInputStream(zipFile.getInputStream(entry), try (final InputStream inputStream = new BufferedInputStream(zipFile.getInputStream(entry),
1024 * 1024)) { 1024 * 1024)) {
csvToEntryTransformer.readCSV(inputStream); csvToEntryTransformer.readCSV(inputStream);

View File

@@ -15,6 +15,7 @@ import java.util.concurrent.Callable;
import java.util.concurrent.TimeoutException; import java.util.concurrent.TimeoutException;
import java.util.zip.GZIPInputStream; import java.util.zip.GZIPInputStream;
import org.lucares.pdb.api.StringCompressor;
import org.lucares.pdb.datastore.Entries; import org.lucares.pdb.datastore.Entries;
import org.lucares.pdb.datastore.Entry; import org.lucares.pdb.datastore.Entry;
import org.lucares.pdbui.CsvReaderSettings.ColumnDefinitions; import org.lucares.pdbui.CsvReaderSettings.ColumnDefinitions;
@@ -25,10 +26,13 @@ public final class IngestionHandler implements Callable<Void> {
final Socket clientSocket; final Socket clientSocket;
private final ArrayBlockingQueue<Entries> queue; private final ArrayBlockingQueue<Entries> queue;
private final StringCompressor stringCompressor;
public IngestionHandler(final Socket clientSocket, final ArrayBlockingQueue<Entries> queue) { public IngestionHandler(final Socket clientSocket, final ArrayBlockingQueue<Entries> queue,
final StringCompressor stringCompressor) {
this.clientSocket = clientSocket; this.clientSocket = clientSocket;
this.queue = queue; this.queue = queue;
this.stringCompressor = stringCompressor;
} }
@Override @Override
@@ -65,7 +69,7 @@ public final class IngestionHandler implements Callable<Void> {
} else { } else {
in.reset(); in.reset();
final NoCopyCsvToEntryTransformer csvTransformer = new NoCopyCsvToEntryTransformer(queue, final NoCopyCsvToEntryTransformer csvTransformer = new NoCopyCsvToEntryTransformer(queue,
CsvReaderSettings.create("@timestamp", "duration", ",", new ColumnDefinitions())); CsvReaderSettings.create("@timestamp", "duration", ",", new ColumnDefinitions()), stringCompressor);
csvTransformer.readCSV(in); csvTransformer.readCSV(in);
} }
} }

View File

@@ -72,12 +72,12 @@ public class JsonToEntryTransformer implements LineToEntryTransformer {
// ignore: we only support key/value tags // ignore: we only support key/value tags
break; break;
default: default:
final int keyAsInt = Tags.STRING_COMPRESSOR.put(key); final int keyAsInt = Tags.STRING_COMPRESSOR.putString(key);
final int valueAsInt; final int valueAsInt;
if (value instanceof String) { if (value instanceof String) {
valueAsInt = Tags.STRING_COMPRESSOR.put((String) value); valueAsInt = Tags.STRING_COMPRESSOR.putString((String) value);
} else if (value != null) { } else if (value != null) {
valueAsInt = Tags.STRING_COMPRESSOR.put(String.valueOf(value)); valueAsInt = Tags.STRING_COMPRESSOR.putString(String.valueOf(value));
} else { } else {
continue; continue;
} }

View File

@@ -4,6 +4,7 @@ import java.io.IOException;
import java.nio.file.Path; import java.nio.file.Path;
import java.nio.file.Paths; import java.nio.file.Paths;
import org.lucares.pdb.api.StringCompressor;
import org.lucares.performance.db.PerformanceDb; import org.lucares.performance.db.PerformanceDb;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@@ -28,4 +29,9 @@ public class MySpringConfiguration {
return new PerformanceDb(dataDirectory); return new PerformanceDb(dataDirectory);
} }
@Bean
StringCompressor stringCompressor(final PerformanceDb performanceDb) {
return performanceDb.getRealDataStore().getStringCompressor();
}
} }

View File

@@ -11,6 +11,7 @@ import java.util.concurrent.TimeUnit;
import java.util.function.Function; import java.util.function.Function;
import org.lucares.collections.IntList; import org.lucares.collections.IntList;
import org.lucares.pdb.api.StringCompressor;
import org.lucares.pdb.api.Tags; import org.lucares.pdb.api.Tags;
import org.lucares.pdb.api.TagsBuilder; import org.lucares.pdb.api.TagsBuilder;
import org.lucares.pdb.datastore.Entries; import org.lucares.pdb.datastore.Entries;
@@ -31,9 +32,13 @@ class NoCopyCsvToEntryTransformer implements CsvToEntryTransformer {
private int[] compressedHeaders; private int[] compressedHeaders;
private List<Function<String, String>> postProcessersForColumns; private List<Function<String, String>> postProcessersForColumns;
public NoCopyCsvToEntryTransformer(final ArrayBlockingQueue<Entries> queue, final CsvReaderSettings settings) { private final StringCompressor stringCompressor;
public NoCopyCsvToEntryTransformer(final ArrayBlockingQueue<Entries> queue, final CsvReaderSettings settings,
final StringCompressor stringCompressor) {
this.queue = queue; this.queue = queue;
this.settings = settings; this.settings = settings;
this.stringCompressor = stringCompressor;
} }
@Override @Override
@@ -54,8 +59,8 @@ class NoCopyCsvToEntryTransformer implements CsvToEntryTransformer {
int lineCounter = 0; int lineCounter = 0;
final byte[] buffer = new byte[4096 * 16]; final byte[] buffer = new byte[4096 * 16];
final int keyTimestamp = Tags.STRING_COMPRESSOR.put(settings.getTimeColumn()); final int keyTimestamp = stringCompressor.putString(settings.getTimeColumn());
final int keyDuration = Tags.STRING_COMPRESSOR.put(settings.getValueColumn()); final int keyDuration = stringCompressor.putString(settings.getValueColumn());
final FastISODateParser dateParser = new FastISODateParser(); final FastISODateParser dateParser = new FastISODateParser();
Tags additionalTags = initAdditionalTags(settings); Tags additionalTags = initAdditionalTags(settings);
@@ -144,7 +149,7 @@ class NoCopyCsvToEntryTransformer implements CsvToEntryTransformer {
final String renameTo = settings.getColumnDefinitions().getRenameTo(columnName); final String renameTo = settings.getColumnDefinitions().getRenameTo(columnName);
final String renamedColumn = renameTo != null ? renameTo : columnName; final String renamedColumn = renameTo != null ? renameTo : columnName;
columns[i] = Tags.STRING_COMPRESSOR.put(renamedColumn); columns[i] = stringCompressor.putString(renamedColumn);
final EnumSet<PostProcessors> postProcessors = settings.getColumnDefinitions() final EnumSet<PostProcessors> postProcessors = settings.getColumnDefinitions()
.getPostProcessors(columnName); .getPostProcessors(columnName);
final Function<String, String> postProcessFunction = PostProcessors.toFunction(postProcessors); final Function<String, String> postProcessFunction = PostProcessors.toFunction(postProcessors);
@@ -186,8 +191,8 @@ class NoCopyCsvToEntryTransformer implements CsvToEntryTransformer {
duration = parseLong(line, lastSeparatorPosition + 1, separatorPosition); duration = parseLong(line, lastSeparatorPosition + 1, separatorPosition);
} else if (lastSeparatorPosition + 1 < separatorPosition) { // value is not empty } else if (lastSeparatorPosition + 1 < separatorPosition) { // value is not empty
final Function<String, String> postProcess = postProcessersForColumns.get(i); final Function<String, String> postProcess = postProcessersForColumns.get(i);
final int value = Tags.STRING_COMPRESSOR.put(line, lastSeparatorPosition + 1, separatorPosition, final int value = stringCompressor.putStringFromBytes(line, lastSeparatorPosition + 1,
postProcess); separatorPosition, postProcess);
tagsBuilder.add(key, value); tagsBuilder.add(key, value);
} }

View File

@@ -5,6 +5,7 @@ import java.util.List;
import org.lucares.pdb.plot.api.Aggregate; import org.lucares.pdb.plot.api.Aggregate;
import org.lucares.pdb.plot.api.AggregateHandlerCollection; import org.lucares.pdb.plot.api.AggregateHandlerCollection;
import org.lucares.pdb.plot.api.BarChartHandler; import org.lucares.pdb.plot.api.BarChartHandler;
import org.lucares.pdb.plot.api.BoxChartHandler;
import org.lucares.pdb.plot.api.CumulativeDistributionHandler; import org.lucares.pdb.plot.api.CumulativeDistributionHandler;
import org.lucares.pdb.plot.api.HistogramHandler; import org.lucares.pdb.plot.api.HistogramHandler;
import org.lucares.pdb.plot.api.Interval; import org.lucares.pdb.plot.api.Interval;
@@ -62,6 +63,9 @@ class PlotSettingsTransformer {
case BAR: case BAR:
aggregateHandlerCollection.addAggregateHandler(new BarChartHandler()); aggregateHandlerCollection.addAggregateHandler(new BarChartHandler());
break; break;
case BOX:
aggregateHandlerCollection.addAggregateHandler(new BoxChartHandler());
break;
default: default:
throw new IllegalStateException("unhandled enum: " + aggregate); throw new IllegalStateException("unhandled enum: " + aggregate);
} }

View File

@@ -26,8 +26,8 @@ public class TagMatchExtractor {
if (matcher.find() && matcher.groupCount() >= 1) { if (matcher.find() && matcher.groupCount() >= 1) {
final String group = matcher.group(1); final String group = matcher.group(1);
Tags.STRING_COMPRESSOR.put(tagMatcher.tag()); Tags.STRING_COMPRESSOR.putString(tagMatcher.tag());
Tags.STRING_COMPRESSOR.put(group); Tags.STRING_COMPRESSOR.putString(group);
System.out.println(tagMatcher.tag() + " -> " + group); System.out.println(tagMatcher.tag() + " -> " + group);
final Tag tag = Tags.STRING_COMPRESSOR.createTag(tagMatcher.tag(), group); final Tag tag = Tags.STRING_COMPRESSOR.createTag(tagMatcher.tag(), group);

View File

@@ -15,6 +15,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
import javax.annotation.PreDestroy; import javax.annotation.PreDestroy;
import org.lucares.pdb.api.StringCompressor;
import org.lucares.pdb.datastore.Entries; import org.lucares.pdb.datastore.Entries;
import org.lucares.performance.db.PerformanceDb; import org.lucares.performance.db.PerformanceDb;
import org.lucares.recommind.logs.Config; import org.lucares.recommind.logs.Config;
@@ -40,15 +41,19 @@ public class TcpIngestor implements Ingestor, AutoCloseable, DisposableBean {
private volatile int port = PORT; private volatile int port = PORT;
private final StringCompressor stringCompressor;
public TcpIngestor(final Path dataDirectory) throws IOException { public TcpIngestor(final Path dataDirectory) throws IOException {
LOGGER.info("opening performance db: " + dataDirectory); LOGGER.info("opening performance db: " + dataDirectory);
db = new PerformanceDb(dataDirectory); db = new PerformanceDb(dataDirectory);
stringCompressor = db.getRealDataStore().getStringCompressor();
LOGGER.debug("performance db open"); LOGGER.debug("performance db open");
} }
@Autowired @Autowired
public TcpIngestor(final PerformanceDb db) { public TcpIngestor(final PerformanceDb db, final StringCompressor stringCompressor) {
this.db = db; this.db = db;
this.stringCompressor = stringCompressor;
} }
public void useRandomPort() { public void useRandomPort() {
@@ -94,7 +99,7 @@ public class TcpIngestor implements Ingestor, AutoCloseable, DisposableBean {
LOGGER.debug("accepted connection: " + clientSocket.getRemoteSocketAddress()); LOGGER.debug("accepted connection: " + clientSocket.getRemoteSocketAddress());
final ArrayBlockingQueue<Entries> queue = db.getQueue(); final ArrayBlockingQueue<Entries> queue = db.getQueue();
workerThreadPool.submit(new IngestionHandler(clientSocket, queue)); workerThreadPool.submit(new IngestionHandler(clientSocket, queue, stringCompressor));
LOGGER.debug("handler submitted"); LOGGER.debug("handler submitted");
} catch (final SocketTimeoutException e) { } catch (final SocketTimeoutException e) {
// expected every 100ms // expected every 100ms

View File

@@ -1 +1,6 @@
db.base=/tmp/pdb db.base=/home/andi/ws/performanceDb/dev-database/
server.port=17333
cache.images.duration.seconds=86400
defaults.groupBy=pod,method,metric
defaults.splitBy=method
defaults.query.examples=pod=vapfinra01 and method=ViewService.findFieldView,ViewService.findFieldViewGroup;pod=vappilby01 and method=ReviewInContextController.index;pod=vapnyse001 and method=ReviewInContextController.index,ReviewController.index

View File

@@ -18,6 +18,7 @@ import org.lucares.collections.LongList;
import org.lucares.pdb.api.DateTimeRange; import org.lucares.pdb.api.DateTimeRange;
import org.lucares.pdb.api.Query; import org.lucares.pdb.api.Query;
import org.lucares.pdb.api.Result; import org.lucares.pdb.api.Result;
import org.lucares.pdb.api.StringCompressor;
import org.lucares.pdbui.CsvReaderSettings.ColumnDefinitions; import org.lucares.pdbui.CsvReaderSettings.ColumnDefinitions;
import org.lucares.pdbui.CsvReaderSettings.PostProcessors; import org.lucares.pdbui.CsvReaderSettings.PostProcessors;
import org.lucares.pdbui.domain.FileDropConfig; import org.lucares.pdbui.domain.FileDropConfig;
@@ -101,7 +102,11 @@ public class FileDropHandlerTest {
final FileDropConfigProvider fileDropConfigProvider = new FileDropConfigProvider( final FileDropConfigProvider fileDropConfigProvider = new FileDropConfigProvider(
fileDropConfigLocation.toString()); fileDropConfigLocation.toString());
final String fileDropBaseDir = dataDirectory.resolve("drop").toAbsolutePath().toString(); final String fileDropBaseDir = dataDirectory.resolve("drop").toAbsolutePath().toString();
final List<FileDropFileTypeHandler> handlers = List.of(new FileDropZipHandler(db, fileDropConfigProvider));
final StringCompressor stringCompressor = db.getRealDataStore().getStringCompressor();
final List<FileDropFileTypeHandler> handlers = List
.of(new FileDropZipHandler(db, fileDropConfigProvider, stringCompressor));
return new FileDropHandler(fileDropBaseDir, handlers); return new FileDropHandler(fileDropBaseDir, handlers);
} }

View File

@@ -19,6 +19,7 @@ import org.junit.jupiter.api.Test;
import org.lucares.collections.LongList; import org.lucares.collections.LongList;
import org.lucares.pdb.api.DateTimeRange; import org.lucares.pdb.api.DateTimeRange;
import org.lucares.pdb.api.Query; import org.lucares.pdb.api.Query;
import org.lucares.pdb.api.StringCompressor;
import org.lucares.pdb.datastore.Entries; import org.lucares.pdb.datastore.Entries;
import org.lucares.pdbui.CsvReaderSettings.ColumnDefinitions; import org.lucares.pdbui.CsvReaderSettings.ColumnDefinitions;
import org.lucares.performance.db.PerformanceDb; import org.lucares.performance.db.PerformanceDb;
@@ -44,6 +45,7 @@ public class NoCopyCsvToEntryTransformerTest {
final OffsetDateTime dateB = OffsetDateTime.now(); final OffsetDateTime dateB = OffsetDateTime.now();
try (final PerformanceDb db = new PerformanceDb(dataDirectory)) { try (final PerformanceDb db = new PerformanceDb(dataDirectory)) {
final StringCompressor stringCompressor = db.getRealDataStore().getStringCompressor();
final String csv = "@timestamp,duration,tag\n"// final String csv = "@timestamp,duration,tag\n"//
+ dateA.format(DateTimeFormatter.ISO_ZONED_DATE_TIME) + ",1,tagValue\n"// + dateA.format(DateTimeFormatter.ISO_ZONED_DATE_TIME) + ",1,tagValue\n"//
@@ -52,7 +54,8 @@ public class NoCopyCsvToEntryTransformerTest {
final ArrayBlockingQueue<Entries> queue = db.getQueue(); final ArrayBlockingQueue<Entries> queue = db.getQueue();
final CsvReaderSettings settings = CsvReaderSettings.create("@timestamp", "duration", ",", final CsvReaderSettings settings = CsvReaderSettings.create("@timestamp", "duration", ",",
new ColumnDefinitions()); new ColumnDefinitions());
final NoCopyCsvToEntryTransformer csvToEntryTransformer = new NoCopyCsvToEntryTransformer(queue, settings); final NoCopyCsvToEntryTransformer csvToEntryTransformer = new NoCopyCsvToEntryTransformer(queue, settings,
stringCompressor);
csvToEntryTransformer.readCSV(new ByteArrayInputStream(csv.getBytes(StandardCharsets.UTF_8))); csvToEntryTransformer.readCSV(new ByteArrayInputStream(csv.getBytes(StandardCharsets.UTF_8)));
queue.put(Entries.POISON); queue.put(Entries.POISON);
} }
@@ -84,6 +87,7 @@ public class NoCopyCsvToEntryTransformerTest {
public void testIgnoreColumns() throws IOException, InterruptedException, TimeoutException { public void testIgnoreColumns() throws IOException, InterruptedException, TimeoutException {
try (final PerformanceDb db = new PerformanceDb(dataDirectory)) { try (final PerformanceDb db = new PerformanceDb(dataDirectory)) {
final StringCompressor stringCompressor = db.getRealDataStore().getStringCompressor();
final String csv = "@timestamp,duration,ignoredColumn,-otherIgnoredColumn,tag\n"// final String csv = "@timestamp,duration,ignoredColumn,-otherIgnoredColumn,tag\n"//
+ "2000-01-01T00:00:00.000Z,1,ignoreValue,ignoreValue,tagValue\n"// + "2000-01-01T00:00:00.000Z,1,ignoreValue,ignoreValue,tagValue\n"//
@@ -94,7 +98,8 @@ public class NoCopyCsvToEntryTransformerTest {
columnDefinitions.ignoreColumn("ignoredColumn"); columnDefinitions.ignoreColumn("ignoredColumn");
final CsvReaderSettings settings = CsvReaderSettings.create("@timestamp", "duration", ",", final CsvReaderSettings settings = CsvReaderSettings.create("@timestamp", "duration", ",",
columnDefinitions); columnDefinitions);
final NoCopyCsvToEntryTransformer csvToEntryTransformer = new NoCopyCsvToEntryTransformer(queue, settings); final NoCopyCsvToEntryTransformer csvToEntryTransformer = new NoCopyCsvToEntryTransformer(queue, settings,
stringCompressor);
csvToEntryTransformer.readCSV(new ByteArrayInputStream(csv.getBytes(StandardCharsets.UTF_8))); csvToEntryTransformer.readCSV(new ByteArrayInputStream(csv.getBytes(StandardCharsets.UTF_8)));
queue.put(Entries.POISON); queue.put(Entries.POISON);
} }

View File

@@ -231,6 +231,10 @@ public class PerformanceDb implements AutoCloseable {
return fields; return fields;
} }
public DataStore getRealDataStore() {
return dataStore;
}
public PartitionDiskStore getDataStore() { public PartitionDiskStore getDataStore() {
return dataStore.getDiskStorage(); return dataStore.getDiskStorage();
} }

View File

@@ -13,6 +13,7 @@ import java.util.concurrent.ThreadLocalRandom;
import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.CollectionUtils;
import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.ParameterizedTest;
@@ -24,7 +25,6 @@ import org.lucares.pdb.api.Query;
import org.lucares.pdb.api.Result; import org.lucares.pdb.api.Result;
import org.lucares.pdb.api.Tags; import org.lucares.pdb.api.Tags;
import org.lucares.pdb.datastore.Entry; import org.lucares.pdb.datastore.Entry;
import org.junit.jupiter.api.Assertions;
import org.lucares.utils.DateUtils; import org.lucares.utils.DateUtils;
public class PerformanceDbTest { public class PerformanceDbTest {
@@ -48,10 +48,10 @@ public class PerformanceDbTest {
final OffsetDateTime nowInUtc = DateUtils.nowInUtc(); final OffsetDateTime nowInUtc = DateUtils.nowInUtc();
final long date = nowInUtc.toInstant().toEpochMilli(); final long date = nowInUtc.toInstant().toEpochMilli();
final long value = 1; final long value = 1;
final Tags tags = Tags.createAndAddToDictionary("myKey", "myValue"); final Tags tags = Tags.STRING_COMPRESSOR.createAndAddToDictionary("myKey", "myValue");
db.putEntry(new Entry(date, value, tags)); db.putEntry(new Entry(date, value, tags));
final Result result = db.get(Query.createQuery(tags, DateTimeRange.ofDay(nowInUtc))); final Result result = db.get(Query.createQuery("myKey=myValue", DateTimeRange.ofDay(nowInUtc)));
final LongList stream = result.singleGroup().flatMap(); final LongList stream = result.singleGroup().flatMap();
Assertions.assertEquals(2, stream.size()); Assertions.assertEquals(2, stream.size());
@@ -71,12 +71,12 @@ public class PerformanceDbTest {
final long dayTwo = dateRange.getEndEpochMilli(); final long dayTwo = dateRange.getEndEpochMilli();
final long valueOne = 1; final long valueOne = 1;
final long valueTwo = 2; final long valueTwo = 2;
final Tags tags = Tags.createAndAddToDictionary("myKey", "myValue"); final Tags tags = Tags.STRING_COMPRESSOR.createAndAddToDictionary("myKey", "myValue");
db.putEntry(new Entry(dayOne, valueOne, tags)); db.putEntry(new Entry(dayOne, valueOne, tags));
db.putEntry(new Entry(dayTwo, valueTwo, tags)); db.putEntry(new Entry(dayTwo, valueTwo, tags));
final LongList stream = db.get(Query.createQuery(tags, dateRange)).singleGroup().flatMap(); final LongList stream = db.get(Query.createQuery("myKey=myValue", dateRange)).singleGroup().flatMap();
Assertions.assertEquals(4, stream.size()); Assertions.assertEquals(4, stream.size());
@@ -116,14 +116,14 @@ public class PerformanceDbTest {
final DateTimeRange timeRange = DateTimeRange.ofDay(DateUtils.getDate(year, month, day, 1, 1, 1)); final DateTimeRange timeRange = DateTimeRange.ofDay(DateUtils.getDate(year, month, day, 1, 1, 1));
final Tags tags = Tags.createAndAddToDictionary("myKey", "one"); final Tags tags = Tags.STRING_COMPRESSOR.createAndAddToDictionary("myKey", "one");
final List<Entry> entries = generateEntries(timeRange, numberOfEntries, 0, tags); final List<Entry> entries = generateEntries(timeRange, numberOfEntries, 0, tags);
printEntries(entries, ""); printEntries(entries, "");
db.putEntries(entries); db.putEntries(entries);
final LongList actualEntries = db.get(Query.createQuery(tags, timeRange)).singleGroup().flatMap(); final LongList actualEntries = db.get(Query.createQuery("myKey=one", timeRange)).singleGroup().flatMap();
Assertions.assertEquals(entries.size() * 2, actualEntries.size()); Assertions.assertEquals(entries.size() * 2, actualEntries.size());
for (int i = 0; i < entries.size(); i++) { for (int i = 0; i < entries.size(); i++) {
@@ -149,7 +149,7 @@ public class PerformanceDbTest {
final int month = 1; final int month = 1;
final int day = 2; final int day = 2;
tags = Tags.createAndAddToDictionary("myKey", "one"); tags = Tags.STRING_COMPRESSOR.createAndAddToDictionary("myKey", "one");
final DateTimeRange timeRange = DateTimeRange.ofDay(DateUtils.getDate(year, month, day, 1, 1, 1)); final DateTimeRange timeRange = DateTimeRange.ofDay(DateUtils.getDate(year, month, day, 1, 1, 1));
final List<Entry> entries = generateEntries(timeRange, numberOfEntries, 0, tags); final List<Entry> entries = generateEntries(timeRange, numberOfEntries, 0, tags);
@@ -167,7 +167,7 @@ public class PerformanceDbTest {
db.putEntries(entries); db.putEntries(entries);
expected.addAll(entries); expected.addAll(entries);
final LongList actualEntries = db.get(Query.createQuery(tags, timeRange)).singleGroup().flatMap(); final LongList actualEntries = db.get(Query.createQuery("myKey=one", timeRange)).singleGroup().flatMap();
Assertions.assertEquals(expected.size() * 2, actualEntries.size()); Assertions.assertEquals(expected.size() * 2, actualEntries.size());
Assertions.assertEquals(toExpectedValues(expected), actualEntries); Assertions.assertEquals(toExpectedValues(expected), actualEntries);
@@ -185,32 +185,38 @@ public class PerformanceDbTest {
final DateTimeRange dateRange = new DateTimeRange(from, to); final DateTimeRange dateRange = new DateTimeRange(from, to);
final long numberOfEntries = timeRange.duration().toHours(); final long numberOfEntries = timeRange.duration().toHours();
final Tags tagsCommon = Tags.createAndAddToDictionary("commonKey", "commonValue"); final Tags tagsOne = Tags.STRING_COMPRESSOR.createAndAddToDictionary("myKey", "one", "commonKey",
final Tags tagsOne = Tags.createAndAddToDictionary("myKey", "one", "commonKey", "commonValue"); "commonValue");
final List<Entry> entriesOne = generateEntries(timeRange, numberOfEntries, 1, tagsOne); final List<Entry> entriesOne = generateEntries(timeRange, numberOfEntries, 1, tagsOne);
db.putEntries(entriesOne); db.putEntries(entriesOne);
printEntries(entriesOne, "one"); printEntries(entriesOne, "one");
final Tags tagsTwo = Tags.createAndAddToDictionary("myKey", "two", "commonKey", "commonValue"); final Tags tagsTwo = Tags.STRING_COMPRESSOR.createAndAddToDictionary("myKey", "two", "commonKey",
"commonValue");
final List<Entry> entriesTwo = generateEntries(timeRange, numberOfEntries, 2, tagsTwo); final List<Entry> entriesTwo = generateEntries(timeRange, numberOfEntries, 2, tagsTwo);
printEntries(entriesTwo, "two"); printEntries(entriesTwo, "two");
db.putEntries(entriesTwo); db.putEntries(entriesTwo);
final Tags tagsThree = Tags.createAndAddToDictionary("myKey", "three", "commonKey", "commonValue"); final Tags tagsThree = Tags.STRING_COMPRESSOR.createAndAddToDictionary("myKey", "three", "commonKey",
"commonValue");
final List<Entry> entriesThree = generateEntries(timeRange, numberOfEntries, 3, tagsThree); final List<Entry> entriesThree = generateEntries(timeRange, numberOfEntries, 3, tagsThree);
printEntries(entriesThree, "three"); printEntries(entriesThree, "three");
db.putEntries(entriesThree); db.putEntries(entriesThree);
final LongList actualEntriesOne = db.get(Query.createQuery(tagsOne, dateRange)).singleGroup().flatMap(); final LongList actualEntriesOne = db
.get(Query.createQuery("myKey=one and commonKey=commonValue", dateRange)).singleGroup().flatMap();
Assertions.assertEquals(toExpectedValues(entriesOne), actualEntriesOne); Assertions.assertEquals(toExpectedValues(entriesOne), actualEntriesOne);
final LongList actualEntriesTwo = db.get(Query.createQuery(tagsTwo, dateRange)).singleGroup().flatMap(); final LongList actualEntriesTwo = db
.get(Query.createQuery("myKey=two and commonKey=commonValue", dateRange)).singleGroup().flatMap();
Assertions.assertEquals(toExpectedValues(entriesTwo), actualEntriesTwo); Assertions.assertEquals(toExpectedValues(entriesTwo), actualEntriesTwo);
final LongList actualEntriesThree = db.get(Query.createQuery(tagsThree, dateRange)).singleGroup().flatMap(); final LongList actualEntriesThree = db
.get(Query.createQuery("myKey=three and commonKey=commonValue", dateRange)).singleGroup().flatMap();
Assertions.assertEquals(toExpectedValues(entriesThree), actualEntriesThree); Assertions.assertEquals(toExpectedValues(entriesThree), actualEntriesThree);
final LongList actualEntriesAll = db.get(Query.createQuery(tagsCommon, dateRange)).singleGroup().flatMap(); final LongList actualEntriesAll = db.get(Query.createQuery("commonKey=commonValue", dateRange))
.singleGroup().flatMap();
final List<Entry> expectedAll = CollectionUtils.collate(entriesOne, final List<Entry> expectedAll = CollectionUtils.collate(entriesOne,
CollectionUtils.collate(entriesTwo, entriesThree, EntryByDateComparator.INSTANCE), CollectionUtils.collate(entriesTwo, entriesThree, EntryByDateComparator.INSTANCE),
EntryByDateComparator.INSTANCE); EntryByDateComparator.INSTANCE);
@@ -233,9 +239,11 @@ public class PerformanceDbTest {
final long numberOfEntries = timeRange.duration().toHours(); final long numberOfEntries = timeRange.duration().toHours();
final String key = "myKey"; final String key = "myKey";
final Tags tagsOne = Tags.createAndAddToDictionary(key, "one", "commonKey", "commonValue"); final Tags tagsOne = Tags.STRING_COMPRESSOR.createAndAddToDictionary(key, "one", "commonKey",
final Tags tagsTwo = Tags.createAndAddToDictionary(key, "two", "commonKey", "commonValue"); "commonValue");
final Tags tagsThree = Tags.createAndAddToDictionary("commonKey", "commonValue"); final Tags tagsTwo = Tags.STRING_COMPRESSOR.createAndAddToDictionary(key, "two", "commonKey",
"commonValue");
final Tags tagsThree = Tags.STRING_COMPRESSOR.createAndAddToDictionary("commonKey", "commonValue");
final LongList entriesOne = storeEntries(db, timeRange, numberOfEntries, tagsOne, 1); final LongList entriesOne = storeEntries(db, timeRange, numberOfEntries, tagsOne, 1);
final LongList entriesTwo = storeEntries(db, timeRange, numberOfEntries, tagsTwo, 2); final LongList entriesTwo = storeEntries(db, timeRange, numberOfEntries, tagsTwo, 2);
final LongList entriesThree = storeEntries(db, timeRange, numberOfEntries, tagsThree, 3); final LongList entriesThree = storeEntries(db, timeRange, numberOfEntries, tagsThree, 3);
@@ -247,9 +255,9 @@ public class PerformanceDbTest {
for (final GroupResult groupResult : groups) { for (final GroupResult groupResult : groups) {
final Tags groupedBy = groupResult.getGroupedBy(); final Tags groupedBy = groupResult.getGroupedBy();
if (groupedBy.equals(Tags.createAndAddToDictionary(key, "one"))) { if (groupedBy.equals(Tags.STRING_COMPRESSOR.createAndAddToDictionary(key, "one"))) {
Assertions.assertEquals(entriesOne, groupResult.flatMap()); Assertions.assertEquals(entriesOne, groupResult.flatMap());
} else if (groupedBy.equals(Tags.createAndAddToDictionary(key, "two"))) { } else if (groupedBy.equals(Tags.STRING_COMPRESSOR.createAndAddToDictionary(key, "two"))) {
Assertions.assertEquals(entriesTwo, groupResult.flatMap()); Assertions.assertEquals(entriesTwo, groupResult.flatMap());
} else if (groupedBy.isEmpty()) { } else if (groupedBy.isEmpty()) {
@@ -272,10 +280,14 @@ public class PerformanceDbTest {
final String key1 = "myKey1"; final String key1 = "myKey1";
final String key2 = "myKey2"; final String key2 = "myKey2";
final Tags tagsOne = Tags.createAndAddToDictionary(key1, "one", key2, "aaa", "commonKey", "commonValue"); final Tags tagsOne = Tags.STRING_COMPRESSOR.createAndAddToDictionary(key1, "one", key2, "aaa", "commonKey",
final Tags tagsTwoA = Tags.createAndAddToDictionary(key1, "two", key2, "bbb", "commonKey", "commonValue"); "commonValue");
final Tags tagsTwoB = Tags.createAndAddToDictionary(key1, "two", key2, "bbb", "commonKey", "commonValue"); final Tags tagsTwoA = Tags.STRING_COMPRESSOR.createAndAddToDictionary(key1, "two", key2, "bbb", "commonKey",
final Tags tagsThree = Tags.createAndAddToDictionary(key1, "three", "commonKey", "commonValue"); "commonValue");
final Tags tagsTwoB = Tags.STRING_COMPRESSOR.createAndAddToDictionary(key1, "two", key2, "bbb", "commonKey",
"commonValue");
final Tags tagsThree = Tags.STRING_COMPRESSOR.createAndAddToDictionary(key1, "three", "commonKey",
"commonValue");
final LongList entriesOne = storeEntries(db, timeRange, numberOfEntries, tagsOne, 1); final LongList entriesOne = storeEntries(db, timeRange, numberOfEntries, tagsOne, 1);
final LongList entriesTwo = storeEntries(db, timeRange, numberOfEntries, tagsTwoA, 2); final LongList entriesTwo = storeEntries(db, timeRange, numberOfEntries, tagsTwoA, 2);
@@ -290,9 +302,10 @@ public class PerformanceDbTest {
for (final GroupResult groupResult : groups) { for (final GroupResult groupResult : groups) {
final Tags groupedBy = groupResult.getGroupedBy(); final Tags groupedBy = groupResult.getGroupedBy();
if (groupedBy.equals(Tags.createAndAddToDictionary(key1, "one", key2, "aaa"))) { if (groupedBy.equals(Tags.STRING_COMPRESSOR.createAndAddToDictionary(key1, "one", key2, "aaa"))) {
Assertions.assertEquals(entriesOne, groupResult.flatMap()); Assertions.assertEquals(entriesOne, groupResult.flatMap());
} else if (groupedBy.equals(Tags.createAndAddToDictionary(key1, "two", key2, "bbb"))) { } else if (groupedBy
.equals(Tags.STRING_COMPRESSOR.createAndAddToDictionary(key1, "two", key2, "bbb"))) {
// there is no defined order of the entries. // there is no defined order of the entries.
// eventually we might return them in ascending order, but // eventually we might return them in ascending order, but
// that is not yet implemented // that is not yet implemented
@@ -302,7 +315,7 @@ public class PerformanceDbTest {
actualEntries.sort(); actualEntries.sort();
Assertions.assertEquals(entriesTwo, actualEntries); Assertions.assertEquals(entriesTwo, actualEntries);
} else if (groupedBy.equals(Tags.createAndAddToDictionary(key1, "three"))) { } else if (groupedBy.equals(Tags.STRING_COMPRESSOR.createAndAddToDictionary(key1, "three"))) {
Assertions.assertEquals(entriesThree, groupResult.flatMap()); Assertions.assertEquals(entriesThree, groupResult.flatMap());
} else { } else {
Assertions.fail("unexpected group: " + groupedBy); Assertions.fail("unexpected group: " + groupedBy);