Merge remote-tracking branch 'origin/master'

This commit is contained in:
2022-11-21 13:43:15 +01:00
99 changed files with 17240 additions and 9477 deletions

1
.gitignore vendored
View File

@@ -6,3 +6,4 @@
/build/
/target/
/test-output/
java_pid*

View File

@@ -3,7 +3,7 @@
## start Angular development server
```
cd pdb-js
build/npm/npm-v6.14.8/bin/npm run ng serve
build/npm/npm-v8.1.2/bin/npm run ng serve
```
or
@@ -15,7 +15,7 @@ gradlew npm_run_ng_serve
```
cd pdb-js
build/npm/npm-v6.14.8/bin/npm run ng generate component component-name
build/npm/npm-v8.1.2/bin/npm run ng generate component component-name
```
## update JavaScript libraries

View File

@@ -188,6 +188,13 @@ public class PersistentMap<K, V> implements AutoCloseable {
private long version;
/**
*
* @param path file for the index, must be child of storageBasePath
* @param storageBasePath base path
* @param keyEncoder encoder for keys
* @param valueEncoder encoder for values
*/
public PersistentMap(final Path path, final Path storageBasePath, final EncoderDecoder<K> keyEncoder,
final EncoderDecoder<V> valueEncoder) {
this.path = path;
@@ -633,6 +640,14 @@ public class PersistentMap<K, V> implements AutoCloseable {
return stats;
}
/**
 * Reports whether this map holds no user entries.
 *
 * Reads the root node from disk and inspects its entry list; an empty map
 * still carries exactly one sentinel entry (PersistentMapDiskNode.MAX_KEY),
 * so a size of one means "no user data".
 *
 * @return {@code true} if the map contains no user entries
 */
public synchronized boolean isEmpty() {
    final long rootOffset = readNodeOffsetOfRootNode();
    final PersistentMapDiskNode rootNode = getNode(rootOffset);
    // the empty map has a single NodeEntry for the PersistentMapDiskNode.MAX_KEY
    return rootNode.getEntries().size() == 1;
}
private void swapFiles(final Path newFile) throws IOException {
final Path backupFile = path.getParent().resolve(path.getFileName() + "."
+ DateTimeFormatter.ofPattern("yyyyMMdd-HHmmss").format(OffsetDateTime.now()) + ".backup");

View File

@@ -4,10 +4,12 @@ import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Queue;
@@ -57,6 +59,26 @@ public class PersistentMapTest {
}
}
@Test
public void testUpdateValues() throws Exception {
    final Path dbFile = dataDirectory.resolve("map.db");
    try (final PersistentMap<String, String> map = new PersistentMap<>(dbFile, dataDirectory,
            PersistentMap.STRING_CODER, PersistentMap.STRING_CODER)) {
        // overwriting an existing key must replace the stored value, not add a duplicate
        map.putValue("key", "first");
        Assertions.assertEquals("first", map.getValue("key"));
        map.putValue("key", "second");
        Assertions.assertEquals("second", map.getValue("key"));
        // a full scan must observe exactly one value, the latest one
        final List<String> collectedValues = new ArrayList<>();
        map.forAll((key, value) -> collectedValues.add(value));
        Assertions.assertEquals(List.of("second"), collectedValues);
    }
}
@Test
public void testManyValues() throws Exception {
final Path file = dataDirectory.resolve("map.db");
@@ -375,6 +397,29 @@ public class PersistentMapTest {
}
}
@Test
public void testIsEmpty() throws IOException {
    final Path dbFile = dataDirectory.resolve("map.db");
    // a freshly created map must report empty
    try (final PersistentMap<Long, Long> map = new PersistentMap<>(dbFile, dataDirectory, PersistentMap.LONG_CODER,
            PersistentMap.LONG_CODER)) {
        Assertions.assertTrue(map.isEmpty(), "new created map is empty");
    }
    // reopening the empty map keeps it empty; the first put flips it to non-empty
    try (final PersistentMap<Long, Long> map = new PersistentMap<>(dbFile, dataDirectory, PersistentMap.LONG_CODER,
            PersistentMap.LONG_CODER)) {
        Assertions.assertTrue(map.isEmpty(), "map is empty after reading an empty map from disk");
        map.putValue(1L, 2L);
        Assertions.assertFalse(map.isEmpty(), "map is empty after putting a value");
    }
    // the non-empty state must survive a close/reopen cycle
    try (final PersistentMap<Long, Long> map = new PersistentMap<>(dbFile, dataDirectory, PersistentMap.LONG_CODER,
            PersistentMap.LONG_CODER)) {
        Assertions.assertFalse(map.isEmpty(), "map is empty when reading a non empty map from disk");
    }
}
private void assertValuesInMap(final Map<Long, Long> insertedValues, final PersistentMap<Long, Long> map) {
final AtomicInteger counter = new AtomicInteger();
final AtomicInteger maxDepth = new AtomicInteger();

View File

@@ -4,27 +4,27 @@ import org.apache.tools.ant.filters.ReplaceTokens
plugins {
id 'java'
id 'eclipse'
id 'com.github.ben-manes.versions' version "0.39.0" // check for dependency updates run: gradlew dependencyUpdates
id 'com.github.ben-manes.versions' version "0.42.0" // check for dependency updates run: gradlew dependencyUpdates
}
ext {
javaVersion=16
javaVersion=17
version_log4j2= '2.14.1' // keep in sync with spring-boot-starter-log4j2
version_spring = '2.5.4'
version_junit = '5.7.2'
version_junit_platform = '1.7.2'
version_nodejs = '14.17.3' // keep in sync with npm
version_npm = '6.14.13' // keep in sync with nodejs
version_log4j2= '2.17.2' // keep in sync with spring-boot-starter-log4j2
version_spring = '2.7.4'
version_junit = '5.9.1'
version_junit_platform = '1.9.1'
version_nodejs = '16.17.1' // keep in sync with npm
version_npm = '8.15.0' // keep in sync with nodejs
lib_antlr = "org.antlr:antlr4:4.9.2"
lib_antlr = "org.antlr:antlr4:4.11.1"
lib_commons_collections4 = 'org.apache.commons:commons-collections4:4.4'
lib_commons_csv= 'org.apache.commons:commons-csv:1.9.0'
lib_commons_lang3 = 'org.apache.commons:commons-lang3:3.12.0'
lib_jackson_databind = 'com.fasterxml.jackson.core:jackson-databind:2.12.4'
lib_jackson_databind = 'com.fasterxml.jackson.core:jackson-databind:2.13.2'
lib_log4j2_core = "org.apache.logging.log4j:log4j-core:${version_log4j2}"
lib_log4j2_slf4j_impl = "org.apache.logging.log4j:log4j-slf4j-impl:${version_log4j2}"
@@ -136,5 +136,5 @@ subprojects {
}
wrapper {
gradleVersion = '7.2'
gradleVersion = '7.5.1'
}

View File

@@ -37,7 +37,7 @@ public class Entry {
public String toString() {
final OffsetDateTime date = Instant.ofEpochMilli(epochMilli).atOffset(ZoneOffset.UTC);
return date.format(DateTimeFormatter.ISO_ZONED_DATE_TIME) + " = " + value + " (" + tags.asString() + ")";
return date.format(DateTimeFormatter.ISO_ZONED_DATE_TIME) + " = " + value + " (" + tags + ")";
}
@Override

View File

@@ -40,8 +40,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class DataStore implements AutoCloseable {
private static final String ALL_DOCS_KEY = "\ue001allDocs"; // \ue001 is the second character in the private use
// area
private static final Logger EXECUTE_QUERY_LOGGER = LoggerFactory
.getLogger("org.lucares.metrics.dataStore.executeQuery");
private static final Logger MAP_DOCS_TO_DOCID = LoggerFactory
@@ -60,8 +59,6 @@ public class DataStore implements AutoCloseable {
// ids when getting them from the BSFiles)
private static final AtomicLong NEXT_DOC_ID = new AtomicLong(System.currentTimeMillis());
public static Tag TAG_ALL_DOCS = null;
private static final class PartitionedTagsCacheKey {
private final Tags tags;
private final ParititionId partitionId;
@@ -121,20 +118,18 @@ public class DataStore implements AutoCloseable {
private final PartitionDiskStore diskStorage;
private final Path storageBasePath;
private final StringCompressor stringCompressor;
public DataStore(final Path dataDirectory) throws IOException {
storageBasePath = storageDirectory(dataDirectory);
Tags.STRING_COMPRESSOR = StringCompressor.create(keyCompressionFile(storageBasePath));
Tags.STRING_COMPRESSOR.put(ALL_DOCS_KEY);
Tags.STRING_COMPRESSOR.put("");
TAG_ALL_DOCS = Tags.STRING_COMPRESSOR.createTag(ALL_DOCS_KEY, ""); // Tag(String, String) uses the
// StringCompressor internally, so it
// must be initialized after the string compressor has been created
stringCompressor = StringCompressor.create(storageBasePath);
Tags.STRING_COMPRESSOR = stringCompressor;
diskStorage = new PartitionDiskStore(storageBasePath, "data.bs");
tagToDocsId = new PartitionPersistentMap<>(storageBasePath, "keyToValueToDocIdsIndex.bs",
new TagEncoderDecoder(), PartitionAwareWrapper.wrap(PersistentMap.LONG_CODER));
new TagEncoderDecoder(stringCompressor), PartitionAwareWrapper.wrap(PersistentMap.LONG_CODER));
tagsToDocId = new PartitionPersistentMap<>(storageBasePath, "tagsToDocIdIndex.bs", new TagsEncoderDecoder(),
PartitionAwareWrapper.wrap(PersistentMap.LONG_CODER));
@@ -142,16 +137,12 @@ public class DataStore implements AutoCloseable {
docIdToDoc = new PartitionPersistentMap<>(storageBasePath, "docIdToDocIndex.bs", PersistentMap.LONG_CODER,
new DocEncoderDecoder());
queryCompletionIndex = new QueryCompletionIndex(storageBasePath);
queryCompletionIndex = new QueryCompletionIndex(storageBasePath, stringCompressor);
writerCache = new HotEntryCache<>(Duration.ofSeconds(10), 1000);
writerCache.addListener((key, value) -> value.close());
}
private Path keyCompressionFile(final Path dataDirectory) throws IOException {
return dataDirectory.resolve("keys.csv");
}
public static Path storageDirectory(final Path dataDirectory) throws IOException {
return dataDirectory.resolve(SUBDIR_STORAGE);
}
@@ -163,11 +154,15 @@ public class DataStore implements AutoCloseable {
final long start = System.nanoTime();
writer.write(dateAsEpochMilli, value);
final double duration = (System.nanoTime() - start) / 1_000_000.0;
if (duration > 1) {
if (duration > 10) {
System.out.println(" write took: " + duration + " ms " + tags);
}
}
public StringCompressor getStringCompressor() {
return stringCompressor;
}
// visible for test
QueryCompletionIndex getQueryCompletionIndex() {
return queryCompletionIndex;
@@ -186,7 +181,7 @@ public class DataStore implements AutoCloseable {
// store mapping from tag to docId, so that we can find all docs for a given tag
final List<Tag> ts = new ArrayList<>(tags.toTags());
ts.add(TAG_ALL_DOCS);
ts.add(StringCompressor.TAG_ALL_DOCS);
for (final Tag tag : ts) {
Long diskStoreOffsetForDocIdsOfTag = tagToDocsId.getValue(partitionId, tag);
@@ -270,13 +265,13 @@ public class DataStore implements AutoCloseable {
final Set<String> keys = new HashSet<>();
final Tag keyPrefix = Tags.STRING_COMPRESSOR.createTag("", ""); // will find everything
final Tag keyPrefix = stringCompressor.createTag("", ""); // will find everything
final PartitionIdSource partitionIdSource = new DatePartitioner(dateRange);
tagToDocsId.visitValues(partitionIdSource, keyPrefix,
(tag, __) -> keys.add(Tags.STRING_COMPRESSOR.getKeyAsString(tag)));
(tag, __) -> keys.add(stringCompressor.getKeyAsString(tag)));
keys.remove(ALL_DOCS_KEY);
keys.remove(StringCompressor.ALL_DOCS_KEY);
final List<String> result = new ArrayList<>(keys);
Collections.sort(result);
return result;
@@ -286,9 +281,9 @@ public class DataStore implements AutoCloseable {
private PartitionLongList executeQuery(final Query query) {
final long start = System.nanoTime();
synchronized (docIdToDoc) {
final Expression expression = QueryLanguageParser.parse(query.getQuery());
final Expression expression = QueryLanguageParser.parse(query.getQuery(), stringCompressor);
final ExpressionToDocIdVisitor visitor = new ExpressionToDocIdVisitor(query.getDateRange(), tagToDocsId,
diskStorage);
diskStorage, stringCompressor);
final PartitionLongList docIdsList = expression.visit(visitor);
EXECUTE_QUERY_LOGGER.debug("executeQuery({}) took {}ms returned {} results ", query,
(System.nanoTime() - start) / 1_000_000.0, docIdsList.size());
@@ -372,7 +367,7 @@ public class DataStore implements AutoCloseable {
public List<Proposal> propose(final QueryWithCaretMarker query) {
final NewProposerParser newProposerParser = new NewProposerParser(queryCompletionIndex);
final NewProposerParser newProposerParser = new NewProposerParser(queryCompletionIndex, stringCompressor);
final List<Proposal> proposals = newProposerParser.propose(query);
LOGGER.debug("Proposals for query {}: {}", query, proposals);
return proposals;
@@ -387,7 +382,7 @@ public class DataStore implements AutoCloseable {
final PartitionedTagsCacheKey cacheKey = new PartitionedTagsCacheKey(tags, partitionId);
final PdbWriter result = writerCache.putIfAbsent(cacheKey, t -> getWriterInternal(partitionId, tags));
final double duration = (System.nanoTime() - start) / 1_000_000.0;
if (duration > 1) {
if (duration > 100) {
System.out.println(" get Writer took: " + duration + " ms " + tags);
}
return result;
@@ -408,7 +403,7 @@ public class DataStore implements AutoCloseable {
final PdbFile pdbFile = new PdbFile(partitionId, doc.getRootBlockNumber(), tags);
writer = new PdbWriter(pdbFile, diskStorage.getExisting(partitionId));
final double duration = (System.nanoTime() - start) / 1_000_000.0;
if (duration > 1) {
if (duration > 100) {
System.out.println(" init existing writer took: " + duration + " ms " + tags);
}
} catch (final RuntimeException e) {
@@ -427,7 +422,7 @@ public class DataStore implements AutoCloseable {
final PdbWriter result = new PdbWriter(pdbFile, diskStorage.getExisting(partitionId));
final double duration = (System.nanoTime() - start) / 1_000_000.0;
if (duration > 1) {
if (duration > 10) {
METRICS_LOGGER_NEW_WRITER.info("newPdbWriter took {}ms tags: {}", duration, tags);
}
return result;

View File

@@ -9,6 +9,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
import org.lucares.collections.LongList;
import org.lucares.pdb.api.DateTimeRange;
import org.lucares.pdb.api.StringCompressor;
import org.lucares.pdb.api.Tag;
import org.lucares.pdb.api.Tags;
import org.lucares.pdb.datastore.lang.QueryCompletionExpressionOptimizer;
@@ -152,12 +153,6 @@ public class QueryCompletionIndex implements AutoCloseable {
this.tagB = tagB;
}
public TwoTags(final String fieldB, final String fieldA, final String valueA, final String valueB) {
tagA = Tags.STRING_COMPRESSOR.createTag(fieldA, valueA);
tagB = Tags.STRING_COMPRESSOR.createTag(fieldB, valueB);
}
public Tag getTagA() {
return tagA;
}
@@ -275,6 +270,12 @@ public class QueryCompletionIndex implements AutoCloseable {
private static final class EncoderField implements EncoderDecoder<String> {
private final StringCompressor stringCompressor;
public EncoderField(final StringCompressor stringCompressor) {
this.stringCompressor = stringCompressor;
}
@Override
public byte[] encode(final String field) {
@@ -282,13 +283,13 @@ public class QueryCompletionIndex implements AutoCloseable {
return new byte[0];
}
return VariableByteEncoder.encode(Tags.STRING_COMPRESSOR.put(field));
return VariableByteEncoder.encode(stringCompressor.putString(field));
}
@Override
public String decode(final byte[] bytes) {
final long compressedString = VariableByteEncoder.decodeFirstValue(bytes);
return Tags.STRING_COMPRESSOR.get((int) compressedString);
return stringCompressor.getString((int) compressedString);
}
@Override
@@ -300,16 +301,18 @@ public class QueryCompletionIndex implements AutoCloseable {
private final PartitionPersistentMap<TwoTags, Empty, Empty> tagToTagIndex;
private final PartitionPersistentMap<Tag, Empty, Empty> fieldToValueIndex;
private final PartitionPersistentMap<String, Empty, Empty> fieldIndex;
private final StringCompressor stringCompressor;
public QueryCompletionIndex(final Path basePath) throws IOException {
public QueryCompletionIndex(final Path basePath, final StringCompressor stringCompressor) throws IOException {
this.stringCompressor = stringCompressor;
tagToTagIndex = new PartitionPersistentMap<>(basePath, "queryCompletionTagToTagIndex.bs", new EncoderTwoTags(),
PartitionAwareWrapper.wrap(PersistentMap.EMPTY_ENCODER));
fieldToValueIndex = new PartitionPersistentMap<>(basePath, "queryCompletionFieldToValueIndex.bs",
new EncoderTag(), PartitionAwareWrapper.wrap(PersistentMap.EMPTY_ENCODER));
fieldIndex = new PartitionPersistentMap<>(basePath, "queryCompletionFieldIndex.bs", new EncoderField(),
PartitionAwareWrapper.wrap(PersistentMap.EMPTY_ENCODER));
fieldIndex = new PartitionPersistentMap<>(basePath, "queryCompletionFieldIndex.bs",
new EncoderField(stringCompressor), PartitionAwareWrapper.wrap(PersistentMap.EMPTY_ENCODER));
}
public void addTags(final ParititionId partitionId, final Tags tags) throws IOException {
@@ -328,10 +331,10 @@ public class QueryCompletionIndex implements AutoCloseable {
// create indices of all tags and all fields
for (final Tag tag : listOfTagsA) {
fieldToValueIndex.putValue(partitionId, tag, Empty.INSTANCE);
fieldIndex.putValue(partitionId, Tags.STRING_COMPRESSOR.getKeyAsString(tag), Empty.INSTANCE);
fieldIndex.putValue(partitionId, stringCompressor.getKeyAsString(tag), Empty.INSTANCE);
}
final double d = (System.nanoTime() - start) / 1_000_000.0;
if (d > 1) {
if (d > 10) {
System.out.println(" addTags: " + d + " ms");
}
}
@@ -356,15 +359,16 @@ public class QueryCompletionIndex implements AutoCloseable {
final SortedSet<String> result = new TreeSet<>();
final TwoTags keyPrefix = new TwoTags(fieldB, fieldA, null, null);
final TwoTags keyPrefix = new TwoTags(stringCompressor.createTag(fieldA, null),
stringCompressor.createTag(fieldB, null));
final PartitionIdSource partitionIdSource = new DatePartitioner(dateRange);
tagToTagIndex.visitValues(partitionIdSource, keyPrefix, (k, v) -> {
final String vA = Tags.STRING_COMPRESSOR.getValueAsString(k.getTagA());
final String vA = stringCompressor.getValueAsString(k.getTagA());
if (valueA.matches(vA)) {
result.add(Tags.STRING_COMPRESSOR.getValueAsString(k.getTagB()));
result.add(stringCompressor.getValueAsString(k.getTagB()));
}
});
@@ -383,14 +387,14 @@ public class QueryCompletionIndex implements AutoCloseable {
public SortedSet<String> find(final DateTimeRange dateRange, final Tag tag, final String field) {
final SortedSet<String> result = new TreeSet<>();
final int tagBKey = Tags.STRING_COMPRESSOR.put(field);
final int tagBKey = stringCompressor.putString(field);
final Tag tagB = new Tag(tagBKey, -1); // the value must be negative for the prefix search to work. See
// EncoderTwoTags
final TwoTags keyPrefix = new TwoTags(tag, tagB);
final PartitionIdSource partitionIdSource = new DatePartitioner(dateRange);
tagToTagIndex.visitValues(partitionIdSource, keyPrefix, (k, v) -> {
result.add(Tags.STRING_COMPRESSOR.getValueAsString(k.getTagB()));
result.add(stringCompressor.getValueAsString(k.getTagB()));
});
return result;
@@ -406,12 +410,12 @@ public class QueryCompletionIndex implements AutoCloseable {
public SortedSet<String> findAllValuesForField(final DateTimeRange dateRange, final String field) {
final SortedSet<String> result = new TreeSet<>();
final int tagKey = Tags.STRING_COMPRESSOR.put(field);
final int tagKey = stringCompressor.putString(field);
final Tag keyPrefix = new Tag(tagKey, -1); // the value must be negative for the prefix search to work. See
final PartitionIdSource partitionIdSource = new DatePartitioner(dateRange);
fieldToValueIndex.visitValues(partitionIdSource, keyPrefix, (k, v) -> {
result.add(Tags.STRING_COMPRESSOR.getValueAsString(k));
result.add(stringCompressor.getValueAsString(k));
});
return result;
@@ -431,7 +435,7 @@ public class QueryCompletionIndex implements AutoCloseable {
final String field) {
final SortedSet<String> result = new TreeSet<>();
final TwoTags keyPrefix = new TwoTags(field, Tags.STRING_COMPRESSOR.getKeyAsString(tag), null, null);
final TwoTags keyPrefix = new TwoTags(tag.unsetValue(), stringCompressor.createTag(field, null));
final int negatedValueA = tag.getValue();
final PartitionIdSource partitionIdSource = new DatePartitioner(dateRange);
@@ -439,7 +443,7 @@ public class QueryCompletionIndex implements AutoCloseable {
final int valueA = k.getTagA().getValue();
if (valueA != negatedValueA) {
result.add(Tags.STRING_COMPRESSOR.getValueAsString(k.getTagB()));
result.add(stringCompressor.getValueAsString(k.getTagB()));
}
});

View File

@@ -1,26 +1,32 @@
package org.lucares.pdb.datastore.internal;
import org.lucares.collections.LongList;
import org.lucares.pdb.api.StringCompressor;
import org.lucares.pdb.api.Tag;
import org.lucares.pdb.api.Tags;
import org.lucares.pdb.map.PersistentMap.EncoderDecoder;
import org.lucares.utils.byteencoder.VariableByteEncoder;
class TagEncoderDecoder implements EncoderDecoder<Tag> {
private final StringCompressor stringCompressor;
public TagEncoderDecoder(final StringCompressor stringCompressor) {
this.stringCompressor = stringCompressor;
}
@Override
public byte[] encode(final Tag tag) {
final LongList keyAndValueCompressed = new LongList(2);
final String key = Tags.STRING_COMPRESSOR.getKeyAsString(tag);
final String key = stringCompressor.getKeyAsString(tag);
final byte[] result;
if (!key.isEmpty()) {
final Integer keyAsLong = Tags.STRING_COMPRESSOR.put(key);
final Integer keyAsLong = stringCompressor.putString(key);
keyAndValueCompressed.add(keyAsLong);
final String value = Tags.STRING_COMPRESSOR.getValueAsString(tag);
final String value = stringCompressor.getValueAsString(tag);
if (!value.isEmpty()) {
final Integer valueAsLong = Tags.STRING_COMPRESSOR.put(value);
final Integer valueAsLong = stringCompressor.putString(value);
keyAndValueCompressed.add(valueAsLong);
}
result = VariableByteEncoder.encode(keyAndValueCompressed);
@@ -38,17 +44,17 @@ class TagEncoderDecoder implements EncoderDecoder<Tag> {
switch (compressedStrings.size()) {
case 0:
result = Tags.STRING_COMPRESSOR.createTag("", "");
result = stringCompressor.createTag("", "");
break;
case 1:
final String k = Tags.STRING_COMPRESSOR.get((int) compressedStrings.get(0));
result = Tags.STRING_COMPRESSOR.createTag(k, "");
final String k = stringCompressor.getString((int) compressedStrings.get(0));
result = stringCompressor.createTag(k, "");
break;
case 2:
final String key = Tags.STRING_COMPRESSOR.get((int) compressedStrings.get(0));
final String value = Tags.STRING_COMPRESSOR.get((int) compressedStrings.get(1));
result = Tags.STRING_COMPRESSOR.createTag(key, value);
final String key = stringCompressor.getString((int) compressedStrings.get(0));
final String value = stringCompressor.getString((int) compressedStrings.get(1));
result = stringCompressor.createTag(key, value);
break;
default:
throw new IllegalStateException("too many values: " + compressedStrings);

View File

@@ -9,10 +9,9 @@ import java.util.stream.Collectors;
import org.lucares.collections.LongList;
import org.lucares.pdb.api.DateTimeRange;
import org.lucares.pdb.api.StringCompressor;
import org.lucares.pdb.api.Tag;
import org.lucares.pdb.api.Tags;
import org.lucares.pdb.blockstorage.LongStreamFile;
import org.lucares.pdb.datastore.internal.DataStore;
import org.lucares.pdb.datastore.internal.DatePartitioner;
import org.lucares.pdb.datastore.internal.ParititionId;
import org.lucares.pdb.datastore.internal.PartitionDiskStore;
@@ -34,8 +33,12 @@ public class ExpressionToDocIdVisitor extends ExpressionVisitor<PartitionLongLis
private final DatePartitioner datePartitioner;
private final StringCompressor stringCompressor;
public ExpressionToDocIdVisitor(final DateTimeRange dateRange,
final PartitionPersistentMap<Tag, Long, Long> keyToValueToDocsId, final PartitionDiskStore diskStorage) {
final PartitionPersistentMap<Tag, Long, Long> keyToValueToDocsId, final PartitionDiskStore diskStorage,
final StringCompressor stringCompressor) {
this.stringCompressor = stringCompressor;
this.datePartitioner = new DatePartitioner(dateRange);
this.keyToValueToDocId = keyToValueToDocsId;
this.diskStorage = diskStorage;
@@ -131,7 +134,7 @@ public class ExpressionToDocIdVisitor extends ExpressionVisitor<PartitionLongLis
final Set<ParititionId> availablePartitionIds = keyToValueToDocId.getAvailablePartitionIds(datePartitioner);
for (final ParititionId partitionId : availablePartitionIds) {
final Long blockOffset = keyToValueToDocId.getValue(partitionId, DataStore.TAG_ALL_DOCS);
final Long blockOffset = keyToValueToDocId.getValue(partitionId, StringCompressor.TAG_ALL_DOCS);
if (blockOffset != null) {
final LongStreamFile bsFile = diskStorage.streamExistingFile(blockOffset, partitionId);
@@ -149,9 +152,9 @@ public class ExpressionToDocIdVisitor extends ExpressionVisitor<PartitionLongLis
final Set<ParititionId> availablePartitionIds = keyToValueToDocId.getAvailablePartitionIds(datePartitioner);
for (final ParititionId partitionId : availablePartitionIds) {
final List<LongList> docIdsForPartition = new ArrayList<>();
keyToValueToDocId.visitValues(partitionId, Tags.STRING_COMPRESSOR.createTag(propertyName, ""),
keyToValueToDocId.visitValues(partitionId, stringCompressor.createTag(propertyName, ""),
(tag, blockOffsetToDocIds) -> {
if (valuePattern.matcher(Tags.STRING_COMPRESSOR.getValueAsString(tag)).matches()) {
if (valuePattern.matcher(stringCompressor.getValueAsString(tag)).matches()) {
try (final LongStreamFile bsFile = diskStorage.streamExistingFile(blockOffsetToDocIds,
partitionId)) {

View File

@@ -1,11 +1,17 @@
package org.lucares.pdb.datastore.lang;
import org.lucares.pdb.api.Tags;
import org.lucares.pdb.api.StringCompressor;
import org.lucares.pdb.datastore.lang.Expression.InExpression;
import org.lucares.pdb.datastore.lang.Expression.Property;
public class ExpressionValidationVisitor extends IdentityExpressionVisitor {
private final StringCompressor stringCompressor;
public ExpressionValidationVisitor(final StringCompressor stringCompressor) {
this.stringCompressor = stringCompressor;
}
@Override
public Expression visit(final Property expression) {
@@ -27,13 +33,13 @@ public class ExpressionValidationVisitor extends IdentityExpressionVisitor {
}
private void assertValueExists(final String value) {
if (Tags.STRING_COMPRESSOR.getIfPresent(value) < 0) {
if (stringCompressor.getIfPresent(value) < 0) {
throw new UnkownTokenSyntaxException(value);
}
}
public static void validate(final Expression expression) {
expression.visit(new ExpressionValidationVisitor());
public static void validate(final Expression expression, final StringCompressor stringCompressor) {
expression.visit(new ExpressionValidationVisitor(stringCompressor));
}
}

View File

@@ -13,6 +13,7 @@ import org.lucares.pdb.api.DateTimeRange;
import org.lucares.pdb.api.QueryConstants;
import org.lucares.pdb.api.QueryWithCaretMarker;
import org.lucares.pdb.api.QueryWithCaretMarker.ResultMode;
import org.lucares.pdb.api.StringCompressor;
import org.lucares.pdb.datastore.Proposal;
import org.lucares.pdb.datastore.internal.QueryCompletionIndex;
import org.lucares.utils.CollectionUtils;
@@ -34,8 +35,11 @@ public class NewProposerParser implements QueryConstants {
private final QueryCompletionIndex queryCompletionIndex;
public NewProposerParser(final QueryCompletionIndex queryCompletionIndex) {
private final StringCompressor stringCompressor;
public NewProposerParser(final QueryCompletionIndex queryCompletionIndex, final StringCompressor stringCompressor) {
this.queryCompletionIndex = queryCompletionIndex;
this.stringCompressor = stringCompressor;
}
public List<Proposal> propose(final QueryWithCaretMarker query) {
@@ -159,7 +163,7 @@ public class NewProposerParser implements QueryConstants {
final String queryWithCaretMarker = query.getQueryWithCaretMarker();
// parse the query
final Expression expression = QueryLanguageParser.parse(queryWithCaretMarker);
final Expression expression = QueryLanguageParser.parse(queryWithCaretMarker, stringCompressor);
// normalize it, so that we can use the queryCompletionIndex to search for
// candidate values

View File

@@ -1,9 +1,10 @@
package org.lucares.pdb.datastore.lang;
import org.apache.commons.lang3.StringUtils;
import org.lucares.pdb.api.StringCompressor;
public class QueryLanguageParser {
public static Expression parse(final String query) {
public static Expression parse(final String query, final StringCompressor stringCompressor) {
final Expression result;
if (StringUtils.isEmpty(query)) {
@@ -12,7 +13,7 @@ public class QueryLanguageParser {
final QueryLanguage lang = new QueryLanguage();
result = lang.parse(query);
}
ExpressionValidationVisitor.validate(result);
ExpressionValidationVisitor.validate(result, stringCompressor);
return result;
}
}

View File

@@ -23,6 +23,7 @@ import javax.swing.JTextArea;
import javax.swing.JTextField;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
@@ -36,7 +37,6 @@ import org.lucares.pdb.api.Tags;
import org.lucares.pdb.blockstorage.BSFile;
import org.lucares.pdb.datastore.Doc;
import org.lucares.pdb.datastore.Proposal;
import org.junit.jupiter.api.Assertions;
import org.lucares.utils.CollectionUtils;
import org.lucares.utils.DateUtils;
import org.lucares.utils.file.FileUtils;
@@ -66,11 +66,13 @@ public class DataStoreTest {
final DateTimeRange dateRange = DateTimeRange.relativeHours(1);
final ParititionId partitionId = DateIndexExtension.toPartitionIds(dateRange).get(0);
final Tags eagleTim = Tags.createAndAddToDictionary("bird", "eagle", "name", "Tim");
final Tags pigeonJennifer = Tags.createAndAddToDictionary("bird", "pigeon", "name", "Jennifer");
final Tags flamingoJennifer = Tags.createAndAddToDictionary("bird", "flamingo", "name", "Jennifer");
final Tags labradorJenny = Tags.createAndAddToDictionary("dog", "labrador", "name", "Jenny");
final Tags labradorTim = Tags.createAndAddToDictionary("dog", "labrador", "name", "Tim");
final Tags eagleTim = Tags.STRING_COMPRESSOR.createAndAddToDictionary("bird", "eagle", "name", "Tim");
final Tags pigeonJennifer = Tags.STRING_COMPRESSOR.createAndAddToDictionary("bird", "pigeon", "name",
"Jennifer");
final Tags flamingoJennifer = Tags.STRING_COMPRESSOR.createAndAddToDictionary("bird", "flamingo", "name",
"Jennifer");
final Tags labradorJenny = Tags.STRING_COMPRESSOR.createAndAddToDictionary("dog", "labrador", "name", "Jenny");
final Tags labradorTim = Tags.STRING_COMPRESSOR.createAndAddToDictionary("dog", "labrador", "name", "Tim");
tagsToBlockStorageRootBlockNumber = new HashMap<>();
tagsToBlockStorageRootBlockNumber.put(eagleTim, dataStore.createNewFile(partitionId, eagleTim));
@@ -115,8 +117,10 @@ public class DataStoreTest {
dataStore = new DataStore(dataDirectory);
tagsToBlockStorageRootBlockNumber = new LinkedHashMap<>();
final Tags pigeonJennifer = Tags.createAndAddToDictionary("bird", "pigeon", "name", "Jennifer");
final Tags flamingoJennifer = Tags.createAndAddToDictionary("bird", "flamingo", "name", "Jennifer");
final Tags pigeonJennifer = Tags.STRING_COMPRESSOR.createAndAddToDictionary("bird", "pigeon", "name",
"Jennifer");
final Tags flamingoJennifer = Tags.STRING_COMPRESSOR.createAndAddToDictionary("bird", "flamingo", "name",
"Jennifer");
final ParititionId partitionId = new ParititionId("partitionA");
tagsToBlockStorageRootBlockNumber.put(pigeonJennifer, dataStore.createNewFile(partitionId, pigeonJennifer));
@@ -130,7 +134,7 @@ public class DataStoreTest {
public void testBlockAlignment() throws IOException {
dataStore = new DataStore(dataDirectory);
final Tags eagleTim = Tags.createAndAddToDictionary("bird", "eagle", "name", "Tim");
final Tags eagleTim = Tags.STRING_COMPRESSOR.createAndAddToDictionary("bird", "eagle", "name", "Tim");
final long eagleTimBlockOffset = dataStore.createNewFile(new ParititionId("partitionA"), eagleTim);
Assertions.assertEquals(0, eagleTimBlockOffset % BSFile.BLOCK_SIZE);
}
@@ -183,18 +187,28 @@ public class DataStoreTest {
final DateTimeRange dateRange = DateTimeRange.relativeHours(1);
final List<Tags> tags = Arrays.asList(
Tags.createAndAddToDictionary("type", "bird", "subtype", "eagle", "age", "three", "name", "Tim"),
Tags.createAndAddToDictionary("type", "bird", "subtype", "pigeon", "age", "two", "name", "Jennifer"),
Tags.createAndAddToDictionary("type", "bird", "subtype", "flamingo", "age", "one", "name", "Jennifer"),
Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "bird", "subtype", "eagle", "age", "three",
"name", "Tim"),
Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "bird", "subtype", "pigeon", "age", "two",
"name", "Jennifer"),
Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "bird", "subtype", "flamingo", "age", "one",
"name", "Jennifer"),
Tags.createAndAddToDictionary("type", "dog", "subtype", "labrador", "age", "three", "name", "Jenny"),
Tags.createAndAddToDictionary("type", "dog", "subtype", "labrador", "age", "three", "name", "Tim"),
Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "dog", "subtype", "labrador", "age", "three",
"name", "Jenny"),
Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "dog", "subtype", "labrador", "age", "three",
"name", "Tim"),
Tags.createAndAddToDictionary("type", "cat", "subtype", "tiger", "age", "one", "name", "Timothy"),
Tags.createAndAddToDictionary("type", "cat", "subtype", "tiger", "age", "two", "name", "Paul"),
Tags.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "three", "name", "Jane"),
Tags.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "four", "name", "Sam"),
Tags.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "four", "name", "John"));
Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "cat", "subtype", "tiger", "age", "one", "name",
"Timothy"),
Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "cat", "subtype", "tiger", "age", "two", "name",
"Paul"),
Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "three",
"name", "Jane"),
Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "four", "name",
"Sam"),
Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "four", "name",
"John"));
tags.forEach(t -> dataStore.createNewFile(partitionId, t));
@@ -208,7 +222,7 @@ public class DataStoreTest {
final long timestamp = DateUtils.getDate(2016, 1, 1, 13, 1, 1).toInstant().toEpochMilli();
final Tags tags = Tags.createAndAddToDictionary("myKey", "myValue");
final Tags tags = Tags.STRING_COMPRESSOR.createAndAddToDictionary("myKey", "myValue");
dataStore.write(timestamp, tags, 1);
dataStore.write(timestamp, tags, 2);
@@ -222,21 +236,28 @@ public class DataStoreTest {
try (final DataStore dataStore = new DataStore(dir)) {
final List<Tags> tags = Arrays.asList(
Tags.createAndAddToDictionary("type", "bird", "subtype", "eagle", "age", "three", "name", "Tim"),
Tags.createAndAddToDictionary("type", "bird", "subtype", "pigeon", "age", "two", "name",
"Jennifer"),
Tags.createAndAddToDictionary("type", "bird", "subtype", "flamingo", "age", "one", "name",
"Jennifer"),
Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "bird", "subtype", "eagle", "age", "three",
"name", "Tim"),
Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "bird", "subtype", "pigeon", "age", "two",
"name", "Jennifer"),
Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "bird", "subtype", "flamingo", "age", "one",
"name", "Jennifer"),
Tags.createAndAddToDictionary("type", "dog", "subtype", "labrador", "age", "three", "name",
"Jenny"),
Tags.createAndAddToDictionary("type", "dog", "subtype", "labrador", "age", "three", "name", "Tim"),
Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "dog", "subtype", "labrador", "age",
"three", "name", "Jenny"),
Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "dog", "subtype", "labrador", "age",
"three", "name", "Tim"),
Tags.createAndAddToDictionary("type", "cat", "subtype", "tiger", "age", "one", "name", "Timothy"),
Tags.createAndAddToDictionary("type", "cat", "subtype", "tiger", "age", "two", "name", "Paul"),
Tags.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "three", "name", "Jane"),
Tags.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "four", "name", "Sam"),
Tags.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "four", "name", "John"));
Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "cat", "subtype", "tiger", "age", "one",
"name", "Timothy"),
Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "cat", "subtype", "tiger", "age", "two",
"name", "Paul"),
Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "three",
"name", "Jane"),
Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "four",
"name", "Sam"),
Tags.STRING_COMPRESSOR.createAndAddToDictionary("type", "cat", "subtype", "lion", "age", "four",
"name", "John"));
final DateTimeRange dateRange = DateTimeRange.relativeMillis(0);
final ParititionId partitionId = DateIndexExtension.toPartitionIds(dateRange).get(0);

View File

@@ -8,6 +8,7 @@ import java.util.Collections;
import java.util.List;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.lucares.pdb.api.DateTimeRange;
@@ -15,7 +16,6 @@ import org.lucares.pdb.api.QueryWithCaretMarker;
import org.lucares.pdb.api.QueryWithCaretMarker.ResultMode;
import org.lucares.pdb.api.Tags;
import org.lucares.pdb.datastore.Proposal;
import org.junit.jupiter.api.Assertions;
import org.lucares.utils.CollectionUtils;
import org.lucares.utils.file.FileUtils;
@@ -44,18 +44,23 @@ public class ProposerTest {
dateRange = DateTimeRange.now();
final ParititionId now = DateIndexExtension.toPartitionIds(dateRange).get(0);
final Tags eagleTim = Tags.createAndAddToDictionary("bird", "eagle", "name", "Tim");
final Tags eagleTimothy = Tags.createAndAddToDictionary("bird", "eagle", "name", "Timothy");
final Tags pigeonJennifer = Tags.createAndAddToDictionary("bird", "pigeon", "name", "Jennifer");
final Tags flamingoJennifer = Tags.createAndAddToDictionary("bird", "flamingo", "name", "Jennifer");
final Tags labradorJenny = Tags.createAndAddToDictionary("dog", "labrador", "name", "Jenny");
final Tags labradorTim = Tags.createAndAddToDictionary("dog", "labrador", "name", "Tim");
final Tags eagleTim = Tags.STRING_COMPRESSOR.createAndAddToDictionary("bird", "eagle", "name", "Tim");
final Tags eagleTimothy = Tags.STRING_COMPRESSOR.createAndAddToDictionary("bird", "eagle", "name", "Timothy");
final Tags pigeonJennifer = Tags.STRING_COMPRESSOR.createAndAddToDictionary("bird", "pigeon", "name",
"Jennifer");
final Tags flamingoJennifer = Tags.STRING_COMPRESSOR.createAndAddToDictionary("bird", "flamingo", "name",
"Jennifer");
final Tags labradorJenny = Tags.STRING_COMPRESSOR.createAndAddToDictionary("dog", "labrador", "name", "Jenny");
final Tags labradorTim = Tags.STRING_COMPRESSOR.createAndAddToDictionary("dog", "labrador", "name", "Tim");
final Tags methodA = Tags.createAndAddToDictionary("method", "FooController.doImportantStuff", "source", "web");
final Tags methodB = Tags.createAndAddToDictionary("method", "FooService.doImportantStuff", "source",
"service");
final Tags methodC = Tags.createAndAddToDictionary("method", "BarController.doBoringStuff", "source", "web");
final Tags methodD = Tags.createAndAddToDictionary("method", "FooBarService.doOtherStuff", "source", "service");
final Tags methodA = Tags.STRING_COMPRESSOR.createAndAddToDictionary("method", "FooController.doImportantStuff",
"source", "web");
final Tags methodB = Tags.STRING_COMPRESSOR.createAndAddToDictionary("method", "FooService.doImportantStuff",
"source", "service");
final Tags methodC = Tags.STRING_COMPRESSOR.createAndAddToDictionary("method", "BarController.doBoringStuff",
"source", "web");
final Tags methodD = Tags.STRING_COMPRESSOR.createAndAddToDictionary("method", "FooBarService.doOtherStuff",
"source", "service");
dataStore.createNewFile(now, eagleTim);
dataStore.createNewFile(now, eagleTimothy);

View File

@@ -35,18 +35,22 @@ public class QueryCompletionIndexTest {
@Test
public void test() throws Exception {
Tags.STRING_COMPRESSOR = new StringCompressor(new UniqueStringIntegerPairs());
final StringCompressor stringCompressor = new StringCompressor(new UniqueStringIntegerPairs());
Tags.STRING_COMPRESSOR = stringCompressor;
final List<Tags> tags = Arrays.asList(//
Tags.createAndAddToDictionary("firstname", "John", "lastname", "Doe", "country", "Atlantis"), // A
Tags.createAndAddToDictionary("firstname", "Jane", "lastname", "Doe", "country", "ElDorado"), // B
Tags.createAndAddToDictionary("firstname", "John", "lastname", "Miller", "country", "Atlantis")// C
stringCompressor.createAndAddToDictionary("firstname", "John", "lastname", "Doe", "country",
"Atlantis"), // A
stringCompressor.createAndAddToDictionary("firstname", "Jane", "lastname", "Doe", "country",
"ElDorado"), // B
stringCompressor.createAndAddToDictionary("firstname", "John", "lastname", "Miller", "country",
"Atlantis")// C
);
final DateTimeRange dateRange = DateTimeRange.relativeMillis(1);
final ParititionId partitionId = DateIndexExtension.toPartitionIds(dateRange).get(0);
try (QueryCompletionIndex index = new QueryCompletionIndex(dataDirectory)) {
try (QueryCompletionIndex index = new QueryCompletionIndex(dataDirectory, stringCompressor)) {
for (final Tags t : tags) {
index.addTags(partitionId, t);
}
@@ -54,13 +58,13 @@ public class QueryCompletionIndexTest {
// all firstnames where lastname=Doe are returned sorted alphabetically.
// tags A and B match
final SortedSet<String> firstnamesWithLastnameDoe = index.find(dateRange,
Tags.STRING_COMPRESSOR.createTag("lastname", "Doe"), "firstname");
stringCompressor.createTag("lastname", "Doe"), "firstname");
Assertions.assertEquals(new TreeSet<>(Set.of("Jane", "John")), firstnamesWithLastnameDoe);
// no duplicates are returned:
// tags A and C match firstname=John, but both have country=Atlantis
final SortedSet<String> countryWithFirstnameJohn = index.find(dateRange,
Tags.STRING_COMPRESSOR.createTag("firstname", "John"), "country");
stringCompressor.createTag("firstname", "John"), "country");
Assertions.assertEquals(new TreeSet<>(Arrays.asList("Atlantis")), countryWithFirstnameJohn);
// findAllValuesForField sorts alphabetically

Binary file not shown.

View File

@@ -1,5 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-7.2-bin.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-7.5.1-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

6
gradlew vendored
View File

@@ -205,6 +205,12 @@ set -- \
org.gradle.wrapper.GradleWrapperMain \
"$@"
# Stop when "xargs" is not available.
if ! command -v xargs >/dev/null 2>&1
then
die "xargs is not available"
fi
# Use "xargs" to parse quoted args.
#
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.

10
gradlew.bat vendored
View File

@@ -40,7 +40,7 @@ if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto execute
if %ERRORLEVEL% equ 0 goto execute
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
@@ -75,13 +75,15 @@ set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
if %ERRORLEVEL% equ 0 goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
set EXIT_CODE=%ERRORLEVEL%
if %EXIT_CODE% equ 0 set EXIT_CODE=1
if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
exit /b %EXIT_CODE%
:mainEnd
if "%OS%"=="Windows_NT" endlocal

View File

@@ -1,5 +1,6 @@
dependencies {
implementation project(':pdb-utils')
implementation project(':block-storage')
implementation lib_primitive_collections
}

View File

@@ -1,8 +1,5 @@
package org.lucares.pdb.api;
import java.util.ArrayList;
import java.util.List;
public class Query {
private final String query;
@@ -42,25 +39,6 @@ public class Query {
return new Query(query, dateRange);
}
public static Query createQuery(final Tags tags, final DateTimeRange dateRange) {
final List<String> terms = new ArrayList<>();
for (final String key : tags.getKeys()) {
final String value = tags.getValue(key);
final StringBuilder term = new StringBuilder();
term.append(key);
term.append("=");
term.append(value);
term.append(" ");
terms.add(term.toString());
}
return new Query(String.join(" and ", terms), dateRange);
}
public String getQuery() {
return query;
}

View File

@@ -8,39 +8,52 @@ import java.util.function.Function;
*/
public class StringCompressor {
public static final String ALL_DOCS_KEY = "\ue001allDocs"; // \ue001 is the second character in the private use
// area
private static final String DEFAULT_GROUP = "<none>";
public static Tag TAG_ALL_DOCS;
private final UniqueStringIntegerPairs usip;
public StringCompressor(final UniqueStringIntegerPairs usip) throws RuntimeIOException {
this.usip = usip;
}
public static StringCompressor create(final Path path) {
final UniqueStringIntegerPairs mapsi = new UniqueStringIntegerPairs(path);
return new StringCompressor(mapsi);
public static StringCompressor create(final Path storageBasePath) {
final UniqueStringIntegerPairs mapsi = new UniqueStringIntegerPairs(storageBasePath);
final StringCompressor result = new StringCompressor(mapsi);
result.putString(ALL_DOCS_KEY);
result.putString("");
TAG_ALL_DOCS = result.createTag(ALL_DOCS_KEY, "");
return result;
}
public int put(final String string) {
public int putString(final String string) {
return usip.computeIfAbsent(string, s -> usip.getHighestInteger() + 1);
return usip.computeIfAbsent(string);
}
public int put(final byte[] bytes, final int start, final int endExclusive,
public int putStringFromBytes(final byte[] bytes, final int start, final int endExclusive,
final Function<String, String> postProcess) {
return usip.computeIfAbsent(bytes, start, endExclusive, postProcess);
}
public int put(final String value, final Function<String, String> postProcess) {
public int putString(final String value, final Function<String, String> postProcess) {
final String processedValue = postProcess.apply(value);
return usip.computeIfAbsentWithPostprocess(processedValue, postProcess);
return usip.computeIfAbsent(processedValue);
}
public String get(final int integer) {
public String getString(final int integer) {
return usip.getKey(integer);
return usip.getString(integer);
}
public int getIfPresent(final String string) {
final Integer integer = usip.get(string);
final Integer integer = usip.getInt(string);
return integer != null ? integer : -1;
}
@@ -51,17 +64,104 @@ public class StringCompressor {
* @param value the value
*/
public Tag createTag(final String field, final String value) {
final int f = field != null ? Tags.STRING_COMPRESSOR.getIfPresent(field) : -1;
final int v = value != null ? Tags.STRING_COMPRESSOR.getIfPresent(value) : -1;
final int f = field != null ? getIfPresent(field) : -1;
final int v = value != null ? getIfPresent(value) : -1;
return new Tag(f, v);
}
public String getKeyAsString(final Tag tag) {
return get(tag.getKey());
return getString(tag.getKey());
}
public String getValueAsString(final Tag tag) {
return get(tag.getValue());
return getString(tag.getValue());
}
public Tags createAndAddToDictionary(final String key, final String value) {
final int keyAsInt = putString(key);
final int valueAsInt = putString(value);
return TagsBuilder.create().add(keyAsInt, valueAsInt).build();
}
public Tags createAndAddToDictionary(final String key1, final String value1, final String key2,
final String value2) {
final int key1AsInt = putString(key1);
final int value1AsInt = putString(value1);
final int key2AsInt = putString(key2);
final int value2AsInt = putString(value2);
final Tags result = TagsBuilder.create().add(key1AsInt, value1AsInt).add(key2AsInt, value2AsInt).build();
return result;
}
public Tags createAndAddToDictionary(final String key1, final String value1, final String key2, final String value2,
final String key3, final String value3) {
final int key1AsInt = putString(key1);
final int value1AsInt = putString(value1);
final int key2AsInt = putString(key2);
final int value2AsInt = putString(value2);
final int key3AsInt = putString(key3);
final int value3AsInt = putString(value3);
final Tags result = TagsBuilder.create().add(key1AsInt, value1AsInt).add(key2AsInt, value2AsInt)
.add(key3AsInt, value3AsInt).build();
return result;
}
public Tags createAndAddToDictionary(final String key1, final String value1, final String key2, final String value2,
final String key3, final String value3, final String key4, final String value4) {
final int key1AsInt = putString(key1);
final int value1AsInt = putString(value1);
final int key2AsInt = putString(key2);
final int value2AsInt = putString(value2);
final int key3AsInt = putString(key3);
final int value3AsInt = putString(value3);
final int key4AsInt = putString(key4);
final int value4AsInt = putString(value4);
final Tags result = TagsBuilder.create().add(key1AsInt, value1AsInt).add(key2AsInt, value2AsInt)
.add(key3AsInt, value3AsInt).add(key4AsInt, value4AsInt).build();
return result;
}
public String asValueString(final Tags tags) {
final StringBuilder result = new StringBuilder();
if (tags.isEmpty()) {
result.append(DEFAULT_GROUP);
} else {
for (final Tag tag : tags.toTags()) {
final String value = getString(tag.getValue());
if (result.length() > 0) {
result.append(" / ");
}
result.append(value);
}
}
return result.toString();
}
public String asString(final Tags tags) {
final StringBuilder result = new StringBuilder();
for (final Tag tag : tags.toTags()) {
if (result.length() > 0) {
result.append(", ");
}
result.append(getString(tag.getKey()));
result.append("=");
result.append(getString(tag.getValue()));
}
return result.toString();
}
}

View File

@@ -6,13 +6,14 @@ package org.lucares.pdb.api;
* 'Sam' is the value.
*/
public class Tag implements Comparable<Tag> {
private final int field;
private final int value;
/**
* Create a new tag with field and value specified as int. See
* {@link Tags#STRING_COMPRESSOR} for the mapping between Strings and ints.
* {@link StringCompressor} for the mapping between Strings and ints.
*
* @param field the field as int
* @param value the value as int
@@ -42,6 +43,10 @@ public class Tag implements Comparable<Tag> {
return value;
}
public Tag unsetValue() {
return new Tag(field, -1);
}
@Override
public String toString() {
return field + "=" + value;

View File

@@ -7,7 +7,6 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import java.util.function.BiConsumer;
import java.util.function.Function;
import org.lucares.collections.IntList;
@@ -16,7 +15,6 @@ import org.lucares.utils.byteencoder.VariableByteEncoder;
public class Tags implements Comparable<Tags> {
private static final String DEFAULT_GROUP = "<none>";
public static StringCompressor STRING_COMPRESSOR = null;
private static final byte[] EMPTY_BYTES = new byte[0];
public static final Tags EMPTY = new Tags();
@@ -60,33 +58,6 @@ public class Tags implements Comparable<Tags> {
return result;
}
public static Tags createAndAddToDictionary(final String key, final String value) {
return TagsBuilder.create().addAndAddToDictionary(key, value).build();
}
public static Tags createAndAddToDictionary(final String key1, final String value1, final String key2,
final String value2) {
final Tags result = TagsBuilder.create().addAndAddToDictionary(key1, value1).addAndAddToDictionary(key2, value2)
.build();
return result;
}
public static Tags createAndAddToDictionary(final String key1, final String value1, final String key2,
final String value2, final String key3, final String value3) {
final Tags result = TagsBuilder.create().addAndAddToDictionary(key1, value1).addAndAddToDictionary(key2, value2)
.addAndAddToDictionary(key3, value3).build();
return result;
}
public static Tags createAndAddToDictionary(final String key1, final String value1, final String key2,
final String value2, final String key3, final String value3, final String key4, final String value4) {
final Tags result = TagsBuilder.create().addAndAddToDictionary(key1, value1).addAndAddToDictionary(key2, value2)
.addAndAddToDictionary(key3, value3).addAndAddToDictionary(key4, value4).build();
return result;
}
public static Tags fromBytes(final byte[] bytes) {
final List<Tag> result = new ArrayList<>();
@@ -157,18 +128,18 @@ public class Tags implements Comparable<Tags> {
}
public String getValue(final String key) {
final Tag needle = new Tag(STRING_COMPRESSOR.put(key), 0);
final Tag needle = new Tag(STRING_COMPRESSOR.putString(key), 0);
final int index = Collections.binarySearch(tags, needle, TagByKeyComparator.INSTANCE);
if (index >= 0) {
final Tag tag = tags.get(index);
return STRING_COMPRESSOR.get(tag.getValue());
return STRING_COMPRESSOR.getString(tag.getValue());
}
return null;
}
public int getValueAsInt(final String key) {
final Tag needle = new Tag(STRING_COMPRESSOR.put(key), 0);
final Tag needle = new Tag(STRING_COMPRESSOR.putString(key), 0);
final int index = Collections.binarySearch(tags, needle, TagByKeyComparator.INSTANCE);
if (index >= 0) {
@@ -181,7 +152,7 @@ public class Tags implements Comparable<Tags> {
public Set<String> getKeys() {
final TreeSet<String> result = new TreeSet<>();
for (final Tag tag : tags) {
result.add(STRING_COMPRESSOR.get(tag.getKey()));
result.add(STRING_COMPRESSOR.getString(tag.getKey()));
}
return result;
}
@@ -202,15 +173,6 @@ public class Tags implements Comparable<Tags> {
return tags;
}
public void forEach(final BiConsumer<String, String> keyValueConsumer) {
for (final Tag tag : tags) {
final String key = STRING_COMPRESSOR.get(tag.getKey());
final String value = STRING_COMPRESSOR.get(tag.getValue());
keyValueConsumer.accept(key, value);
}
}
public Tags mapTags(final Function<Tag, Tag> tagMapFuntion) {
final List<Tag> mappedTags = new ArrayList<>(tags.size());
for (final Tag tag : tags) {
@@ -270,39 +232,4 @@ public class Tags implements Comparable<Tags> {
public boolean isEmpty() {
return tags.isEmpty();
}
/**
* @return User facing readable representation
*/
public String asString() {
final StringBuilder result = new StringBuilder();
for (final Tag tag : tags) {
if (result.length() > 0) {
result.append(", ");
}
result.append(STRING_COMPRESSOR.get(tag.getKey()));
result.append("=");
result.append(STRING_COMPRESSOR.get(tag.getValue()));
}
return result.toString();
}
public String asValueString() {
final StringBuilder result = new StringBuilder();
if (isEmpty()) {
result.append(DEFAULT_GROUP);
} else {
forEach((k, v) -> {
if (result.length() > 0) {
result.append(" / ");
}
result.append(v);
});
}
return result.toString();
}
}

View File

@@ -25,16 +25,9 @@ public class TagsBuilder {
return this;
}
public TagsBuilder add(final String key, final String value) {
final int keyAsInt = Tags.STRING_COMPRESSOR.getIfPresent(key);
final int valueAsInt = Tags.STRING_COMPRESSOR.getIfPresent(value);
return add(keyAsInt, valueAsInt);
}
public TagsBuilder addAndAddToDictionary(final String key, final String value) {
final int keyAsInt = Tags.STRING_COMPRESSOR.put(key);
final int valueAsInt = Tags.STRING_COMPRESSOR.put(value);
return add(keyAsInt, valueAsInt);
public TagsBuilder add(final Tag tag) {
tags.add(tag);
return this;
}
public Tags build() {

View File

@@ -2,11 +2,8 @@ package org.lucares.pdb.api;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
@@ -19,6 +16,8 @@ import java.util.Set;
import java.util.function.Function;
import java.util.regex.Pattern;
import org.lucares.pdb.map.PersistentMap;
/**
* A very simple {@link Set}-like or {@link Map}-like data structure that stores
* unique&sup1; pairs of Strings and integers persistently.
@@ -33,8 +32,6 @@ import java.util.regex.Pattern;
public class UniqueStringIntegerPairs {
private static final String SEPARATOR = "\t";
private static final boolean APPEND = true;
private static final class ByteArray implements Comparable<ByteArray> {
private final byte[] array;
private final int start;
@@ -97,33 +94,50 @@ public class UniqueStringIntegerPairs {
*/
private final List<String> intToString = new ArrayList<>();
private final Path file;
final PersistentMap<String, Long> persistentMap;
public UniqueStringIntegerPairs() {
this(null);
}
public UniqueStringIntegerPairs(final Path file) {
this.file = file;
if (file != null) {
init(file);
public UniqueStringIntegerPairs(final Path storageBasePath) {
if (storageBasePath != null) {
persistentMap = new PersistentMap<>(storageBasePath.resolve("keys.bs"), storageBasePath,
PersistentMap.STRING_CODER, PersistentMap.LONG_CODER);
final Path oldKeysCsvFile = keyCompressionFile(storageBasePath);
if (persistentMap.isEmpty() && Files.exists(oldKeysCsvFile)) {
upgradeFromCsvFile(oldKeysCsvFile);
} else {
init();
}
} else {
// some unit tests disable the persistence and use this class memory only
persistentMap = null;
}
}
private void init(final Path file) throws RuntimeIOException {
private void init() {
persistentMap.forAll((string, integer) -> {
intToStringPut(integer.intValue(), string);
stringToInt.put(string, integer.intValue());
bytesToInt.put(new ByteArray(string), integer.intValue());
});
}
private Path keyCompressionFile(final Path dataDirectory) {
return dataDirectory.resolve("keys.csv");
}
private void upgradeFromCsvFile(final Path file) throws RuntimeIOException {
try {
Files.createDirectories(file.getParent());
if (!Files.exists(file)) {
Files.createFile(file);
}
try (final BufferedReader reader = new BufferedReader(
new InputStreamReader(new FileInputStream(file.toFile()), StandardCharsets.UTF_8))) {
String line;
while ((line = reader.readLine()) != null) {
// TODO use more efficient code to read the CSV -> improves startup time
final String[] tokens = line.split(Pattern.quote(SEPARATOR));
if (tokens.length == 2) {
@@ -132,6 +146,7 @@ public class UniqueStringIntegerPairs {
intToStringPut(integer, string);
stringToInt.put(string, integer);
bytesToInt.put(new ByteArray(string), integer);
persistentMap.putValue(string, (long) integer);
}
}
}
@@ -150,20 +165,13 @@ public class UniqueStringIntegerPairs {
intToString.set(value, string);
}
void put(final String string, final int integer) {
void putStringAndInteger(final String string, final int integer) {
if (stringToInt.containsKey(string) || (intToString.size() > integer && intToString.get(integer) != null)) {
throw new IllegalArgumentException("Unique key constraint violation for (" + string + ", " + integer + ")");
}
if (file != null) {
try (final Writer writer = new OutputStreamWriter(new FileOutputStream(file.toFile(), APPEND),
StandardCharsets.UTF_8)) {
writer.write(string + SEPARATOR + integer + "\n");
} catch (final IOException e) {
throw new RuntimeIOException(e);
}
if (persistentMap != null) {
persistentMap.putValue(string, (long) integer);
}
intToStringPut(integer, string);
@@ -171,16 +179,16 @@ public class UniqueStringIntegerPairs {
bytesToInt.put(new ByteArray(string), integer);
}
public Integer get(final String string) {
public Integer getInt(final String string) {
return stringToInt.get(string);
}
public String getKey(final int second) {
public String getString(final int second) {
return intToString.get(second);
}
public Integer getHighestInteger() {
Integer getHighestInteger() {
return intToString.size() == 0 ? -1 : intToString.size() - 1;
}
@@ -189,7 +197,7 @@ public class UniqueStringIntegerPairs {
synchronized (stringToInt) {
if (!stringToInt.containsKey(string)) {
final Integer second = mappingFunction.apply(string);
put(string, second);
putStringAndInteger(string, second);
}
}
}
@@ -197,6 +205,17 @@ public class UniqueStringIntegerPairs {
return stringToInt.get(string);
}
public Integer computeIfAbsent(final String string) {
if (!stringToInt.containsKey(string)) {
synchronized (stringToInt) {
final Integer integer = intToString.size();
putStringAndInteger(string, integer);
}
}
return stringToInt.get(string);
}
public Integer computeIfAbsent(final byte[] bytes, final int start, final int endExclusive,
final Function<String, String> postProcess) {
@@ -204,33 +223,9 @@ public class UniqueStringIntegerPairs {
Integer result = bytesToInt.get(byteArray);
if (result == null) {
final String string = new String(bytes, start, endExclusive - start, StandardCharsets.UTF_8);
result = computeIfAbsentWithPostprocess(string, postProcess);
final String postProcessed = postProcess.apply(string);
result = computeIfAbsent(postProcessed);
}
return result;
}
public Integer computeIfAbsentWithPostprocess(final String string, final Function<String, String> postProcess) {
final ByteArray byteArray = new ByteArray(string);
Integer result = bytesToInt.get(byteArray);
if (result == null) {
synchronized (stringToInt) {
if (!bytesToInt.containsKey(byteArray)) {
final String normalizedString = postProcess.apply(string);
result = get(normalizedString);
if (result != null) {
return result;
}
final Integer integer = intToString.size();
put(normalizedString, integer); // adds the normalized String to stringToInt and bytesToInt
bytesToInt.put(byteArray, integer); // also add the original String to bytesToInt, because it is
// used as cache
}
result = bytesToInt.get(byteArray);
}
}
return result;
}
}

View File

@@ -7,6 +7,7 @@ import java.util.LinkedHashMap;
import java.util.Map;
import org.lucares.pdb.api.StringCompressor;
import org.lucares.pdb.api.Tag;
import org.lucares.pdb.api.Tags;
import org.lucares.pdb.api.TagsBuilder;
import org.lucares.pdb.api.UniqueStringIntegerPairs;
@@ -72,30 +73,31 @@ public class MemoryScale {
}
}
private static Object createTag() {
private static Tag createTag() {
return Tags.STRING_COMPRESSOR.createTag("", "");
}
private static Object createTags0() {
private static Tags createTags0() {
return new Tags();
}
private static Object createTags1() {
return Tags.createAndAddToDictionary("k1", "v1");
private static Tags createTags1() {
return Tags.STRING_COMPRESSOR.createAndAddToDictionary("k1", "v1");
}
private static Object createTags2() {
return Tags.createAndAddToDictionary("k1", "v1", "k2", "v2");
private static Tags createTags2() {
return Tags.STRING_COMPRESSOR.createAndAddToDictionary("k1", "v1", "k2", "v2");
}
private static Object createTags6() {
TagsBuilder result = TagsBuilder.create();
result = result.add("k1", "v1");
result = result.add("k2", "v2");
result = result.add("k3", "v3");
result = result.add("k4", "v4");
result = result.add("k5", "v5");
result = result.add("k6", "v6");
private static Tags createTags6() {
final TagsBuilder result = TagsBuilder.create();
result.add(Tags.STRING_COMPRESSOR.createTag("k1", "v1"));
result.add(Tags.STRING_COMPRESSOR.createTag("k2", "v2"));
result.add(Tags.STRING_COMPRESSOR.createTag("k3", "v3"));
result.add(Tags.STRING_COMPRESSOR.createTag("k4", "v4"));
result.add(Tags.STRING_COMPRESSOR.createTag("k5", "v5"));
result.add(Tags.STRING_COMPRESSOR.createTag("k6", "v6"));
return result.build();
}

View File

@@ -11,9 +11,9 @@ import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Assertions;
import org.lucares.utils.file.FileUtils;
public class StringCompressorTest {
@@ -34,8 +34,8 @@ public class StringCompressorTest {
final StringCompressor keyValueCompressor = StringCompressor.create(dataDirectory.resolve("key.csv"));
final String value = "foo";
final Integer intFoo = keyValueCompressor.put(value);
final String actual = keyValueCompressor.get(intFoo);
final Integer intFoo = keyValueCompressor.putString(value);
final String actual = keyValueCompressor.getString(intFoo);
Assertions.assertEquals(value, actual);
}
@@ -47,12 +47,12 @@ public class StringCompressorTest {
{
final StringCompressor keyValueCompressor = StringCompressor.create(database);
keyValueCompressor.put(value);
keyValueCompressor.putString(value);
}
{
final StringCompressor keyValueCompressor = StringCompressor.create(database);
keyValueCompressor.get(0);
keyValueCompressor.getString(0);
}
}

View File

@@ -21,7 +21,7 @@ final class StringInserter implements Callable<List<String>> {
final List<String> result = new ArrayList<>();
for (int i = 0; i < numEntries; i++) {
final String s = UUID.randomUUID().toString();
stringCompressor.put(s);
stringCompressor.putString(s);
result.add(s);
}
return result;

View File

@@ -5,9 +5,9 @@ import java.nio.file.Files;
import java.nio.file.Path;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Assertions;
import org.lucares.utils.file.FileUtils;
public class UniqueStringIntegerPairsTest {
@@ -33,16 +33,16 @@ public class UniqueStringIntegerPairsTest {
{
final UniqueStringIntegerPairs usip = new UniqueStringIntegerPairs(database);
usip.put(first, second);
Assertions.assertEquals(second, usip.get(first));
Assertions.assertEquals(first, usip.getKey(second));
usip.putStringAndInteger(first, second);
Assertions.assertEquals(second, usip.getInt(first));
Assertions.assertEquals(first, usip.getString(second));
}
{
final UniqueStringIntegerPairs usip = new UniqueStringIntegerPairs(database);
Assertions.assertEquals(second, usip.get(first));
Assertions.assertEquals(first, usip.getKey(second));
Assertions.assertEquals(second, usip.getInt(first));
Assertions.assertEquals(first, usip.getString(second));
}
}
@@ -53,11 +53,11 @@ public class UniqueStringIntegerPairsTest {
final Integer second = 1;
final UniqueStringIntegerPairs usip = new UniqueStringIntegerPairs(database);
usip.put(first, second);
usip.putStringAndInteger(first, second);
try {
// cannot add another pair with the first key
final int another = second + 1;
usip.put(first, another);
usip.putStringAndInteger(first, another);
Assertions.fail("expected an IllegalArgumentException");
} catch (final IllegalArgumentException e) {
// expected
@@ -66,7 +66,7 @@ public class UniqueStringIntegerPairsTest {
try {
// cannot add another pair with the same second value
final String another = first + 1;
usip.put(another, second);
usip.putStringAndInteger(another, second);
Assertions.fail("expected an IllegalArgumentException");
} catch (final IllegalArgumentException e) {
// expected

View File

@@ -2,11 +2,15 @@
# For additional information regarding the format and rule options, please see:
# https://github.com/browserslist/browserslist#queries
# For the full list of supported browsers by the Angular framework, please see:
# https://angular.io/guide/browser-support
# You can see what browsers were selected by your queries by running:
# npx browserslist
> 0.5%
last 2 versions
last 1 Chrome version
last 1 Firefox version
last 2 Edge major versions
last 2 Safari major versions
last 2 iOS major versions
Firefox ESR
not dead
not IE 9-11 # For IE 9-11 support, remove 'not'.

View File

@@ -8,6 +8,9 @@ indent_size = 2
insert_final_newline = true
trim_trailing_whitespace = true
[*.ts]
quote_type = single
[*.md]
max_line_length = off
trim_trailing_whitespace = false

5
pdb-js/.gitignore vendored
View File

@@ -1,6 +1,7 @@
# See http://help.github.com/ignore-files/ for more about ignoring files.
# compiled output
/dist
/build
/bin
/tmp
@@ -14,7 +15,6 @@
# profiling files
chrome-profiler-events*.json
speed-measure-plugin*.json
# IDEs and editors
/.idea
@@ -34,6 +34,7 @@ speed-measure-plugin*.json
.history/*
# misc
/.angular/cache
/.sass-cache
/connect.lock
/coverage
@@ -46,4 +47,6 @@ testem.log
# System Files
.DS_Store
Thumbs.db
#
/env.sh

4
pdb-js/.vscode/extensions.json vendored Normal file
View File

@@ -0,0 +1,4 @@
{
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=827846
"recommendations": ["angular.ng-template"]
}

20
pdb-js/.vscode/launch.json vendored Normal file
View File

@@ -0,0 +1,20 @@
{
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "ng serve",
"type": "pwa-chrome",
"request": "launch",
"preLaunchTask": "npm: start",
"url": "http://localhost:4200/"
},
{
"name": "ng test",
"type": "chrome",
"request": "launch",
"preLaunchTask": "npm: test",
"url": "http://localhost:9876/debug.html"
}
]
}

42
pdb-js/.vscode/tasks.json vendored Normal file
View File

@@ -0,0 +1,42 @@
{
// For more information, visit: https://go.microsoft.com/fwlink/?LinkId=733558
"version": "2.0.0",
"tasks": [
{
"type": "npm",
"script": "start",
"isBackground": true,
"problemMatcher": {
"owner": "typescript",
"pattern": "$tsc",
"background": {
"activeOnStart": true,
"beginsPattern": {
"regexp": "(.*?)"
},
"endsPattern": {
"regexp": "bundle generation complete"
}
}
}
},
{
"type": "npm",
"script": "test",
"isBackground": true,
"problemMatcher": {
"owner": "typescript",
"pattern": "$tsc",
"background": {
"activeOnStart": true,
"beginsPattern": {
"regexp": "(.*?)"
},
"endsPattern": {
"regexp": "bundle generation complete"
}
}
}
}
]
}

View File

@@ -3,16 +3,19 @@
"version": 1,
"newProjectRoot": "projects",
"projects": {
"pdb": {
"pdb-js": {
"projectType": "application",
"schematics": {
"@schematics/angular:component": {
"style": "scss"
},
"@schematics/angular:application": {
"strict": true
}
},
"root": "",
"sourceRoot": "src",
"prefix": "pdb",
"prefix": "app",
"architect": {
"build": {
"builder": "@angular-devkit/build-angular:browser",
@@ -22,69 +25,68 @@
"main": "src/main.ts",
"polyfills": "src/polyfills.ts",
"tsConfig": "tsconfig.app.json",
"inlineStyleLanguage": "scss",
"assets": [
"src/favicon.ico",
"src/assets"
],
"styles": [
"src/styles.scss",
"src/custom-theme.scss"
"src/styles.scss"
],
"scripts": [],
"vendorChunk": true,
"extractLicenses": false,
"buildOptimizer": false,
"sourceMap": true,
"optimization": false,
"namedChunks": true
"scripts": []
},
"configurations": {
"production": {
"budgets": [
{
"type": "initial",
"maximumWarning": "500kb",
"maximumError": "2mb"
},
{
"type": "anyComponentStyle",
"maximumWarning": "2kb",
"maximumError": "4kb"
}
],
"fileReplacements": [
{
"replace": "src/environments/environment.ts",
"with": "src/environments/environment.prod.ts"
}
],
"optimization": true,
"outputHashing": "all",
"sourceMap": false,
"namedChunks": false,
"extractLicenses": true,
"vendorChunk": false,
"buildOptimizer": true,
"budgets": [
{
"type": "initial",
"maximumWarning": "2mb",
"maximumError": "5mb"
"outputHashing": "all"
},
{
"type": "anyComponentStyle",
"maximumWarning": "6kb",
"maximumError": "10kb"
}
]
"development": {
"buildOptimizer": false,
"optimization": false,
"vendorChunk": true,
"extractLicenses": false,
"sourceMap": true,
"namedChunks": true
}
},
"defaultConfiguration": ""
"defaultConfiguration": "production"
},
"serve": {
"builder": "@angular-devkit/build-angular:dev-server",
"options": {
"browserTarget": "pdb:build",
"proxyConfig": "proxy.conf.json"
},
"configurations": {
"production": {
"browserTarget": "pdb:build:production"
}
"browserTarget": "pdb-js:build:production"
},
"development": {
"browserTarget": "pdb-js:build:development"
}
},
"defaultConfiguration": "development"
},
"extract-i18n": {
"builder": "@angular-devkit/build-angular:extract-i18n",
"options": {
"browserTarget": "pdb:build"
"browserTarget": "pdb-js:build"
}
},
"test": {
@@ -94,6 +96,7 @@
"polyfills": "src/polyfills.ts",
"tsConfig": "tsconfig.spec.json",
"karmaConfig": "karma.conf.js",
"inlineStyleLanguage": "scss",
"assets": [
"src/favicon.ico",
"src/assets"
@@ -103,33 +106,8 @@
],
"scripts": []
}
},
"lint": {
"builder": "@angular-devkit/build-angular:tslint",
"options": {
"tsConfig": [
"tsconfig.app.json",
"tsconfig.spec.json",
"e2e/tsconfig.json"
],
"exclude": [
"**/node_modules/**"
]
}
},
"e2e": {
"builder": "@angular-devkit/build-angular:protractor",
"options": {
"protractorConfig": "e2e/protractor.conf.js",
"devServerTarget": "pdb:serve"
},
"configurations": {
"production": {
"devServerTarget": "pdb:serve:production"
}
}
}
}
}},
"defaultProject": "pdb"
}

View File

@@ -2,7 +2,7 @@ import java.nio.file.Files
import java.nio.file.Paths
plugins {
id("com.github.node-gradle.node") version "3.1.0"
id("com.github.node-gradle.node") version "3.4.0"
}

View File

@@ -1,32 +0,0 @@
// @ts-check
// Protractor configuration file, see link for more information
// https://github.com/angular/protractor/blob/master/lib/config.ts
const { SpecReporter } = require('jasmine-spec-reporter');
/**
* @type { import("protractor").Config }
*/
exports.config = {
allScriptsTimeout: 11000,
specs: [
'./src/**/*.e2e-spec.ts'
],
capabilities: {
'browserName': 'chrome'
},
directConnect: true,
baseUrl: 'http://localhost:4200/',
framework: 'jasmine',
jasmineNodeOpts: {
showColors: true,
defaultTimeoutInterval: 30000,
print: function() {}
},
onPrepare() {
require('ts-node').register({
project: require('path').join(__dirname, './tsconfig.json')
});
jasmine.getEnv().addReporter(new SpecReporter({ spec: { displayStacktrace: true } }));
}
};

View File

@@ -1,23 +0,0 @@
import { AppPage } from './app.po';
import { browser, logging } from 'protractor';
describe('workspace-project App', () => {
let page: AppPage;
beforeEach(() => {
page = new AppPage();
});
it('should display welcome message', () => {
page.navigateTo();
expect(page.getTitleText()).toEqual('pdb app is running!');
});
afterEach(async () => {
// Assert that there are no errors emitted from the browser
const logs = await browser.manage().logs().get(logging.Type.BROWSER);
expect(logs).not.toContain(jasmine.objectContaining({
level: logging.Level.SEVERE,
} as logging.Entry));
});
});

View File

@@ -1,11 +0,0 @@
import { browser, by, element } from 'protractor';
export class AppPage {
navigateTo() {
return browser.get(browser.baseUrl) as Promise<any>;
}
getTitleText() {
return element(by.css('app-root .content span')).getText() as Promise<string>;
}
}

View File

@@ -1,13 +0,0 @@
{
"extends": "../tsconfig.base.json",
"compilerOptions": {
"outDir": "../out-tsc/e2e",
"module": "commonjs",
"target": "es2018",
"types": [
"jasmine",
"jasminewd2",
"node"
]
}
}

View File

@@ -9,16 +9,28 @@ module.exports = function (config) {
require('karma-jasmine'),
require('karma-chrome-launcher'),
require('karma-jasmine-html-reporter'),
require('karma-coverage-istanbul-reporter'),
require('karma-coverage'),
require('@angular-devkit/build-angular/plugins/karma')
],
client: {
jasmine: {
// you can add configuration options for Jasmine here
// the possible options are listed at https://jasmine.github.io/api/edge/Configuration.html
// for example, you can disable the random execution with `random: false`
// or set a specific seed with `seed: 4321`
},
clearContext: false // leave Jasmine Spec Runner output visible in browser
},
coverageIstanbulReporter: {
dir: require('path').join(__dirname, './coverage/pdb'),
reports: ['html', 'lcovonly', 'text-summary'],
fixWebpackSourcePaths: true
jasmineHtmlReporter: {
suppressAll: true // removes the duplicated traces
},
coverageReporter: {
dir: require('path').join(__dirname, './coverage/pdb-js'),
subdir: '.',
reporters: [
{ type: 'html' },
{ type: 'text-summary' }
]
},
reporters: ['progress', 'kjhtml'],
port: 9876,

24014
pdb-js/package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,52 +1,46 @@
{
"name": "pdb",
"name": "pdb-js",
"version": "0.0.0",
"scripts": {
"ng": "ng",
"start": "ng serve",
"build": "ng build",
"releasebuild": "ng build --configuration production",
"watch": "ng build --watch --configuration development",
"test": "ng test",
"lint": "ng lint",
"e2e": "ng e2e"
"e2e": "ng e2e",
"releasebuild": "ng build --configuration production"
},
"private": true,
"dependencies": {
"@angular/animations": "^12.1.1",
"@angular/cdk": "^12.1.1",
"@angular/common": "^12.1.1",
"@angular/compiler": "^12.1.1",
"@angular/core": "^12.1.1",
"@angular/forms": "^12.1.1",
"@angular/material": "^12.1.1",
"@angular/platform-browser": "^12.1.1",
"@angular/platform-browser-dynamic": "^12.1.1",
"@angular/router": "^12.1.1",
"@angular/animations": "^14.2.5",
"@angular/cdk": "^14.2.4",
"@angular/common": "^14.2.5",
"@angular/compiler": "^14.2.5",
"@angular/core": "^14.2.5",
"@angular/forms": "^14.2.5",
"@angular/material": "^14.2.4",
"@angular/platform-browser": "^14.2.5",
"@angular/platform-browser-dynamic": "^14.2.5",
"@angular/router": "^14.2.5",
"moment": "^2.29.1",
"rxjs": "^6.6.7",
"rxjs": "~7.5.0",
"rxjs-compat": "^6.6.7",
"tslib": "^2.3.0",
"zone.js": "~0.11.4"
},
"devDependencies": {
"@angular-devkit/build-angular": "^12.1.1",
"@angular/cli": "^12.1.1",
"@angular/compiler-cli": "^12.1.1",
"@angular/language-service": "^12.1.1",
"@types/jasmine": "^3.8.1",
"@types/jasminewd2": "^2.0.10",
"@types/node": "^12.20.16",
"codelyzer": "^6.0.2",
"jasmine-core": "~3.6.0",
"jasmine-spec-reporter": "~5.0.0",
"karma": "~6.3.4",
"@angular-devkit/build-angular": "^14.2.5",
"@angular/cli": "^14.2.5",
"@angular/compiler-cli": "^14.2.5",
"@types/jasmine": "~3.10.0",
"@types/node": "^12.11.1",
"jasmine-core": "~3.10.0",
"karma": "~6.3.0",
"karma-chrome-launcher": "~3.1.0",
"karma-coverage-istanbul-reporter": "~3.0.2",
"karma-coverage": "~2.1.0",
"karma-jasmine": "~4.0.0",
"karma-jasmine-html-reporter": "^1.7.0",
"protractor": "~7.0.0",
"ts-node": "~8.5.4",
"tslint": "^6.1.0",
"typescript": "4.3.5"
"karma-jasmine-html-reporter": "~1.7.0",
"typescript": "~4.6.2"
}
}

View File

@@ -10,7 +10,7 @@
</mat-select>
</mat-form-field>
<pdb-image-toggle *ngIf="filterBy" images="{{compareImages}}" (valueChanged)="comparatorChanged($event)"></pdb-image-toggle>
<pdb-image-toggle *ngIf="filterBy" images="{{compareImages}}" (valueChanged)="comparatorChanged()"></pdb-image-toggle>
<mat-form-field *ngIf="filterBy" class="pdb-form-number-long">
<input matInput type="number" placeholder="" min="0" [(ngModel)]="value">

View File

@@ -15,10 +15,10 @@
</mat-form-field>
<pdb-image-toggle images="{{ascDescImages}}" (valueChanged)="sortOrderChanged($event)"></pdb-image-toggle>
<pdb-image-toggle images="{{ascDescImages}}" (valueChanged)="sortOrderChanged()"></pdb-image-toggle>
<pdb-gallery-filter-view (valueChanged)="filterChanged($event)"></pdb-gallery-filter-view>
<pdb-gallery-filter-view (valueChanged)="filterChanged()"></pdb-gallery-filter-view>
<mat-checkbox [(ngModel)]="showDetails">Show Details</mat-checkbox>
</div>

View File

@@ -46,8 +46,7 @@ export class GalleryFilterView {
@Output()
valueChanged : EventEmitter<GalleryFilterData> = new EventEmitter<GalleryFilterData>();
comparatorChanged(newComparator: string){
this._comparator = newComparator;
comparatorChanged(){
this.valueChanged.emit(undefined);
}
@@ -115,7 +114,7 @@ export class GalleryViewComponent implements OnInit {
showDetails = false;
@ViewChild(GalleryFilterView)
filter : GalleryFilterView;
filter! : GalleryFilterView;
ascDescImages = JSON.stringify([
{
@@ -133,7 +132,7 @@ export class GalleryViewComponent implements OnInit {
constructor(private plotService: PlotService, private snackBar: MatSnackBar) {
}
showError(message) {
showError(message: string) {
this.snackBar.open(message, "", {
duration: 5000,
verticalPosition: 'top'
@@ -193,8 +192,8 @@ export class GalleryViewComponent implements OnInit {
filterPredicate(galleryItem: GalleryItem){
const predicate = this.filter.comparator == 'LESS_EQUAL'
? function(a, b) { return a <= b; }
: function(a, b) { return a >= b; };
? function(a: number, b: number) { return a <= b; }
: function(a: number, b: number) { return a >= b; };
const millis = this.timeUnitToMillis(this.filter.value, this.filter.unit);
switch(this.filter.filterBy){
case 'NONE':
@@ -214,7 +213,7 @@ export class GalleryViewComponent implements OnInit {
throw "unhandled option: " + this.filter.filterBy;
}
timeUnitToMillis(value, unit)
timeUnitToMillis(value: number, unit: string)
{
switch(unit){
case 'NO_UNIT':
@@ -260,7 +259,7 @@ export class GalleryViewComponent implements OnInit {
return;
}
const splitByValue = this.splitByValuesQueue.pop();
const splitByValue = <string>this.splitByValuesQueue.pop();
let request = masterRequest.copy();
request.query = "("+request.query+") and " + splitByField+"="+ splitByValue;
@@ -283,7 +282,7 @@ export class GalleryViewComponent implements OnInit {
that.sortAndFilterGallery();
that.renderGalleryRecursively(masterRequest, splitByField);
},
error => {
(error:any) => {
that.showError(error.error.message);
});
}
@@ -295,12 +294,11 @@ export class GalleryViewComponent implements OnInit {
get sortBy(): string { return this._sortBy; }
sortOrderChanged(event){
this.sortOrder = event;
sortOrderChanged(){
this.sortAndFilterGallery();
}
filterChanged(event){
filterChanged(){
this.sortAndFilterGallery();
}
@@ -313,7 +311,7 @@ export class GalleryViewComponent implements OnInit {
})
export class GalleryItemView {
@Input()
data: GalleryItem;
data!: GalleryItem;
@Input()
showDetails: boolean = false;
@@ -338,7 +336,7 @@ export class GalleryItem {
imageUrl: string;
stats: PlotResponseStats;
splitByValue : string;
show : boolean;
show : boolean = false;
constructor(splitByValue: string, plotResponse: PlotResponse){
this.thumbnailUrl = plotResponse.thumbnailUrl;

View File

@@ -17,7 +17,7 @@ export class ImageToggleComponent implements OnInit {
text = undefined;
_states : Array<any>;
_states : Array<any> = [];
constructor() { }
@@ -34,7 +34,7 @@ export class ImageToggleComponent implements OnInit {
return this._states[this.index].title;
}
toggle(event){
toggle(event: any){
this.index = (this.index+1) % this._states.length;
this.text = this._states[this.index].text;
this.valueChanged.emit(this._states[this.index].value);

View File

@@ -17,7 +17,7 @@
<td><div class="{{ pointTypeClass(stat.dashTypeAndColor) }}" title="{{ stat.name }}"></div></td>
<td>{{ stat.values }}</td>
<td>{{ utils.format(stat.average, valueFormat) }}</td>
<td *ngFor="let key of percentilesToPlot.keys()">{{utils.format(stat.percentiles[percentilesToPlot.get(key)], valueFormat)}}</td>
<td *ngFor="let key of percentilesToPlot.keys()">{{percentileStat(key, stat)}}</td>
<td>{{ utils.format(stat.maxValue, valueFormat)}}</td>
</tr>
</table>
@@ -51,7 +51,7 @@
<tr *ngFor="let statsRow of stats.dataSeriesStats">
<td><div class="{{ pointTypeClass(statsRow.dashTypeAndColor) }}" title="{{ statsRow.name }}"></div></td>
<td *ngFor="let statsCol of stats.dataSeriesStats">
{{ utils.toPercent(statsRow.percentiles[percentilesToPlot.get(p)] / statsCol.percentiles[percentilesToPlot.get(p)]) }}
{{ toPercent(statsRow, statsCol, p) }}
</td>
</tr>
</table>

View File

@@ -1,5 +1,5 @@
import { Component, OnInit, Input, Output, ViewChild, EventEmitter, ɵpublishDefaultGlobalUtils } from '@angular/core';
import { DashTypeAndColor, PlotResponseStats } from '../plot.service';
import { DashTypeAndColor, PlotResponseStats, DataSeriesStats } from '../plot.service';
import { UtilService } from '../utils.service';
@Component({
@@ -10,7 +10,7 @@ import { UtilService } from '../utils.service';
export class PlotDetailsComponent {
@Input()
stats: PlotResponseStats;
stats!: PlotResponseStats;
hasPercentiles = false;
@@ -25,6 +25,9 @@ export class PlotDetailsComponent {
ngOnInit() {
this.hasPercentiles = false;
this.percentilesToPlot.clear();
console.log("plotdetails.stats: " + JSON.stringify(this.stats));
if (this.stats) {
for (let i = 0; i < this.stats.dataSeriesStats.length; i++)
{
const stat = this.stats.dataSeriesStats[i];
@@ -39,6 +42,7 @@ export class PlotDetailsComponent {
}
}
}
}
percentile(value: number): string {
return this.utils.format(value, this.valueFormat);
@@ -49,4 +53,28 @@ export class PlotDetailsComponent {
+" plot-details-plotType_"+typeAndColor.pointType
+" plot-details-plotType_"+typeAndColor.color.toLocaleLowerCase();
}
toPercent(statsRow: DataSeriesStats, statsCol: DataSeriesStats, key: string){
const percentile = this.percentilesToPlot.get(key);
if (percentile) {
const rowValue = (<any>statsRow.percentiles)[percentile];
const columnValue = (<any>statsCol.percentiles)[percentile];
if (rowValue !== undefined && columnValue !== undefined) {
return this.utils.toPercent(rowValue / columnValue);
}
}
return "?%"
}
percentileStat(key: string, stat: DataSeriesStats): string{
const plotKey = this.percentilesToPlot.get(key);
if (plotKey !== undefined){
console.log("stat.percentiles: ", stat.percentiles);
const value = (<any>stat.percentiles)[plotKey];
if (value !== undefined){
return this.utils.format(value, this.valueFormat);
}
}
return "no value";
}
}

View File

@@ -29,7 +29,7 @@
[style.width]="zoomInSliderStyleWidth"
></div>
</div>
<div *ngIf="showStats" class="plot-view-overlay">
<div *ngIf="showStats && stats != null" class="plot-view-overlay">
<pdb-plot-details [stats]="stats"></pdb-plot-details>
<div class="top-right">
<img

View File

@@ -15,10 +15,10 @@ export class PlotViewComponent implements OnInit {
readonly gnuplotBMargin = 76; // The bottom margin configured for gnuplot
imageUrl : string;
stats : PlotResponseStats;
imageUrl! : string;
stats: PlotResponseStats | null = null;
axes: AxesTypes;
axes!: AxesTypes;
@Output()
zoomRange : EventEmitter<SelectionRange> = new EventEmitter<SelectionRange>();
@@ -49,20 +49,20 @@ export class PlotViewComponent implements OnInit {
hideZoomInSlider() {
this.zoomInSliderStyleDisplay = "none";
}
update_cursor(event){
update_cursor(event: MouseEvent){
//$('#result-image').css('cursor', this.isInPlot(event) ? 'crosshair' : 'default');
this.imageCursor = this.isInPlot(event) ? 'crosshair' : 'default';
}
imageWidth() {
return Math.floor(document.getElementById('result-image').offsetWidth);
return Math.floor(document.getElementById('result-image')!.offsetWidth);
}
imageHeight() {
return Math.floor(document.getElementById('result-image').offsetHeight);
return Math.floor(document.getElementById('result-image')!.offsetHeight);
}
positionInImage(event) : any {
positionInImage(event: MouseEvent) : any {
const rect = (<HTMLImageElement>document.getElementById('result-image')).getBoundingClientRect();
const x= event.clientX - rect.left;
const y= event.clientY - rect.top;
@@ -73,7 +73,7 @@ export class PlotViewComponent implements OnInit {
return {x: x, y: y};
}
isInPlot(event) : boolean{
isInPlot(event: MouseEvent) : boolean{
const pos = this.positionInImage(event);
return pos.x > this.gnuplotLMargin
@@ -82,7 +82,7 @@ export class PlotViewComponent implements OnInit {
&& pos.y < this.imageHeight()- this.gnuplotBMargin;
}
isInImage(event) : boolean{
isInImage(event: MouseEvent) : boolean{
const pos = this.positionInImage(event);
return pos.x > 0
@@ -91,7 +91,7 @@ export class PlotViewComponent implements OnInit {
&& pos.y < this.imageHeight();
}
dragStart(event) {
dragStart(event: MouseEvent) {
//console.log("dragStart inPlot: " + this.isInPlot(event));
event.preventDefault();
@@ -104,7 +104,7 @@ export class PlotViewComponent implements OnInit {
}
}
dragging(event) {
dragging(event: MouseEvent) {
//console.log("dragging " + this.isInPlot(event));
this.update_cursor(event);
@@ -128,7 +128,7 @@ export class PlotViewComponent implements OnInit {
}
}
dragStop(event) {
dragStop(event: MouseEvent) {
if (this.in_drag_mode){
this.in_drag_mode = false;
this.hideZoomInSlider();
@@ -152,7 +152,7 @@ export class PlotViewComponent implements OnInit {
}
}
dragAbort(event) {
dragAbort(event: MouseEvent) {
//console.log("drag_abort");
if (this.in_drag_mode && !this.isInImage(event)) {
this.in_drag_mode = false;
@@ -162,7 +162,7 @@ export class PlotViewComponent implements OnInit {
}
}
zoomByScroll(event) {
zoomByScroll(event: WheelEvent) {
if (this.isInImage(event) && event.deltaY != 0 && this.axes.hasXAxis(DataType.Time)) {
this.in_drag_mode = false;
this.hideZoomInSlider();

View File

@@ -1,6 +1,6 @@
import { Injectable, OnInit } from '@angular/core';
import { HttpClient, HttpParams } from '@angular/common/http';
import { Observable } from 'rxjs/Observable';
import { Observable } from 'rxjs';
import { map } from 'rxjs/operators';
@@ -17,7 +17,8 @@ export class PlotService {
this.plotTypes.push(new PlotType("CUM_DISTRIBUTION", "Cumulative Distribution", "cumulative-distribution-chart", true, DataType.Percent, DataType.Duration));
this.plotTypes.push(new PlotType("HISTOGRAM", "Histogram", "histogram", true, DataType.HistogramBin, DataType.HistogramCount));
this.plotTypes.push(new PlotType("PARALLEL", "Parallel Requests", "parallel-requests-chart", true, DataType.Time, DataType.Count));
this.plotTypes.push(new PlotType("BAR", "Bar", "bar-chart", true, DataType.Group, DataType.Count));
this.plotTypes.push(new PlotType("BAR", "Bar (number of requests)", "bar-chart", true, DataType.Group, DataType.Count));
this.plotTypes.push(new PlotType("BOX", "Box", "box-plot", true, DataType.Time, DataType.Duration));
this.plotTypes.push(new PlotType("HEATMAP", "Heatmap", "heatmap", false, DataType.Other, DataType.Other));
this.plotTypes.push(new PlotType("CONTOUR", "Contour", "contour-chart", false, DataType.Time, DataType.Duration));
@@ -80,7 +81,7 @@ export class PlotService {
const q = "("+query+") and "+splitBy+"=";
return this.autocomplete(q, q.length+1, ResultMode.FULL_VALUES).pipe(
map(
autocompleteResult => autocompleteResult.proposals.map(suggestion => suggestion.value)
(autocompleteResult: AutocompleteResult) => autocompleteResult.proposals.map((suggestion:Suggestion) => suggestion.value)
)
);
}
@@ -181,39 +182,44 @@ export class AxesTypes {
const x2 = this.getXAxisDataType(2);
const y2 = this.getYAxisDataType(2);
return "x1:"+DataType[x1]+ " y1:"+DataType[y1]+ " x2:"+DataType[x2]+ " y2:"+DataType[y2];
return (x1 ? "x1:"+DataType[x1] : "")
+ (y1 ? " y1:"+DataType[y1] : "")
+ (x2 ? " x2:"+DataType[x2] : "")
+ (y2 ? " y2:"+DataType[y2] : "");
}
}
export class Suggestion {
value: string;
newQuery: string;
newCaretPosition: number;
constructor(
public value: string,
public newQuery: string,
public newCaretPosition: number){}
}
export class AutocompleteResult{
proposals: Array<Suggestion>;
constructor(public proposals: Array<Suggestion>){}
}
export class PlotRequest {
query : string;
height : number;
width : number;
thumbnailMaxWidth : number = 300;
thumbnailMaxHeight : number = 200;
groupBy : Array<string>;
limitBy : string;
limit : number;
y1:YAxisDefinition;
y2:YAxisDefinition;
dateRange : string;
aggregates : Array<string>;
keyOutside : boolean = false;
generateThumbnail : boolean;
intervalUnit: string;
intervalValue: number;
renderBarChartTickLabels: boolean = false;
constructor(
public query : string,
public height : number,
public width : number,
public thumbnailMaxWidth : number = 300,
public thumbnailMaxHeight : number = 200,
public groupBy : Array<string>,
public limitBy : string,
public limit : number,
public y1:YAxisDefinition,
public y2:YAxisDefinition|undefined,
public dateRange : string,
public aggregates : Array<string>,
public keyOutside : boolean = false,
public generateThumbnail : boolean,
public intervalUnit: string,
public intervalValue: number,
public renderBarChartTickLabels: boolean = false){}
copy(): PlotRequest {
return JSON.parse(JSON.stringify(this));
@@ -221,46 +227,52 @@ export class PlotRequest {
}
export class YAxisDefinition {
axisScale : string;
rangeMin : number;
rangeMax : number;
rangeUnit : string;
constructor(
public axisScale : string,
public rangeMin : number,
public rangeMax : number,
public rangeUnit : string){}
}
export class PlotResponse {
imageUrl : string;
stats : PlotResponseStats;
thumbnailUrl : string;
constructor(
public imageUrl : string,
public stats : PlotResponseStats,
public thumbnailUrl : string){}
}
export class PlotResponseStats {
maxValue : number;
values : number;
average : number ;
plottedValues : number;
maxAvgRatio: number;
dataSeriesStats : Array<DataSeriesStats>;
constructor(
public maxValue : number,
public values : number,
public average : number,
public plottedValues : number,
public maxAvgRatio: number,
public dataSeriesStats : Array<DataSeriesStats>){}
}
export class DataSeriesStats {
name: string;
values : number;
maxValue : number;
average : number;
plottedValues : number;
dashTypeAndColor: DashTypeAndColor;
percentiles: Map<string, number>
constructor(
public name: string,
public values : number,
public maxValue : number,
public average : number ,
public plottedValues : number,
public dashTypeAndColor: DashTypeAndColor,
public percentiles: Map<string, number>){}
}
export class DashTypeAndColor {
color: string;
pointType: number;
constructor(
public color: string,
public pointType: number) {}
}
export class FilterDefaults {
groupBy: Array<string>;
fields: Array<string>;
splitBy: string;
constructor(
public groupBy: Array<string>,
public fields: Array<string>,
public splitBy: string){}
}
export enum ResultMode {

View File

@@ -1,5 +1,5 @@
import { Component, OnInit, Input, ViewChild } from '@angular/core';
import {FormControl} from '@angular/forms';
import {UntypedFormControl} from '@angular/forms';
import {Observable} from 'rxjs';
import {startWith, map} from 'rxjs/operators';
import {MatAutocompleteTrigger } from '@angular/material/autocomplete';
@@ -12,16 +12,16 @@ import { PlotService, PlotType, AutocompleteResult, Suggestion, ResultMode } fro
})
export class QueryAutocompleteComponent implements OnInit {
queryField = new FormControl('');
queryField = new UntypedFormControl('');
suggestions = new FormControl();
suggestions = new UntypedFormControl();
filteredSuggestions: Observable<Suggestion[]>;
filteredSuggestions!: Observable<Suggestion[]>;
query : string;
query : string = "";
@ViewChild(MatAutocompleteTrigger)
autocomplete: MatAutocompleteTrigger;
autocomplete!: MatAutocompleteTrigger;
constructor(private plotService: PlotService) {}
@@ -72,12 +72,12 @@ export class QueryAutocompleteComponent implements OnInit {
that.autocomplete.openPanel();
},
error => console.log(error)
(error:any) => console.log(error)
);
}
displaySuggestion(suggestion?: Suggestion): string | undefined {
displaySuggestion(suggestion?: Suggestion): string {
//console.log("suggestion: "+JSON.stringify(suggestion));
return suggestion ? suggestion.newQuery : undefined;
return suggestion ? suggestion.newQuery : '';
}
}

View File

@@ -17,7 +17,7 @@ export class UtilService {
}
}
formatMs(valueInMs):string {
formatMs(valueInMs: number):string {
const ms = Math.floor(valueInMs % 1000);
const s = Math.floor((valueInMs / 1000) % 60);
const m = Math.floor((valueInMs / (60*1000)) % 60);

View File

@@ -31,7 +31,7 @@
</mat-form-field>
<pdb-limit-by #limitbycomponent></pdb-limit-by>
<div [hidden]="!selectedPlotTypesContains('BAR')">
<div [hidden]="!selectedPlotTypesContains(['BAR', 'BOX'])">
<mat-form-field >
<mat-label>Intervals (only bar chart):</mat-label>
<mat-select [(value)]="intervalUnit">
@@ -46,7 +46,7 @@
</mat-select>
</mat-form-field>
</div>
<div [hidden]="!selectedPlotTypesContains('BAR')">
<div [hidden]="!selectedPlotTypesContains(['BAR', 'BOX'])">
<mat-checkbox [(ngModel)]="renderBarChartTickLabels">Show Tic Labels (bar chart)</mat-checkbox>
</div>
<pdb-y-axis-definition #y1AxisDefinitionComponent yIndex="1"></pdb-y-axis-definition>

View File

@@ -1,7 +1,6 @@
import { Component, OnInit, ViewChild } from '@angular/core';
import { PlotService, PlotType, PlotRequest, PlotResponse, TagField, FilterDefaults, DataType, YAxisDefinition, AxesTypes } from '../plot.service';
import { Observable } from 'rxjs/Observable';
import { FormControl, Validators } from '@angular/forms';
import { UntypedFormControl, Validators } from '@angular/forms';
import { MatSnackBar } from '@angular/material/snack-bar';
import { LimitByComponent } from '../limit-by/limit-by.component';
import { YAxisDefinitionComponent } from '../y-axis-definition/y-axis-definition.component';
@@ -19,36 +18,36 @@ export class VisualizationPageComponent implements OnInit {
readonly DATE_PATTERN = "YYYY-MM-DD HH:mm:ss"; // for moment-JS
dateRange = new FormControl('2019-10-05 00:00:00 - 2019-10-11 23:59:59');
dateRange = new UntypedFormControl('2019-10-05 00:00:00 - 2019-10-11 23:59:59');
selectedPlotType = [];
plotTypes: Array<any>;
selectedPlotType = new Array<PlotType>();
plotTypes: Array<any> = [];
tagFields: Array<TagField> = new Array<TagField>();
groupBy = new Array<TagField>();
@ViewChild('limitbycomponent')
private limitbycomponent : LimitByComponent;
private limitbycomponent! : LimitByComponent;
@ViewChild('y1AxisDefinitionComponent', { read: YAxisDefinitionComponent })
private y1AxisDefinitionComponent : YAxisDefinitionComponent;
private y1AxisDefinitionComponent! : YAxisDefinitionComponent;
@ViewChild('y2AxisDefinitionComponent', { read: YAxisDefinitionComponent })
private y2AxisDefinitionComponent : YAxisDefinitionComponent;
private y2AxisDefinitionComponent! : YAxisDefinitionComponent;
@ViewChild('query')
query: QueryAutocompleteComponent;
query!: QueryAutocompleteComponent;
@ViewChild('plotView')
plotView: PlotViewComponent;
plotView!: PlotViewComponent;
@ViewChild('galleryView')
galleryView: GalleryViewComponent;
galleryView!: GalleryViewComponent;
enableGallery = false;
splitBy = null;
splitBy : TagField | undefined = undefined;
y2AxisAvailable = false;
intervalUnit = 'NO_INTERVAL';
@@ -58,7 +57,7 @@ export class VisualizationPageComponent implements OnInit {
constructor(private plotService: PlotService, private snackBar: MatSnackBar) {
}
showError(message) {
showError(message:string) {
this.snackBar.open(message, "", {
duration: 5000,
verticalPosition: 'top'
@@ -70,12 +69,12 @@ export class VisualizationPageComponent implements OnInit {
this.plotTypes = this.plotService.getPlotTypes();
this.selectedPlotType.push(this.plotTypes[0]);
that.plotService.getFilterDefaults().subscribe(function(filterDefaults) {
that.plotService.getFilterDefaults().subscribe(function(filterDefaults: FilterDefaults) {
filterDefaults.fields.forEach(function(name) {
filterDefaults.fields.forEach(function(name:string) {
that.tagFields.push(new TagField(name));
},
error => {
(error: any) => {
that.showError(error.error.message);
});
@@ -93,8 +92,8 @@ export class VisualizationPageComponent implements OnInit {
this.y2AxisAvailable = axesTypes.y.length == 2;
}
selectedPlotTypesContains(plotTypeId: string){
return this.selectedPlotType.filter(pt => pt.id == plotTypeId).length > 0;
selectedPlotTypesContains(plotTypeIds: Array<string>){
return this.selectedPlotType.filter(pt => plotTypeIds.includes(pt.id)).length > 0;
}
@@ -103,12 +102,16 @@ export class VisualizationPageComponent implements OnInit {
}
gallery(){
if (this.splitBy != null){
const that = this;
this.plotView.imageUrl = '';
that.plotView.stats = null;
that.galleryView.show=true;
const request = this.createPlotRequest();
this.galleryView.renderGallery(request, this.splitBy.name);
} else {
console.error("variable splitBy was null when rendering gallery");
}
}
getAxes() : AxesTypes {
@@ -141,43 +144,47 @@ export class VisualizationPageComponent implements OnInit {
const request = this.createPlotRequest();
this.plotService.sendPlotRequest(request).subscribe(function(plotResponse){
console.log("response: " + JSON.stringify(plotResponse));
that.plotView.imageUrl = "http://"+window.location.hostname+':'+window.location.port+'/'+plotResponse.imageUrl;
that.plotView.stats = plotResponse.stats;
this.plotService.sendPlotRequest(request).subscribe({
next: (plotResponse: PlotResponse) => {
this.plotView.imageUrl = "http://"+window.location.hostname+':'+window.location.port+'/'+plotResponse.imageUrl;
this.plotView.stats = plotResponse.stats;
document.dispatchEvent(new Event("invadersPause", {}));
},
error => {
that.plotView.imageUrl = '';
that.plotView.stats = null;
that.showError(error.error.message);
error: (error:any) => {
this.plotView.imageUrl = '';
this.plotView.stats = null;
this.showError(error.error.message);
document.dispatchEvent(new Event("invadersPause", {}));
}
});
}
createPlotRequest(): PlotRequest {
const aggregates = [];
const aggregates = new Array<string>();
this.selectedPlotType.forEach(a => aggregates.push(a.id));
const y1 = this.y1AxisDefinitionComponent.getAxisDefinition();
const y2 = this.y2AxisDefinitionComponent ? this.y2AxisDefinitionComponent.getAxisDefinition() : undefined;
const results = document.getElementById("results");
const request = new PlotRequest();
request.query = this.query.query;
request.height = document.getElementById("results").offsetHeight-1;
request.width = document.getElementById("results").offsetWidth-1;
request.groupBy = this.groupBy.map(o => o.name);
request.limitBy = this.limitbycomponent.limitBy;
request.limit = this.limitbycomponent.limit;
request.y1 = y1;
request.y2 = y2;
request.dateRange = this.dateRangeAsString();
request.aggregates = aggregates;
request.keyOutside = false;
request.generateThumbnail = this.enableGallery;
request.intervalUnit = this.intervalUnit;
request.intervalValue = this.intervalValue;
request.renderBarChartTickLabels = this.renderBarChartTickLabels;
const request = new PlotRequest(
this.query.query,
results != null ? results.offsetHeight-1: 1024,
results != null ? results.offsetWidth-1 : 1024,
300, // thumbnailMaxWidth
200, // thumbnailMaxHeight
this.groupBy.map(o => o.name),
this.limitbycomponent.limitBy,
this.limitbycomponent.limit,
y1,
y2,
this.dateRangeAsString(), // dateRange
aggregates, // aggregates
false, // keyOutside
this.enableGallery, // generateThumbnail
this.intervalUnit,
this.intervalValue,
this.renderBarChartTickLabels);
return request;
}
@@ -222,7 +229,6 @@ export class VisualizationPageComponent implements OnInit {
}
parseDateRange(dateRangeAsString : string) : DateRange {
if (dateRangeAsString) {
const startDate = moment(dateRangeAsString.slice(0, 19));
const endDate = moment(dateRangeAsString.slice(22, 41));
@@ -232,7 +238,6 @@ export class VisualizationPageComponent implements OnInit {
duration: moment.duration(endDate.diff(startDate))
};
}
}
setDateRange(startDate: any, endDate: any) {
const formattedStartDate = startDate.format(this.DATE_PATTERN);
@@ -260,9 +265,11 @@ export class DateRange {
duration: any;
}
/*
export class AxesUsed {
x1: DataType;
y1: DataType;
x2: DataType;
y2: DataType;
}
*/

View File

@@ -20,11 +20,6 @@ export class YAxisDefinitionComponent {
}
getAxisDefinition() {
const result = new YAxisDefinition();
result.axisScale = this.yAxisScale;
result.rangeMin = this.minYValue;
result.rangeMax = this.maxYValue;
result.rangeUnit = this.yAxisUnit;
return result;
return new YAxisDefinition(this.yAxisScale,this.minYValue,this.maxYValue,this.yAxisUnit);
}
}

View File

@@ -0,0 +1,25 @@
<svg xmlns="http://www.w3.org/2000/svg" width="512" height="512" viewBox="0 0 32 32">
<!-- Icon for the BOX plot type: two box-plot glyphs (boxes with whiskers and a
     median line) drawn in front of a chart-axes "L" path. The 0.5 translate
     aligns the 2px/3px strokes to the pixel grid so edges render crisply. -->
<g transform="translate(0.5,0.5)">
<rect x="5" y="8" width="8" height="15" style="fill: none; stroke: black;stroke-width:2;" />
<line x1="6" y1="3" x2="12" y2="3" style="stroke:black;stroke-width:2;"/>
<line x1="9" y1="8" x2="9" y2="3" style="stroke:black;stroke-width:2;"/>
<line x1="5" y1="15" x2="13" y2="15" style="stroke:black;stroke-width:2;"/>
<line x1="9" y1="23" x2="9" y2="28" style="stroke:black;stroke-width:2;"/>
<line x1="6" y1="28" x2="12" y2="28" style="stroke:black;stroke-width:2;"/>
<rect x="18" y="6" width="8" height="13" style="fill: none; stroke: black; stroke-width:2;" />
<line x1="19" y1="2" x2="25" y2="2" style="stroke:black;stroke-width:2;"/>
<line x1="22" y1="6" x2="22" y2="2" style="stroke:black;stroke-width:2;"/>
<line x1="18" y1="13" x2="26" y2="13" style="stroke:black;stroke-width:2;"/>
<line x1="22" y1="19" x2="22" y2="26" style="stroke:black;stroke-width:2;"/>
<line x1="19" y1="26" x2="25" y2="26" style="stroke:black;stroke-width:2;"/>
<path d="M1,0
L1,30
L32,30"
style="stroke:black; stroke-width: 3px; fill:none;"/>
</g>
</svg>

After

Width:  |  Height:  |  Size: 1.2 KiB

View File

@@ -1,24 +0,0 @@
@use '~@angular/material' as mat;
// Plus imports for other components in your app.
// Include the common styles for Angular Material. We include this here so that you only
// have to load a single css file for Angular Material in your app.
// Be sure that you only ever include this mixin once!
@include mat.core();
// Define the palettes for your theme using the Material Design palettes available in palette.scss
// (imported above). For each palette, you can optionally specify a default, lighter, and darker
// hue. Available color palettes: https://material.io/design/color/
$candy-app-primary: mat.define-palette(mat.$blue-palette);
$candy-app-accent: mat.define-palette(mat.$blue-palette, A200, A100, A400);
// The warn palette is optional (defaults to red).
$candy-app-warn: mat.define-palette(mat.$red-palette);
// Create the theme object (a Sass map containing all of the palettes).
$candy-app-theme: mat.define-light-theme($candy-app-primary, $candy-app-accent, $candy-app-warn);
// Include theme styles for core and each component used in your app.
// Alternatively, you can import and @include the theme mixins for each component
// that you are using.
@include mat.all-component-themes($candy-app-theme);

View File

@@ -8,8 +8,8 @@
* file.
*
* The current setup is for so-called "evergreen" browsers; the last versions of browsers that
* automatically update themselves. This includes Safari >= 10, Chrome >= 55 (including Opera),
* Edge >= 13 on the desktop, and iOS 10 and Chrome on mobile.
* automatically update themselves. This includes recent versions of Safari, Chrome (including
* Opera), Edge on the desktop, and iOS and Chrome on mobile.
*
* Learn more in https://angular.io/guide/browser-support
*/
@@ -18,16 +18,6 @@
* BROWSER POLYFILLS
*/
/** IE10 and IE11 requires the following for NgClass support on SVG elements */
// import 'classlist.js'; // Run `npm install --save classlist.js`.
/**
* Web Animations `@angular/platform-browser/animations`
* Only required if AnimationBuilder is used within the application and using IE/Edge or Safari.
* Standard animation support in Angular DOES NOT require any polyfills (as of Angular 6.0).
*/
// import 'web-animations-js'; // Run `npm install --save web-animations-js`.
/**
* By default, zone.js will patch all possible macroTask and DomEvents
* user can disable parts of macroTask/DomEvents patch by setting following flags
@@ -35,7 +25,7 @@
* will put import in the top of bundle, so user need to create a separate file
* in this directory (for example: zone-flags.ts), and put the following flags
* into that file, and then add the following code before importing zone.js.
* import './zone-flags.ts';
* import './zone-flags';
*
* The flags allowed in zone-flags.ts are listed here.
*

View File

@@ -1,5 +1,39 @@
/* You can add global styles to this file, and also import other style files */
@use '~@angular/material' as mat;
// Custom Theming for Angular Material
// For more information: https://material.angular.io/guide/theming
@use '@angular/material' as mat;
// Plus imports for other components in your app.
// Include the common styles for Angular Material. We include this here so that you only
// have to load a single css file for Angular Material in your app.
// Be sure that you only ever include this mixin once!
@include mat.core();
// Define the palettes for your theme using the Material Design palettes available in palette.scss
// (imported above). For each palette, you can optionally specify a default, lighter, and darker
// hue. Available color palettes: https://material.io/design/color/
$candy-app-primary: mat.define-palette(mat.$blue-palette);
$candy-app-accent: mat.define-palette(mat.$blue-palette, A200, A100, A400);
// The warn palette is optional (defaults to red).
$candy-app-warn: mat.define-palette(mat.$red-palette);
// Create the theme object. A theme consists of configurations for individual
// theming systems such as "color" or "typography".
$candy-app-theme: mat.define-light-theme((
color: (
primary: $candy-app-primary,
accent: $candy-app-accent,
warn: $candy-app-warn,
)
));
// Include theme styles for core and each component used in your app.
// Alternatively, you can import and @include the theme mixins for each component
// that you are using.
@include mat.all-component-themes($candy-app-theme);
/*
blue
#CBD7F4
@@ -20,11 +54,6 @@ grey
*/
$background-color: #CBD7F4;
//@import '~@angular/material/prebuilt-themes/deeppurple-amber.css';
@import 'custom-theme.scss';
*, body {
font-family: Arial;
font-size: 14px;

View File

@@ -7,13 +7,19 @@ import {
platformBrowserDynamicTesting
} from '@angular/platform-browser-dynamic/testing';
declare const require: any;
declare const require: {
context(path: string, deep?: boolean, filter?: RegExp): {
<T>(id: string): T;
keys(): string[];
};
};
// First, initialize the Angular testing environment.
getTestBed().initTestEnvironment(
BrowserDynamicTestingModule,
platformBrowserDynamicTesting()
platformBrowserDynamicTesting(),
);
// Then we find all the tests.
const context = require.context('./', true, /\.spec\.ts$/);
// And load the modules.

View File

@@ -1,5 +1,6 @@
/* To learn more about this file see: https://angular.io/config/tsconfig. */
{
"extends": "./tsconfig.base.json",
"extends": "./tsconfig.json",
"compilerOptions": {
"outDir": "./out-tsc/app",
"types": []
@@ -10,9 +11,5 @@
],
"include": [
"src/**/*.d.ts"
],
"exclude": [
"src/test.ts",
"src/**/*.spec.ts"
]
}

View File

@@ -1,26 +0,0 @@
{
"compileOnSave": false,
"compilerOptions": {
"baseUrl": "./",
"outDir": "./dist/out-tsc",
"sourceMap": true,
"declaration": false,
"downlevelIteration": true,
"experimentalDecorators": true,
"module": "es2020",
"moduleResolution": "node",
"importHelpers": true,
"target": "es2015",
"typeRoots": [
"node_modules/@types"
],
"lib": [
"es2018",
"dom"
]
},
"angularCompilerOptions": {
"fullTemplateTypeCheck": true,
"strictInjectionParameters": true
}
}

View File

@@ -1,17 +1,32 @@
/*
This is a "Solution Style" tsconfig.json file, and is used by editors and TypeScripts language server to improve development experience.
It is not intended to be used to perform a compilation.
To learn more about this file see: https://angular.io/config/solution-tsconfig.
*/
/* To learn more about this file see: https://angular.io/config/tsconfig. */
{
"files": [],
"references": [
{
"path": "./tsconfig.app.json"
},
{
"path": "./tsconfig.spec.json"
}
"compileOnSave": false,
"compilerOptions": {
"baseUrl": "./",
"outDir": "./dist/out-tsc",
"forceConsistentCasingInFileNames": true,
"strict": true,
"noImplicitOverride": true,
"noPropertyAccessFromIndexSignature": true,
"noImplicitReturns": true,
"noFallthroughCasesInSwitch": true,
"sourceMap": true,
"declaration": false,
"downlevelIteration": true,
"experimentalDecorators": true,
"moduleResolution": "node",
"importHelpers": true,
"target": "es2020",
"module": "es2020",
"lib": [
"es2020",
"dom"
]
},
"angularCompilerOptions": {
"enableI18nLegacyMessageIdFormat": false,
"strictInjectionParameters": true,
"strictInputAccessModifiers": true,
"strictTemplates": true
}
}

View File

@@ -1,10 +1,10 @@
/* To learn more about this file see: https://angular.io/config/tsconfig. */
{
"extends": "./tsconfig.base.json",
"extends": "./tsconfig.json",
"compilerOptions": {
"outDir": "./out-tsc/spec",
"types": [
"jasmine",
"node"
"jasmine"
]
},
"files": [

View File

@@ -1,155 +0,0 @@
{
"extends": "tslint:recommended",
"rules": {
"align": {
"options": [
"parameters",
"statements"
]
},
"array-type": false,
"arrow-parens": false,
"arrow-return-shorthand": true,
"curly": true,
"deprecation": {
"severity": "warning"
},
"component-class-suffix": true,
"contextual-lifecycle": true,
"directive-class-suffix": true,
"directive-selector": [
true,
"attribute",
"app",
"camelCase"
],
"component-selector": [
true,
"element",
"app",
"kebab-case"
],
"eofline": true,
"import-blacklist": [
true,
"rxjs/Rx"
],
"import-spacing": true,
"indent": {
"options": [
"spaces"
]
},
"interface-name": false,
"max-classes-per-file": false,
"max-line-length": [
true,
140
],
"member-access": false,
"member-ordering": [
true,
{
"order": [
"static-field",
"instance-field",
"static-method",
"instance-method"
]
}
],
"no-consecutive-blank-lines": false,
"no-console": [
true,
"debug",
"info",
"time",
"timeEnd",
"trace"
],
"no-empty": false,
"no-inferrable-types": [
true,
"ignore-params"
],
"no-non-null-assertion": true,
"no-redundant-jsdoc": true,
"no-switch-case-fall-through": true,
"no-var-requires": false,
"object-literal-key-quotes": [
true,
"as-needed"
],
"object-literal-sort-keys": false,
"ordered-imports": false,
"quotemark": [
true,
"single"
],
"semicolon": {
"options": [
"always"
]
},
"space-before-function-paren": {
"options": {
"anonymous": "never",
"asyncArrow": "always",
"constructor": "never",
"method": "never",
"named": "never"
}
},
"trailing-comma": false,
"no-conflicting-lifecycle": true,
"no-host-metadata-property": true,
"no-input-rename": true,
"no-inputs-metadata-property": true,
"no-output-native": true,
"no-output-on-prefix": true,
"no-output-rename": true,
"no-outputs-metadata-property": true,
"template-banana-in-box": true,
"template-no-negated-async": true,
"typedef-whitespace": {
"options": [
{
"call-signature": "nospace",
"index-signature": "nospace",
"parameter": "nospace",
"property-declaration": "nospace",
"variable-declaration": "nospace"
},
{
"call-signature": "onespace",
"index-signature": "onespace",
"parameter": "onespace",
"property-declaration": "onespace",
"variable-declaration": "onespace"
}
]
},
"use-lifecycle-interface": true,
"use-pipe-transform-interface": true
, "variable-name": {
"options": [
"ban-keywords",
"check-format",
"allow-pascal-case"
]
},
"whitespace": {
"options": [
"check-branch",
"check-decl",
"check-operator",
"check-separator",
"check-type",
"check-typecast"
]
}
},
"rulesDirectory": [
"codelyzer"
]
}

View File

@@ -18,7 +18,9 @@ public enum Aggregate {
*/
CUM_DISTRIBUTION("Cumulative Distribution"),
HISTOGRAM("Histogram");
HISTOGRAM("Histogram"),
BOX("Box");
private final String axisLabel;

View File

@@ -30,7 +30,7 @@ public class BarChartAggregatorForIntervals implements CustomAggregator, Indexed
public BarChartAggregatorForIntervals(final PlotSettings settings) {
this.settings = settings;
this.interval = settings.getInterval().get();
buckets = interval.getBuckets();
buckets = interval.getBuckets(AtomicLong::new);
}
@Override

View File

@@ -0,0 +1,76 @@
package org.lucares.pdb.plot.api;
import java.util.Locale;
import java.util.UUID;
import org.lucares.collections.LongObjHashMap;
import org.lucares.recommind.logs.GnuplotAxis;
/**
 * Aggregates values into per-interval percentile distributions for rendering a
 * box (candlestick) chart: one box per time bucket with min/p25/median/p75/max.
 *
 * The data is emitted as an inline gnuplot datablock (see {@link #asCsv}), not
 * written to a file, which is why {@link #getAggregatedData()} returns null.
 */
public class BoxAggregator implements CustomAggregator {

    // Unique gnuplot inline-datablock name. Dashes are stripped because gnuplot
    // datablock identifiers may not contain '-'.
    private final String dataName = "$data" + UUID.randomUUID().toString().replace("-", "");

    private final Interval interval;

    // One percentile aggregator per interval bucket, keyed by the bucket's
    // middle time in epoch milliseconds (see Interval#toBucketMiddleTime).
    private final LongObjHashMap<PercentilesAggregator> buckets;

    public BoxAggregator(final PlotSettings settings) {
        this.interval = settings.getInterval().get();
        this.buckets = interval.getMiddleTimeBuckets(PercentilesAggregator::new);
    }

    @Override
    public void addValue(final long epochMilli, final long value) {
        final long bucketId = interval.toBucketMiddleTime(epochMilli);
        final PercentilesAggregator bucket = buckets.get(bucketId);
        if (bucket != null) {
            bucket.addValue(epochMilli, value);
        }
        // else: timestamp falls outside the pre-created bucket range of the
        // plot's date range; ignore it instead of throwing an NPE
    }

    @Override
    public AggregatedData getAggregatedData() {
        // not needed - usually this method is used to write the data to file, but box
        // charts use inline data (see asCsv)
        return null;
    }

    @Override
    public Aggregate getType() {
        return Aggregate.BOX;
    }

    /**
     * @return the gnuplot datablock name (e.g. {@code $data<uuid>}) under which
     *         the CSV from {@link #asCsv} is published
     */
    public String getDataName() {
        return dataName;
    }

    public Interval getInterval() {
        return interval;
    }

    /**
     * Renders all non-empty buckets as CSV lines for gnuplot's candlesticks
     * style: {@code timeInSeconds,min,p25,median,p75,max}.
     *
     * @param renderLabels currently unused; kept for symmetry with the other
     *                     aggregators' CSV renderers
     */
    public String asCsv(final boolean renderLabels) {
        final StringBuilder csv = new StringBuilder();
        buckets.forEachOrdered((final long bucketId, final PercentilesAggregator percentilesAggregator) -> {
            final Percentiles percentiles = percentilesAggregator.getPercentiles();
            if (percentiles.get("25.000") != null) { // skip buckets that received no values
                csv.append(String.format(Locale.US, "%d,%d,%d,%d,%d,%d", //
                        bucketId / 1000, // gnuplot time axes expect seconds, not millis
                        percentiles.get("0.000"), //
                        percentiles.get("25.000"), //
                        percentiles.get("50.000"), //
                        percentiles.get("75.000"), //
                        percentiles.get("100.000")//
                ));
                csv.append("\n");
            }
        });
        return csv.toString();
    }

    /**
     * Tick labels for the x axis. Currently a stub that renders nothing.
     * TODO implement analogous to the bar-chart label rendering.
     */
    public String renderLabels(final GnuplotAxis xAxis) {
        return "";
    }
}

View File

@@ -0,0 +1,99 @@
package org.lucares.pdb.plot.api;
import java.nio.file.Path;
import java.util.Collection;
import java.util.Optional;
import org.lucares.recommind.logs.AxisSettings;
import org.lucares.recommind.logs.AxisTime;
import org.lucares.recommind.logs.DataSeries;
import org.lucares.recommind.logs.GnuplotAxis;
import org.lucares.recommind.logs.GnuplotLineType;
import org.lucares.recommind.logs.GnuplotSettings;
import org.lucares.recommind.logs.LineStyle;
import org.lucares.recommind.logs.Type;
/**
 * Handles plotting of box (candlestick) charts: configures the axes, emits the
 * inline gnuplot datablock produced by {@link BoxAggregator}, and adds two
 * candlestick plots - one for the box/whiskers, one for the median line.
 */
public class BoxChartHandler extends AggregateHandler {

    @Override
    Type getAxisType(final GnuplotAxis axis) {
        switch (axis) {
        case X1:
        case X2:
            return Type.Time;
        case Y1:
        case Y2:
            return Type.Duration;
        default:
            throw new IllegalArgumentException("Unexpected value: " + axis);
        }
    }

    @Override
    Aggregate getAggregateType() {
        return Aggregate.BOX;
    }

    @Override
    AxisSettings createXAxisSettings(final GnuplotSettings settings, final Collection<DataSeries> dataSeries) {
        final AxisSettings result = AxisTime.createXAxis(settings);
        result.setAxis(getxAxis());
        // only the primary x axis draws the grid, otherwise it would be drawn twice
        result.setShowGrid(getxAxis() == GnuplotAxis.X1);
        return result;
    }

    @Override
    String beforePlot(final CustomAggregator aggregator, final GnuplotSettings settings) {
        final StringBuilder result = new StringBuilder();
        final BoxAggregator boxAggregator = (BoxAggregator) aggregator;

        // publish the per-bucket percentiles as an inline gnuplot datablock
        appendfln(result, "%s <<EOD", boxAggregator.getDataName());
        appendln(result, boxAggregator.asCsv(settings.isRenderLabels()));
        appendln(result, "EOD");

        if (settings.isRenderLabels() && settings.isRenderBarChartTickLabels()) {
            // The rendered label text is data, not a format string: a literal '%'
            // in a label would make String.format throw, so append it verbatim.
            appendln(result, boxAggregator.renderLabels(getxAxis()));
        }
        return result.toString();
    }

    @Override
    String addPlot(final CustomAggregator aggregator, final LineStyle lineStyle, final Optional<String> title) {
        final BoxAggregator boxAggregator = (BoxAggregator) aggregator;

        // columns: 1=time, 3:2:6:5 = open(p25):low(min):high(max):close(p75),
        // box width in seconds; whiskerbars draws the min/max whiskers
        final String candlestick = formatln(
                "'%s' using 1:3:2:6:5:(%.1f) %s axes %s with %s whiskerbars 0.5 fs empty %s linewidth 1, \\", //
                boxAggregator.getDataName(), //
                width(boxAggregator.getInterval().getIntervalTimeUnit()), //
                gnuplotTitle(title), //
                gnuplotXYAxis(), //
                GnuplotLineType.BOX, //
                lineStyle.asGnuplotLineStyle()//
        );

        // degenerate candlestick (open=low=high=close=median, column 4) renders
        // the median as a single horizontal line across the box
        final String median = formatln(
                "'%s' using 1:4:4:4:4:(%.1f) axes %s with candlesticks notitle fs empty %s linewidth 2, \\", //
                boxAggregator.getDataName(), //
                width(boxAggregator.getInterval().getIntervalTimeUnit()), //
                gnuplotXYAxis(), //
                lineStyle.asGnuplotLineStyle());

        return candlestick + median;
    }

    /**
     * Box width in seconds (the unit of gnuplot time axes). All interval units
     * are whole seconds, so the division is exact; use floating-point division
     * to avoid accidental truncation should a sub-second unit ever be added.
     */
    private double width(final IntervalTimeUnit intervalTimeUnit) {
        return intervalTimeUnit.toMillis() / 1000.0;
    }

    @Override
    CustomAggregator createCustomAggregator(final Path tmpDir, final PlotSettings plotSettings,
            final long fromEpochMilli, final long toEpochMilli) {
        // a box chart only makes sense with a time interval; returning null
        // signals "no aggregator" to the caller
        if (plotSettings.getInterval().isPresent()) {
            return new BoxAggregator(plotSettings);
        } else {
            return null;
        }
    }
}

View File

@@ -8,90 +8,27 @@ import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.Locale;
import org.lucares.collections.LongLongConsumer;
import org.lucares.collections.LongLongHashMap;
import org.lucares.pdb.api.RuntimeIOException;
public class CumulativeDistributionCustomAggregator implements CustomAggregator {
private final static int POINTS = 500;
private static final class ToPercentiles implements LongLongConsumer {
private long cumulativeCount = 0;
private long maxValue = 0;
private final Percentiles percentiles = new Percentiles(POINTS);
private final double stepSize;
private double lastPercentile;
private double nextPercentile;
private final long totalValues;
public ToPercentiles(final long totalValues) {
this.totalValues = totalValues;
stepSize = 100.0 / POINTS;
nextPercentile = stepSize;
}
@Override
public void accept(final long duration, final long count) {
maxValue = duration;
cumulativeCount += count;
final double newPercentile = cumulativeCount * 100.0 / totalValues;
if (newPercentile >= nextPercentile) {
double currentPercentile = lastPercentile + stepSize;
while (currentPercentile <= newPercentile) {
final String percentile = String.format(Locale.US, "%.3f", currentPercentile);
percentiles.put(percentile, duration);
currentPercentile += stepSize;
}
nextPercentile = currentPercentile;
lastPercentile = currentPercentile - stepSize;
}
}
public Percentiles getPercentiles() {
return percentiles;
}
public void collect(final LongLongHashMap map) {
map.forEachOrdered(this);
percentiles.put("100.000", maxValue);
}
}
// the rather large initial capacity should prevent too many grow&re-hash phases
private final LongLongHashMap map = new LongLongHashMap(5_000, 0.75);
private long totalValues = 0;
private final Path tmpDir;
private final PercentilesAggregator percentilesAggregator;
public CumulativeDistributionCustomAggregator(final Path tmpDir) {
this.tmpDir = tmpDir;
percentilesAggregator = new PercentilesAggregator();
}
@Override
public void addValue(final long epochMilli, final long value) {
map.compute(value, 0, (__, l) -> l + 1);
totalValues++;
percentilesAggregator.addValue(epochMilli, value);
}
public Percentiles getPercentiles() {
final ToPercentiles toPercentiles = new ToPercentiles(totalValues);
toPercentiles.collect(map);
final Percentiles result = toPercentiles.getPercentiles();
return result;
return percentilesAggregator.getPercentiles();
}
@Override
@@ -100,17 +37,14 @@ public class CumulativeDistributionCustomAggregator implements CustomAggregator
final char separator = ',';
final char newline = '\n';
final ToPercentiles toPercentiles = new ToPercentiles(totalValues);
toPercentiles.collect(map);
final File dataFile = File.createTempFile("data", ".dat", tmpDir.toFile());
try (final Writer output = new BufferedWriter(
new OutputStreamWriter(new FileOutputStream(dataFile), StandardCharsets.US_ASCII));) {
final StringBuilder data = new StringBuilder();
if (map.size() > 0) {
if (percentilesAggregator.hasValues()) {
// compute the percentiles
toPercentiles.getPercentiles().forEach((percentile, value) -> {
percentilesAggregator.getPercentiles().forEach((percentile, value) -> {
data.append(percentile);
data.append(separator);

View File

@@ -3,8 +3,9 @@ package org.lucares.pdb.plot.api;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Supplier;
import org.lucares.collections.LongObjHashMap;
import org.lucares.pdb.api.DateTimeRange;
import org.lucares.pdb.datastore.internal.LongToDateBucket;
@@ -51,6 +52,30 @@ public class Interval {
return bucketer.toPartitionId(epochMilli);
}
/**
 * Maps a timestamp to the middle of its interval bucket, i.e. to
 * {@code floor(epochMilli / unit) * unit + unit/2} where {@code unit} is the
 * bucket width in milliseconds.
 *
 * Delegates to {@link IntervalTimeUnit#toMillis()} instead of repeating the
 * per-unit millisecond table (the previous inline int arithmetic was easy to
 * get wrong / overflow). NOTE(review): buckets are anchored at the epoch, so
 * WEEK buckets do not start on a calendar weekday boundary and MONTH/YEAR use
 * the fixed 30/365-day approximations of toMillis() - intervals spanning
 * calendar boundaries are therefore slightly off, matching the previous
 * behavior.
 *
 * @param epochMilli timestamp in epoch milliseconds
 * @return the middle of the enclosing bucket, in epoch milliseconds
 * @throws IllegalArgumentException for an unsupported interval unit
 */
public long toBucketMiddleTime(final long epochMilli) {
    final long unitMillis = intervalTimeUnit.toMillis();
    return epochMilli - epochMilli % unitMillis + unitMillis / 2;
}
public IntervalTimeUnit getIntervalTimeUnit() {
return intervalTimeUnit;
}
@@ -72,13 +97,30 @@ public class Interval {
return null;
}
public Map<String, AtomicLong> getBuckets() {
final Map<String, AtomicLong> result = new HashMap<>();
public <T> Map<String, T> getBuckets(final Supplier<T> initialValueSupplier) {
final Map<String, T> result = new HashMap<>();
final List<String> bucketIds = bucketer.toPartitionIds(dateTimeRange.getStart(), dateTimeRange.getEnd(),
intervalTimeUnit.toChronoUnit());
for (final String bucketId : bucketIds) {
result.put(bucketId, new AtomicLong(0));
result.put(bucketId, initialValueSupplier.get());
}
return result;
}
public <T> LongObjHashMap<T> getMiddleTimeBuckets(final Supplier<T> initialValueSupplier) {
final LongObjHashMap<T> result = new LongObjHashMap<>();
long current = dateTimeRange.getStart().toInstant().toEpochMilli();
final long end = dateTimeRange.getEnd().toInstant().toEpochMilli() + intervalTimeUnit.toMillis();
while (current <= end) {
final long id = toBucketMiddleTime(current);
System.out.println("add bucket: " + id);
result.put(id, initialValueSupplier.get());
current += intervalTimeUnit.toMillis();
}
return result;

View File

@@ -34,4 +34,25 @@ public enum IntervalTimeUnit {
throw new IllegalArgumentException("Unexpected value: " + this);
}
}
/**
 * Length of this interval unit in milliseconds.
 *
 * MONTH and YEAR are fixed-width approximations (30 and 365 days) rather than
 * calendar lengths.
 *
 * @throws IllegalArgumentException for an unsupported unit
 */
public long toMillis() {
    final long second = 1000L;
    final long minute = 60 * second;
    final long hour = 60 * minute;
    final long day = 24 * hour;
    switch (this) {
    case SECOND:
        return second;
    case MINUTE:
        return minute;
    case HOUR:
        return hour;
    case DAY:
        return day;
    case WEEK:
        return 7 * day;
    case MONTH:
        return 30 * day;
    case YEAR:
        return 365 * day;
    default:
        throw new IllegalArgumentException("Unexpected value: " + this);
    }
}
}

View File

@@ -0,0 +1,91 @@
package org.lucares.pdb.plot.api;
import java.util.Locale;
import org.lucares.collections.LongLongConsumer;
import org.lucares.collections.LongLongHashMap;
public class PercentilesAggregator {
private final static int POINTS = 500;
private static final class ToPercentiles implements LongLongConsumer {
private long cumulativeCount = 0;
private long minValue = Long.MAX_VALUE;
private long maxValue = 0;
private final Percentiles percentiles = new Percentiles(POINTS);
private final double stepSize;
private double lastPercentile;
private double nextPercentile;
private final long totalValues;
public ToPercentiles(final long totalValues) {
this.totalValues = totalValues;
stepSize = 100.0 / POINTS;
nextPercentile = 0;
}
@Override
public void accept(final long duration, final long count) {
minValue = Math.min(minValue, duration);
maxValue = duration;
cumulativeCount += count;
final double newPercentile = cumulativeCount * 100.0 / totalValues;
if (newPercentile >= nextPercentile) {
double currentPercentile = lastPercentile + stepSize;
while (currentPercentile <= newPercentile) {
final String percentile = String.format(Locale.US, "%.3f", currentPercentile);
percentiles.put(percentile, duration);
currentPercentile += stepSize;
}
nextPercentile = currentPercentile;
lastPercentile = currentPercentile - stepSize;
}
}
public Percentiles getPercentiles() {
return percentiles;
}
public void collect(final LongLongHashMap map) {
percentiles.put("0.000", 0L); // make sure "0.000" is the first element in the sorted percentiles. Will be
// overwritten with the correct value later
map.forEachOrdered(this);
percentiles.put("0.000", minValue);
percentiles.put("100.000", maxValue);
}
}
// the rather large initial capacity should prevent too many grow&re-hash phases
private final LongLongHashMap map = new LongLongHashMap(5_000, 0.75);
private long totalValues = 0;
public PercentilesAggregator() {
}
public void addValue(final long epochMilli, final long value) {
map.compute(value, 0, (__, l) -> l + 1);
totalValues++;
}
public Percentiles getPercentiles() {
final ToPercentiles toPercentiles = new ToPercentiles(totalValues);
toPercentiles.collect(map);
final Percentiles result = toPercentiles.getPercentiles();
return result;
}
/**
 * @return {@code true} when at least one value has been recorded
 */
public boolean hasValues() {
    return map.size() != 0;
}
}

View File

@@ -66,6 +66,7 @@ public class GnuplotFileGenerator implements Appender {
// appendfln(result, "set xrange [-1:1]");
appendfln(result, "set boxwidth 0.5");
// appendfln(result, "set boxwidth 3600");
appendfln(result, "set style fill transparent solid 0.5");
@@ -75,7 +76,7 @@ public class GnuplotFileGenerator implements Appender {
// render images when there are not data points on it.
appendf(result, "-1 with lines notitle");
LOGGER.debug("{}", result);
LOGGER.info("{}", result);
return result.toString();
}

View File

@@ -5,6 +5,8 @@ public enum GnuplotLineType {
Bar("boxes"),
BOX("candlesticks"),
Points("points");
private String gnuplotLineType;

View File

@@ -13,13 +13,7 @@ public class LineStyle {
}
private String asGnuplotLineStyle(final String colorHex) {
// TODO revert
// return String.format("lt rgb \"#%s\" dt %s ", //
// colorHex, //
// dashType.toGnuplotDashType()//
// );
return String.format("lt rgb \"#%s\" ", //
return String.format("linetype rgb \"#%s\" ", //
colorHex//
);
}

View File

@@ -195,7 +195,7 @@ public class Plotter {
METRICS_LOGGER.debug("wrote {} values to csv in: {}ms (ignored {} values) use millis: {}, grouping={}",
plottedValues, (System.nanoTime() - start) / 1_000_000.0, ignoredValues, Boolean.toString(useMillis),
groupResult.getGroupedBy().asString());
Tags.STRING_COMPRESSOR.asString(groupResult.getGroupedBy()));
return new CsvSummary(count, statsMaxValue, statsCurrentAverage, aggregator);
}
@@ -208,7 +208,7 @@ public class Plotter {
static String title(final Tags tags, final CsvSummary csvSummary) {
// TODO title must be computed by the AggregateHandler, because it is the only
// one knowing how many values are plotted
final StringBuilder result = new StringBuilder(tags.asValueString());
final StringBuilder result = new StringBuilder(Tags.STRING_COMPRESSOR.asValueString(tags));
final int values = csvSummary.getValues();
result.append(" (");

View File

@@ -49,8 +49,8 @@ public class CsvReaderCsvToEntryTransformer implements CsvToEntryTransformer {
final int chunksize = 1000;
Entries entries = new Entries(chunksize);
final int keyTimestamp = Tags.STRING_COMPRESSOR.put(settings.getTimeColumn());
final int keyDuration = Tags.STRING_COMPRESSOR.put(settings.getValueColumn());
final int keyTimestamp = Tags.STRING_COMPRESSOR.putString(settings.getTimeColumn());
final int keyDuration = Tags.STRING_COMPRESSOR.putString(settings.getValueColumn());
final DateTimeFormatter dateParser = createDateParser(settings.getDateTimePattern());
final Tags additionalTags = initAdditionalTags(settings);
@@ -101,7 +101,7 @@ public class CsvReaderCsvToEntryTransformer implements CsvToEntryTransformer {
final String renameTo = settings.getColumnDefinitions().getRenameTo(columnName);
final String renamedColumn = renameTo != null ? renameTo : columnName;
compressedHeaders[i] = Tags.STRING_COMPRESSOR.put(renamedColumn);
compressedHeaders[i] = Tags.STRING_COMPRESSOR.putString(renamedColumn);
final EnumSet<PostProcessors> postProcessors = settings.getColumnDefinitions()
.getPostProcessors(columnName);
final Function<String, String> postProcessFunction = PostProcessors.toFunction(postProcessors);
@@ -134,7 +134,7 @@ public class CsvReaderCsvToEntryTransformer implements CsvToEntryTransformer {
duration = Long.parseLong(val);
} else if (!val.isEmpty()) {
final Function<String, String> postProcess = postProcessersForColumns.get(i);
final int value = Tags.STRING_COMPRESSOR.put(val, postProcess);
final int value = Tags.STRING_COMPRESSOR.putString(val, postProcess);
tagsBuilder.add(key, value);
}

View File

@@ -21,8 +21,8 @@ public interface CsvToEntryTransformer {
default Tags initAdditionalTags(final CsvReaderSettings settings) {
final TagsBuilder tags = new TagsBuilder();
for (final java.util.Map.Entry<String, String> entry : settings.getAdditionalTags().entrySet()) {
final int field = Tags.STRING_COMPRESSOR.put(entry.getKey());
final int value = Tags.STRING_COMPRESSOR.put(entry.getValue());
final int field = Tags.STRING_COMPRESSOR.putString(entry.getKey());
final int value = Tags.STRING_COMPRESSOR.putString(entry.getValue());
tags.add(field, value);
}
return tags.build();

View File

@@ -3,16 +3,17 @@ package org.lucares.pdbui;
import java.util.Objects;
import java.util.concurrent.ArrayBlockingQueue;
import org.lucares.pdb.api.StringCompressor;
import org.lucares.pdb.datastore.Entries;
public class CsvToEntryTransformerFactory {
public static CsvToEntryTransformer createCsvToEntryTransformer(final ArrayBlockingQueue<Entries> queue,
final CsvReaderSettings settings) {
final CsvReaderSettings settings, final StringCompressor stringCompressor) {
if (settings.getQuoteCharacter() == null
&& Objects.equals(settings.getDateTimePattern(), CsvReaderSettings.ISO_8601)) {
return new NoCopyCsvToEntryTransformer(queue, settings);
return new NoCopyCsvToEntryTransformer(queue, settings, stringCompressor);
} else {
return new CsvReaderCsvToEntryTransformer(queue, settings);
}

View File

@@ -12,6 +12,7 @@ import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.lucares.pdb.api.StringCompressor;
import org.lucares.pdb.datastore.Entries;
import org.lucares.performance.db.PerformanceDb;
import org.lucares.utils.file.FileUtils;
@@ -30,8 +31,11 @@ public class CsvUploadHandler implements PropertyKeys, DisposableBean {
private final PerformanceDb performanceDb;
public CsvUploadHandler(final PerformanceDb performanceDb) {
private final StringCompressor stringCompressor;
public CsvUploadHandler(final PerformanceDb performanceDb, final StringCompressor stringCompressor) {
this.performanceDb = performanceDb;
this.stringCompressor = stringCompressor;
}
public void ingest(final List<MultipartFile> files, final CsvReaderSettings settings)
@@ -48,7 +52,8 @@ public class CsvUploadHandler implements PropertyKeys, DisposableBean {
// improved the
// ingestion performance fom 1.1m to 1.55m values per second on average
synchronized (this) {
final NoCopyCsvToEntryTransformer csvToEntryTransformer = new NoCopyCsvToEntryTransformer(queue, settings);
final NoCopyCsvToEntryTransformer csvToEntryTransformer = new NoCopyCsvToEntryTransformer(queue,
settings, stringCompressor);
try (InputStream in = file.getInputStream()) {
csvToEntryTransformer.readCSV(in);
} catch (final Exception e) {

View File

@@ -10,6 +10,7 @@ import java.util.concurrent.ArrayBlockingQueue;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import org.lucares.pdb.api.StringCompressor;
import org.lucares.pdb.datastore.Entries;
import org.lucares.pdb.datastore.RuntimeTimeoutException;
import org.lucares.performance.db.PerformanceDb;
@@ -21,12 +22,15 @@ public class FileDropZipHandler implements FileDropFileTypeHandler {
private final PerformanceDb performanceDb;
private final FileDropConfigProvider configProvider;
private final StringCompressor stringCompressor;
@Autowired
public FileDropZipHandler(final PerformanceDb performanceDb, final FileDropConfigProvider configProvider) {
public FileDropZipHandler(final PerformanceDb performanceDb, final FileDropConfigProvider configProvider,
final StringCompressor stringCompressor) {
super();
this.performanceDb = performanceDb;
this.configProvider = configProvider;
this.stringCompressor = stringCompressor;
}
@Override
@@ -54,7 +58,7 @@ public class FileDropZipHandler implements FileDropFileTypeHandler {
final CsvReaderSettings csvReaderSettings = csvSettings.get();
final CsvToEntryTransformer csvToEntryTransformer = CsvToEntryTransformerFactory
.createCsvToEntryTransformer(queue, csvReaderSettings);
.createCsvToEntryTransformer(queue, csvReaderSettings, stringCompressor);
try (final InputStream inputStream = new BufferedInputStream(zipFile.getInputStream(entry),
1024 * 1024)) {
csvToEntryTransformer.readCSV(inputStream);

View File

@@ -15,6 +15,7 @@ import java.util.concurrent.Callable;
import java.util.concurrent.TimeoutException;
import java.util.zip.GZIPInputStream;
import org.lucares.pdb.api.StringCompressor;
import org.lucares.pdb.datastore.Entries;
import org.lucares.pdb.datastore.Entry;
import org.lucares.pdbui.CsvReaderSettings.ColumnDefinitions;
@@ -25,10 +26,13 @@ public final class IngestionHandler implements Callable<Void> {
final Socket clientSocket;
private final ArrayBlockingQueue<Entries> queue;
private final StringCompressor stringCompressor;
public IngestionHandler(final Socket clientSocket, final ArrayBlockingQueue<Entries> queue) {
public IngestionHandler(final Socket clientSocket, final ArrayBlockingQueue<Entries> queue,
final StringCompressor stringCompressor) {
this.clientSocket = clientSocket;
this.queue = queue;
this.stringCompressor = stringCompressor;
}
@Override
@@ -65,7 +69,7 @@ public final class IngestionHandler implements Callable<Void> {
} else {
in.reset();
final NoCopyCsvToEntryTransformer csvTransformer = new NoCopyCsvToEntryTransformer(queue,
CsvReaderSettings.create("@timestamp", "duration", ",", new ColumnDefinitions()));
CsvReaderSettings.create("@timestamp", "duration", ",", new ColumnDefinitions()), stringCompressor);
csvTransformer.readCSV(in);
}
}

View File

@@ -72,12 +72,12 @@ public class JsonToEntryTransformer implements LineToEntryTransformer {
// ignore: we only support key/value tags
break;
default:
final int keyAsInt = Tags.STRING_COMPRESSOR.put(key);
final int keyAsInt = Tags.STRING_COMPRESSOR.putString(key);
final int valueAsInt;
if (value instanceof String) {
valueAsInt = Tags.STRING_COMPRESSOR.put((String) value);
valueAsInt = Tags.STRING_COMPRESSOR.putString((String) value);
} else if (value != null) {
valueAsInt = Tags.STRING_COMPRESSOR.put(String.valueOf(value));
valueAsInt = Tags.STRING_COMPRESSOR.putString(String.valueOf(value));
} else {
continue;
}

View File

@@ -4,6 +4,7 @@ import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.lucares.pdb.api.StringCompressor;
import org.lucares.performance.db.PerformanceDb;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -28,4 +29,9 @@ public class MySpringConfiguration {
return new PerformanceDb(dataDirectory);
}
@Bean
StringCompressor stringCompressor(final PerformanceDb performanceDb) {
return performanceDb.getRealDataStore().getStringCompressor();
}
}

View File

@@ -11,6 +11,7 @@ import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import org.lucares.collections.IntList;
import org.lucares.pdb.api.StringCompressor;
import org.lucares.pdb.api.Tags;
import org.lucares.pdb.api.TagsBuilder;
import org.lucares.pdb.datastore.Entries;
@@ -31,9 +32,13 @@ class NoCopyCsvToEntryTransformer implements CsvToEntryTransformer {
private int[] compressedHeaders;
private List<Function<String, String>> postProcessersForColumns;
public NoCopyCsvToEntryTransformer(final ArrayBlockingQueue<Entries> queue, final CsvReaderSettings settings) {
private final StringCompressor stringCompressor;
public NoCopyCsvToEntryTransformer(final ArrayBlockingQueue<Entries> queue, final CsvReaderSettings settings,
final StringCompressor stringCompressor) {
this.queue = queue;
this.settings = settings;
this.stringCompressor = stringCompressor;
}
@Override
@@ -54,8 +59,8 @@ class NoCopyCsvToEntryTransformer implements CsvToEntryTransformer {
int lineCounter = 0;
final byte[] buffer = new byte[4096 * 16];
final int keyTimestamp = Tags.STRING_COMPRESSOR.put(settings.getTimeColumn());
final int keyDuration = Tags.STRING_COMPRESSOR.put(settings.getValueColumn());
final int keyTimestamp = stringCompressor.putString(settings.getTimeColumn());
final int keyDuration = stringCompressor.putString(settings.getValueColumn());
final FastISODateParser dateParser = new FastISODateParser();
Tags additionalTags = initAdditionalTags(settings);
@@ -144,7 +149,7 @@ class NoCopyCsvToEntryTransformer implements CsvToEntryTransformer {
final String renameTo = settings.getColumnDefinitions().getRenameTo(columnName);
final String renamedColumn = renameTo != null ? renameTo : columnName;
columns[i] = Tags.STRING_COMPRESSOR.put(renamedColumn);
columns[i] = stringCompressor.putString(renamedColumn);
final EnumSet<PostProcessors> postProcessors = settings.getColumnDefinitions()
.getPostProcessors(columnName);
final Function<String, String> postProcessFunction = PostProcessors.toFunction(postProcessors);
@@ -186,8 +191,8 @@ class NoCopyCsvToEntryTransformer implements CsvToEntryTransformer {
duration = parseLong(line, lastSeparatorPosition + 1, separatorPosition);
} else if (lastSeparatorPosition + 1 < separatorPosition) { // value is not empty
final Function<String, String> postProcess = postProcessersForColumns.get(i);
final int value = Tags.STRING_COMPRESSOR.put(line, lastSeparatorPosition + 1, separatorPosition,
postProcess);
final int value = stringCompressor.putStringFromBytes(line, lastSeparatorPosition + 1,
separatorPosition, postProcess);
tagsBuilder.add(key, value);
}

View File

@@ -5,6 +5,7 @@ import java.util.List;
import org.lucares.pdb.plot.api.Aggregate;
import org.lucares.pdb.plot.api.AggregateHandlerCollection;
import org.lucares.pdb.plot.api.BarChartHandler;
import org.lucares.pdb.plot.api.BoxChartHandler;
import org.lucares.pdb.plot.api.CumulativeDistributionHandler;
import org.lucares.pdb.plot.api.HistogramHandler;
import org.lucares.pdb.plot.api.Interval;
@@ -62,6 +63,9 @@ class PlotSettingsTransformer {
case BAR:
aggregateHandlerCollection.addAggregateHandler(new BarChartHandler());
break;
case BOX:
aggregateHandlerCollection.addAggregateHandler(new BoxChartHandler());
break;
default:
throw new IllegalStateException("unhandled enum: " + aggregate);
}

View File

@@ -26,8 +26,8 @@ public class TagMatchExtractor {
if (matcher.find() && matcher.groupCount() >= 1) {
final String group = matcher.group(1);
Tags.STRING_COMPRESSOR.put(tagMatcher.tag());
Tags.STRING_COMPRESSOR.put(group);
Tags.STRING_COMPRESSOR.putString(tagMatcher.tag());
Tags.STRING_COMPRESSOR.putString(group);
System.out.println(tagMatcher.tag() + " -> " + group);
final Tag tag = Tags.STRING_COMPRESSOR.createTag(tagMatcher.tag(), group);

View File

@@ -15,6 +15,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
import javax.annotation.PreDestroy;
import org.lucares.pdb.api.StringCompressor;
import org.lucares.pdb.datastore.Entries;
import org.lucares.performance.db.PerformanceDb;
import org.lucares.recommind.logs.Config;
@@ -40,15 +41,19 @@ public class TcpIngestor implements Ingestor, AutoCloseable, DisposableBean {
private volatile int port = PORT;
private final StringCompressor stringCompressor;
public TcpIngestor(final Path dataDirectory) throws IOException {
LOGGER.info("opening performance db: " + dataDirectory);
db = new PerformanceDb(dataDirectory);
stringCompressor = db.getRealDataStore().getStringCompressor();
LOGGER.debug("performance db open");
}
@Autowired
public TcpIngestor(final PerformanceDb db) {
public TcpIngestor(final PerformanceDb db, final StringCompressor stringCompressor) {
this.db = db;
this.stringCompressor = stringCompressor;
}
public void useRandomPort() {
@@ -94,7 +99,7 @@ public class TcpIngestor implements Ingestor, AutoCloseable, DisposableBean {
LOGGER.debug("accepted connection: " + clientSocket.getRemoteSocketAddress());
final ArrayBlockingQueue<Entries> queue = db.getQueue();
workerThreadPool.submit(new IngestionHandler(clientSocket, queue));
workerThreadPool.submit(new IngestionHandler(clientSocket, queue, stringCompressor));
LOGGER.debug("handler submitted");
} catch (final SocketTimeoutException e) {
// expected every 100ms

View File

@@ -1 +1,6 @@
db.base=/tmp/pdb
db.base=/home/andi/ws/performanceDb/dev-database/
server.port=17333
cache.images.duration.seconds=86400
defaults.groupBy=pod,method,metric
defaults.splitBy=method
defaults.query.examples=pod=vapfinra01 and method=ViewService.findFieldView,ViewService.findFieldViewGroup;pod=vappilby01 and method=ReviewInContextController.index;pod=vapnyse001 and method=ReviewInContextController.index,ReviewController.index

View File

@@ -18,6 +18,7 @@ import org.lucares.collections.LongList;
import org.lucares.pdb.api.DateTimeRange;
import org.lucares.pdb.api.Query;
import org.lucares.pdb.api.Result;
import org.lucares.pdb.api.StringCompressor;
import org.lucares.pdbui.CsvReaderSettings.ColumnDefinitions;
import org.lucares.pdbui.CsvReaderSettings.PostProcessors;
import org.lucares.pdbui.domain.FileDropConfig;
@@ -101,7 +102,11 @@ public class FileDropHandlerTest {
final FileDropConfigProvider fileDropConfigProvider = new FileDropConfigProvider(
fileDropConfigLocation.toString());
final String fileDropBaseDir = dataDirectory.resolve("drop").toAbsolutePath().toString();
final List<FileDropFileTypeHandler> handlers = List.of(new FileDropZipHandler(db, fileDropConfigProvider));
final StringCompressor stringCompressor = db.getRealDataStore().getStringCompressor();
final List<FileDropFileTypeHandler> handlers = List
.of(new FileDropZipHandler(db, fileDropConfigProvider, stringCompressor));
return new FileDropHandler(fileDropBaseDir, handlers);
}

View File

@@ -19,6 +19,7 @@ import org.junit.jupiter.api.Test;
import org.lucares.collections.LongList;
import org.lucares.pdb.api.DateTimeRange;
import org.lucares.pdb.api.Query;
import org.lucares.pdb.api.StringCompressor;
import org.lucares.pdb.datastore.Entries;
import org.lucares.pdbui.CsvReaderSettings.ColumnDefinitions;
import org.lucares.performance.db.PerformanceDb;
@@ -44,6 +45,7 @@ public class NoCopyCsvToEntryTransformerTest {
final OffsetDateTime dateB = OffsetDateTime.now();
try (final PerformanceDb db = new PerformanceDb(dataDirectory)) {
final StringCompressor stringCompressor = db.getRealDataStore().getStringCompressor();
final String csv = "@timestamp,duration,tag\n"//
+ dateA.format(DateTimeFormatter.ISO_ZONED_DATE_TIME) + ",1,tagValue\n"//
@@ -52,7 +54,8 @@ public class NoCopyCsvToEntryTransformerTest {
final ArrayBlockingQueue<Entries> queue = db.getQueue();
final CsvReaderSettings settings = CsvReaderSettings.create("@timestamp", "duration", ",",
new ColumnDefinitions());
final NoCopyCsvToEntryTransformer csvToEntryTransformer = new NoCopyCsvToEntryTransformer(queue, settings);
final NoCopyCsvToEntryTransformer csvToEntryTransformer = new NoCopyCsvToEntryTransformer(queue, settings,
stringCompressor);
csvToEntryTransformer.readCSV(new ByteArrayInputStream(csv.getBytes(StandardCharsets.UTF_8)));
queue.put(Entries.POISON);
}
@@ -84,6 +87,7 @@ public class NoCopyCsvToEntryTransformerTest {
public void testIgnoreColumns() throws IOException, InterruptedException, TimeoutException {
try (final PerformanceDb db = new PerformanceDb(dataDirectory)) {
final StringCompressor stringCompressor = db.getRealDataStore().getStringCompressor();
final String csv = "@timestamp,duration,ignoredColumn,-otherIgnoredColumn,tag\n"//
+ "2000-01-01T00:00:00.000Z,1,ignoreValue,ignoreValue,tagValue\n"//
@@ -94,7 +98,8 @@ public class NoCopyCsvToEntryTransformerTest {
columnDefinitions.ignoreColumn("ignoredColumn");
final CsvReaderSettings settings = CsvReaderSettings.create("@timestamp", "duration", ",",
columnDefinitions);
final NoCopyCsvToEntryTransformer csvToEntryTransformer = new NoCopyCsvToEntryTransformer(queue, settings);
final NoCopyCsvToEntryTransformer csvToEntryTransformer = new NoCopyCsvToEntryTransformer(queue, settings,
stringCompressor);
csvToEntryTransformer.readCSV(new ByteArrayInputStream(csv.getBytes(StandardCharsets.UTF_8)));
queue.put(Entries.POISON);
}

View File

@@ -231,6 +231,10 @@ public class PerformanceDb implements AutoCloseable {
return fields;
}
public DataStore getRealDataStore() {
return dataStore;
}
public PartitionDiskStore getDataStore() {
return dataStore.getDiskStorage();
}

View File

@@ -13,6 +13,7 @@ import java.util.concurrent.ThreadLocalRandom;
import org.apache.commons.collections4.CollectionUtils;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
@@ -24,7 +25,6 @@ import org.lucares.pdb.api.Query;
import org.lucares.pdb.api.Result;
import org.lucares.pdb.api.Tags;
import org.lucares.pdb.datastore.Entry;
import org.junit.jupiter.api.Assertions;
import org.lucares.utils.DateUtils;
public class PerformanceDbTest {
@@ -48,10 +48,10 @@ public class PerformanceDbTest {
final OffsetDateTime nowInUtc = DateUtils.nowInUtc();
final long date = nowInUtc.toInstant().toEpochMilli();
final long value = 1;
final Tags tags = Tags.createAndAddToDictionary("myKey", "myValue");
final Tags tags = Tags.STRING_COMPRESSOR.createAndAddToDictionary("myKey", "myValue");
db.putEntry(new Entry(date, value, tags));
final Result result = db.get(Query.createQuery(tags, DateTimeRange.ofDay(nowInUtc)));
final Result result = db.get(Query.createQuery("myKey=myValue", DateTimeRange.ofDay(nowInUtc)));
final LongList stream = result.singleGroup().flatMap();
Assertions.assertEquals(2, stream.size());
@@ -71,12 +71,12 @@ public class PerformanceDbTest {
final long dayTwo = dateRange.getEndEpochMilli();
final long valueOne = 1;
final long valueTwo = 2;
final Tags tags = Tags.createAndAddToDictionary("myKey", "myValue");
final Tags tags = Tags.STRING_COMPRESSOR.createAndAddToDictionary("myKey", "myValue");
db.putEntry(new Entry(dayOne, valueOne, tags));
db.putEntry(new Entry(dayTwo, valueTwo, tags));
final LongList stream = db.get(Query.createQuery(tags, dateRange)).singleGroup().flatMap();
final LongList stream = db.get(Query.createQuery("myKey=myValue", dateRange)).singleGroup().flatMap();
Assertions.assertEquals(4, stream.size());
@@ -116,14 +116,14 @@ public class PerformanceDbTest {
final DateTimeRange timeRange = DateTimeRange.ofDay(DateUtils.getDate(year, month, day, 1, 1, 1));
final Tags tags = Tags.createAndAddToDictionary("myKey", "one");
final Tags tags = Tags.STRING_COMPRESSOR.createAndAddToDictionary("myKey", "one");
final List<Entry> entries = generateEntries(timeRange, numberOfEntries, 0, tags);
printEntries(entries, "");
db.putEntries(entries);
final LongList actualEntries = db.get(Query.createQuery(tags, timeRange)).singleGroup().flatMap();
final LongList actualEntries = db.get(Query.createQuery("myKey=one", timeRange)).singleGroup().flatMap();
Assertions.assertEquals(entries.size() * 2, actualEntries.size());
for (int i = 0; i < entries.size(); i++) {
@@ -149,7 +149,7 @@ public class PerformanceDbTest {
final int month = 1;
final int day = 2;
tags = Tags.createAndAddToDictionary("myKey", "one");
tags = Tags.STRING_COMPRESSOR.createAndAddToDictionary("myKey", "one");
final DateTimeRange timeRange = DateTimeRange.ofDay(DateUtils.getDate(year, month, day, 1, 1, 1));
final List<Entry> entries = generateEntries(timeRange, numberOfEntries, 0, tags);
@@ -167,7 +167,7 @@ public class PerformanceDbTest {
db.putEntries(entries);
expected.addAll(entries);
final LongList actualEntries = db.get(Query.createQuery(tags, timeRange)).singleGroup().flatMap();
final LongList actualEntries = db.get(Query.createQuery("myKey=one", timeRange)).singleGroup().flatMap();
Assertions.assertEquals(expected.size() * 2, actualEntries.size());
Assertions.assertEquals(toExpectedValues(expected), actualEntries);
@@ -185,32 +185,38 @@ public class PerformanceDbTest {
final DateTimeRange dateRange = new DateTimeRange(from, to);
final long numberOfEntries = timeRange.duration().toHours();
final Tags tagsCommon = Tags.createAndAddToDictionary("commonKey", "commonValue");
final Tags tagsOne = Tags.createAndAddToDictionary("myKey", "one", "commonKey", "commonValue");
final Tags tagsOne = Tags.STRING_COMPRESSOR.createAndAddToDictionary("myKey", "one", "commonKey",
"commonValue");
final List<Entry> entriesOne = generateEntries(timeRange, numberOfEntries, 1, tagsOne);
db.putEntries(entriesOne);
printEntries(entriesOne, "one");
final Tags tagsTwo = Tags.createAndAddToDictionary("myKey", "two", "commonKey", "commonValue");
final Tags tagsTwo = Tags.STRING_COMPRESSOR.createAndAddToDictionary("myKey", "two", "commonKey",
"commonValue");
final List<Entry> entriesTwo = generateEntries(timeRange, numberOfEntries, 2, tagsTwo);
printEntries(entriesTwo, "two");
db.putEntries(entriesTwo);
final Tags tagsThree = Tags.createAndAddToDictionary("myKey", "three", "commonKey", "commonValue");
final Tags tagsThree = Tags.STRING_COMPRESSOR.createAndAddToDictionary("myKey", "three", "commonKey",
"commonValue");
final List<Entry> entriesThree = generateEntries(timeRange, numberOfEntries, 3, tagsThree);
printEntries(entriesThree, "three");
db.putEntries(entriesThree);
final LongList actualEntriesOne = db.get(Query.createQuery(tagsOne, dateRange)).singleGroup().flatMap();
final LongList actualEntriesOne = db
.get(Query.createQuery("myKey=one and commonKey=commonValue", dateRange)).singleGroup().flatMap();
Assertions.assertEquals(toExpectedValues(entriesOne), actualEntriesOne);
final LongList actualEntriesTwo = db.get(Query.createQuery(tagsTwo, dateRange)).singleGroup().flatMap();
final LongList actualEntriesTwo = db
.get(Query.createQuery("myKey=two and commonKey=commonValue", dateRange)).singleGroup().flatMap();
Assertions.assertEquals(toExpectedValues(entriesTwo), actualEntriesTwo);
final LongList actualEntriesThree = db.get(Query.createQuery(tagsThree, dateRange)).singleGroup().flatMap();
final LongList actualEntriesThree = db
.get(Query.createQuery("myKey=three and commonKey=commonValue", dateRange)).singleGroup().flatMap();
Assertions.assertEquals(toExpectedValues(entriesThree), actualEntriesThree);
final LongList actualEntriesAll = db.get(Query.createQuery(tagsCommon, dateRange)).singleGroup().flatMap();
final LongList actualEntriesAll = db.get(Query.createQuery("commonKey=commonValue", dateRange))
.singleGroup().flatMap();
final List<Entry> expectedAll = CollectionUtils.collate(entriesOne,
CollectionUtils.collate(entriesTwo, entriesThree, EntryByDateComparator.INSTANCE),
EntryByDateComparator.INSTANCE);
@@ -233,9 +239,11 @@ public class PerformanceDbTest {
final long numberOfEntries = timeRange.duration().toHours();
final String key = "myKey";
final Tags tagsOne = Tags.createAndAddToDictionary(key, "one", "commonKey", "commonValue");
final Tags tagsTwo = Tags.createAndAddToDictionary(key, "two", "commonKey", "commonValue");
final Tags tagsThree = Tags.createAndAddToDictionary("commonKey", "commonValue");
final Tags tagsOne = Tags.STRING_COMPRESSOR.createAndAddToDictionary(key, "one", "commonKey",
"commonValue");
final Tags tagsTwo = Tags.STRING_COMPRESSOR.createAndAddToDictionary(key, "two", "commonKey",
"commonValue");
final Tags tagsThree = Tags.STRING_COMPRESSOR.createAndAddToDictionary("commonKey", "commonValue");
final LongList entriesOne = storeEntries(db, timeRange, numberOfEntries, tagsOne, 1);
final LongList entriesTwo = storeEntries(db, timeRange, numberOfEntries, tagsTwo, 2);
final LongList entriesThree = storeEntries(db, timeRange, numberOfEntries, tagsThree, 3);
@@ -247,9 +255,9 @@ public class PerformanceDbTest {
for (final GroupResult groupResult : groups) {
final Tags groupedBy = groupResult.getGroupedBy();
if (groupedBy.equals(Tags.createAndAddToDictionary(key, "one"))) {
if (groupedBy.equals(Tags.STRING_COMPRESSOR.createAndAddToDictionary(key, "one"))) {
Assertions.assertEquals(entriesOne, groupResult.flatMap());
} else if (groupedBy.equals(Tags.createAndAddToDictionary(key, "two"))) {
} else if (groupedBy.equals(Tags.STRING_COMPRESSOR.createAndAddToDictionary(key, "two"))) {
Assertions.assertEquals(entriesTwo, groupResult.flatMap());
} else if (groupedBy.isEmpty()) {
@@ -272,10 +280,14 @@ public class PerformanceDbTest {
final String key1 = "myKey1";
final String key2 = "myKey2";
final Tags tagsOne = Tags.createAndAddToDictionary(key1, "one", key2, "aaa", "commonKey", "commonValue");
final Tags tagsTwoA = Tags.createAndAddToDictionary(key1, "two", key2, "bbb", "commonKey", "commonValue");
final Tags tagsTwoB = Tags.createAndAddToDictionary(key1, "two", key2, "bbb", "commonKey", "commonValue");
final Tags tagsThree = Tags.createAndAddToDictionary(key1, "three", "commonKey", "commonValue");
final Tags tagsOne = Tags.STRING_COMPRESSOR.createAndAddToDictionary(key1, "one", key2, "aaa", "commonKey",
"commonValue");
final Tags tagsTwoA = Tags.STRING_COMPRESSOR.createAndAddToDictionary(key1, "two", key2, "bbb", "commonKey",
"commonValue");
final Tags tagsTwoB = Tags.STRING_COMPRESSOR.createAndAddToDictionary(key1, "two", key2, "bbb", "commonKey",
"commonValue");
final Tags tagsThree = Tags.STRING_COMPRESSOR.createAndAddToDictionary(key1, "three", "commonKey",
"commonValue");
final LongList entriesOne = storeEntries(db, timeRange, numberOfEntries, tagsOne, 1);
final LongList entriesTwo = storeEntries(db, timeRange, numberOfEntries, tagsTwoA, 2);
@@ -290,9 +302,10 @@ public class PerformanceDbTest {
for (final GroupResult groupResult : groups) {
final Tags groupedBy = groupResult.getGroupedBy();
if (groupedBy.equals(Tags.createAndAddToDictionary(key1, "one", key2, "aaa"))) {
if (groupedBy.equals(Tags.STRING_COMPRESSOR.createAndAddToDictionary(key1, "one", key2, "aaa"))) {
Assertions.assertEquals(entriesOne, groupResult.flatMap());
} else if (groupedBy.equals(Tags.createAndAddToDictionary(key1, "two", key2, "bbb"))) {
} else if (groupedBy
.equals(Tags.STRING_COMPRESSOR.createAndAddToDictionary(key1, "two", key2, "bbb"))) {
// there is no defined order of the entries.
// eventually we might return them in ascending order, but
// that is not yet implemented
@@ -302,7 +315,7 @@ public class PerformanceDbTest {
actualEntries.sort();
Assertions.assertEquals(entriesTwo, actualEntries);
} else if (groupedBy.equals(Tags.createAndAddToDictionary(key1, "three"))) {
} else if (groupedBy.equals(Tags.STRING_COMPRESSOR.createAndAddToDictionary(key1, "three"))) {
Assertions.assertEquals(entriesThree, groupResult.flatMap());
} else {
Assertions.fail("unexpected group: " + groupedBy);