diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/internal/ClusterId.java b/data-store/src/main/java/org/lucares/pdb/datastore/internal/ClusterId.java
index 4e08cff..ddc8993 100644
--- a/data-store/src/main/java/org/lucares/pdb/datastore/internal/ClusterId.java
+++ b/data-store/src/main/java/org/lucares/pdb/datastore/internal/ClusterId.java
@@ -1,6 +1,6 @@
 package org.lucares.pdb.datastore.internal;
 
-public class ClusterId {
+public class ClusterId implements Comparable<ClusterId> {
     private final String clusterId;
 
     /**
@@ -18,6 +18,11 @@ public class ClusterId {
         return new ClusterId(clusterId);
     }
 
+    @Override
+    public int compareTo(final ClusterId other) {
+        return clusterId.compareTo(other.getClusterId());
+    }
+
     /**
      * @return the id, e.g. a time like 201902 (cluster for entries of February
      *         2019)
diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/internal/ClusterIdSource.java b/data-store/src/main/java/org/lucares/pdb/datastore/internal/ClusterIdSource.java
index 7916e71..6ca7633 100644
--- a/data-store/src/main/java/org/lucares/pdb/datastore/internal/ClusterIdSource.java
+++ b/data-store/src/main/java/org/lucares/pdb/datastore/internal/ClusterIdSource.java
@@ -1,7 +1,7 @@
 package org.lucares.pdb.datastore.internal;
 
-import java.util.List;
+import java.util.Set;
 
 public interface ClusterIdSource {
-    List<ClusterId> toClusterIds();
+    Set<ClusterId> toClusterIds(Set<ClusterId> availableClusters);
 }
diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/internal/ClusteredLongList.java b/data-store/src/main/java/org/lucares/pdb/datastore/internal/ClusteredLongList.java
new file mode 100644
index 0000000..8eda2bb
--- /dev/null
+++ b/data-store/src/main/java/org/lucares/pdb/datastore/internal/ClusteredLongList.java
@@ -0,0 +1,95 @@
+package org.lucares.pdb.datastore.internal;
+
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Set;
+
+import org.lucares.collections.LongList;
+
+public class ClusteredLongList implements Iterable<ClusterId> {
+    private final Map<ClusterId, LongList> lists = new HashMap<>();
+
+    public LongList put(final ClusterId clusterId, final LongList longList) {
+        return lists.put(clusterId, longList);
+    }
+
+    public LongList get(final ClusterId clusterId) {
+        return lists.get(clusterId);
+    }
+
+    @Override
+    public Iterator<ClusterId> iterator() {
+        return lists.keySet().iterator();
+    }
+
+    public static ClusteredLongList intersection(final ClusteredLongList a, final ClusteredLongList b) {
+        final ClusteredLongList result = new ClusteredLongList();
+        final Set<ClusterId> clusterIds = new HashSet<>();
+        clusterIds.addAll(a.lists.keySet());
+        clusterIds.addAll(b.lists.keySet());
+
+        for (final ClusterId clusterId : clusterIds) {
+            final LongList x = a.get(clusterId);
+            final LongList y = b.get(clusterId);
+
+            if (x != null && y != null) {
+                final LongList intersection = LongList.intersection(x, y);
+                result.put(clusterId, intersection);
+            } else {
+                // one list is empty => the intersection is empty
+            }
+        }
+        return result;
+    }
+
+    public static ClusteredLongList union(final ClusteredLongList a, final ClusteredLongList b) {
+        final ClusteredLongList result = new ClusteredLongList();
+        final Set<ClusterId> clusterIds = new HashSet<>();
+        clusterIds.addAll(a.lists.keySet());
+        clusterIds.addAll(b.lists.keySet());
+        for (final ClusterId clusterId : clusterIds) {
+            final LongList x = a.get(clusterId);
+            final LongList y = b.get(clusterId);
+
+            if (x != null && y != null) {
+                final LongList intersection = LongList.union(x, y);
+                result.put(clusterId, intersection);
+            } else if (x != null) {
+                result.put(clusterId, x.clone());
+            } else if (y != null) {
+                result.put(clusterId, y.clone());
+            }
+        }
+        return result;
+    }
+
+    public int size() {
+        int size = 0;
+
+        for (final LongList longList : lists.values()) {
+            size += longList.size();
+        }
+
+        return size;
+    }
+
+    public boolean isSorted() {
+        for (final LongList longList : lists.values()) {
+            if (!longList.isSorted()) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    public void removeAll(final ClusteredLongList remove) {
+        for (final ClusterId clusterId : lists.keySet()) {
+            final LongList removeLongList = remove.get(clusterId);
+            if (removeLongList != null) {
+                lists.get(clusterId).removeAll(removeLongList);
+            }
+        }
+    }
+}
diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/internal/ClusteredPersistentMap.java b/data-store/src/main/java/org/lucares/pdb/datastore/internal/ClusteredPersistentMap.java
index 0b9434c..9afc6dd 100644
--- a/data-store/src/main/java/org/lucares/pdb/datastore/internal/ClusteredPersistentMap.java
+++ b/data-store/src/main/java/org/lucares/pdb/datastore/internal/ClusteredPersistentMap.java
@@ -1,12 +1,15 @@
 package org.lucares.pdb.datastore.internal;
 
+import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.function.Function;
 
+import org.lucares.pdb.api.RuntimeIOException;
 import org.lucares.pdb.map.PersistentMap;
 import org.lucares.pdb.map.PersistentMap.EncoderDecoder;
 import org.lucares.pdb.map.Visitor;
@@ -17,7 +20,7 @@ import org.lucares.pdb.map.Visitor;
  *
  * @param the key
  * @param the value used by the consumer of this
- *        {@link ClusteredPersistentMap}
+ *        {@link ClusteredPersistentMap}
  * @param the value that is stored
  */
 public class ClusteredPersistentMap implements AutoCloseable {
@@ -44,6 +47,28 @@ public class ClusteredPersistentMap implements AutoCloseable {
             }
             return null;
         };
+        preload(storageBasePath);
+    }
+
+    private void preload(final Path storageBasePath) {
+        try {
+            Files.list(storageBasePath)//
+                    .filter(Files::isDirectory)//
+                    .map(Path::getFileName)//
+                    .map(Path::toString)//
+                    .map(ClusterId::of)//
+                    .forEach(clusterId -> maps.computeIfAbsent(clusterId, supplier));
+        } catch (final IOException e) {
+            throw new RuntimeIOException(e);
+        }
+    }
+
+    private Set<ClusterId> getAllClusterIds() {
+        return maps.keySet();
+    }
+
+    public Set<ClusterId> getAvailableClusterIds(final ClusterIdSource clusterIdSource) {
+        return clusterIdSource.toClusterIds(getAllClusterIds());
     }
 
     private PersistentMap getExistingPersistentMap(final ClusterId clusterId) {
@@ -62,7 +87,7 @@ public class ClusteredPersistentMap implements AutoCloseable {
 
     public List getValues(final ClusterIdSource clusterIdSource, final K key) {
         final List result = new ArrayList<>();
-        final List<ClusterId> clusterIds = clusterIdSource.toClusterIds();
+        final Set<ClusterId> clusterIds = clusterIdSource.toClusterIds(getAllClusterIds());
 
         for (final ClusterId clusterId : clusterIds) {
             final PersistentMap map = getPersistentMapCreateIfNotExists(clusterId);
@@ -95,7 +120,7 @@
     }
 
     public void visitValues(final ClusterIdSource clusterIdSource, final K keyPrefix, final Visitor visitor) {
-        final List<ClusterId> clusterIds = clusterIdSource.toClusterIds();
+        final Set<ClusterId> clusterIds = clusterIdSource.toClusterIds(getAllClusterIds());
 
         for (final ClusterId clusterId : clusterIds) {
             final PersistentMap map = getExistingPersistentMap(clusterId);
diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/internal/DataStore.java b/data-store/src/main/java/org/lucares/pdb/datastore/internal/DataStore.java
index 7a9c6b0..49f7fe5 100644
--- a/data-store/src/main/java/org/lucares/pdb/datastore/internal/DataStore.java
+++ b/data-store/src/main/java/org/lucares/pdb/datastore/internal/DataStore.java
@@ -73,7 +73,7 @@ public class DataStore implements AutoCloseable {
 
     // A Doc will never be changed once it is created. Therefore we can cache them
    // easily.
-    private final HotEntryCache docIdToDocCache = new HotEntryCache<>(Duration.ofMillis(30), 100_000);
+    private final HotEntryCache docIdToDocCache = new HotEntryCache<>(Duration.ofMinutes(30), 100_000);
 
     private final HotEntryCache writerCache;
 
@@ -103,7 +103,7 @@
         queryCompletionIndex = new QueryCompletionIndex(storageBasePath);
 
         writerCache = new HotEntryCache<>(Duration.ofSeconds(10), 1000);
-        writerCache.addListener((k, v) -> v.close());
+        writerCache.addListener((tags, writer) -> writer.close());
     }
 
     private Path keyCompressionFile(final Path dataDirectory) throws IOException {
@@ -196,15 +196,11 @@
 
     public List<Doc> search(final Query query) {
         try {
             final List<Doc> result = new ArrayList<>();
-            final List<ClusterId> clusterIds = DateIndexExtension.toClusterIds(query.getDateRange());
-            for (final ClusterId clusterId : clusterIds) {
-                final LongList docIdsList = executeQuery(clusterId, query.getQuery());
-                LOGGER.trace("query {} found {} docs", query, docIdsList.size());
-                final List<Doc> docs = mapDocIdsToDocs(clusterId, docIdsList);
-                result.addAll(docs);
-
-            }
+            final ClusteredLongList docIdsList = executeQuery(query);
+            LOGGER.trace("query {} found {} docs", query, docIdsList.size());
+            final List<Doc> docs = mapDocIdsToDocs(docIdsList);
+            result.addAll(docs);
 
             return result;
         } catch (final IOException e) {
@@ -213,15 +209,8 @@
     }
 
     public int count(final Query query) {
-        int count = 0;
-        final List<ClusterId> clusterIds = DateIndexExtension.toClusterIds(query.getDateRange());
-        for (final ClusterId clusterId : clusterIds) {
-
-            final LongList docIdsList = executeQuery(clusterId, query.getQuery());
-            count += docIdsList.size();
-        }
-
-        return count;
+        final ClusteredLongList docIdsList = executeQuery(query);
+        return docIdsList.size();
     }
 
     public List<String> getAvailableFields(final DateTimeRange dateRange) {
@@ -261,31 +250,38 @@
     }
 
-    private LongList executeQuery(final ClusterId clusterId, final String query) {
+    private ClusteredLongList executeQuery(final Query query) {
         final long start = System.nanoTime();
         synchronized (docIdToDoc) {
-            final Expression expression = QueryLanguageParser.parse(query);
-            final ExpressionToDocIdVisitor visitor = new ExpressionToDocIdVisitor(clusterId, tagToDocsId, diskStorage);
-            final LongList docIdsList = expression.visit(visitor);
+            final Expression expression = QueryLanguageParser.parse(query.getQuery());
+            final ExpressionToDocIdVisitor visitor = new ExpressionToDocIdVisitor(query.getDateRange(), tagToDocsId,
+                    diskStorage);
+            final ClusteredLongList docIdsList = expression.visit(visitor);
 
             EXECUTE_QUERY_LOGGER.debug("executeQuery({}) took {}ms returned {} results ", query,
                     (System.nanoTime() - start) / 1_000_000.0, docIdsList.size());
             return docIdsList;
         }
     }
 
-    private List<Doc> mapDocIdsToDocs(final ClusterId clusterId, final LongList docIdsList) throws IOException {
+    private List<Doc> mapDocIdsToDocs(final ClusteredLongList docIdsList) throws IOException {
         final List<Doc> result = new ArrayList<>(docIdsList.size());
         synchronized (docIdToDoc) {
             final long start = System.nanoTime();
-            for (int i = 0; i < docIdsList.size(); i++) {
-                final long docId = docIdsList.get(i);
-                final Doc doc = getDocByDocId(clusterId, docId);
-                Objects.requireNonNull(doc, "Doc with id " + docId + " did not exist.");
+            for (final ClusterId clusterId : docIdsList) {
+                final LongList docIds = docIdsList.get(clusterId);
 
-                result.add(doc);
+                for (int i = 0; i < docIds.size(); i++) {
+                    final long docId = docIds.get(i);
+
+                    final Doc doc = getDocByDocId(clusterId, docId);
+                    Objects.requireNonNull(doc, "Doc with id " + docId + " did not exist.");
+
+                    result.add(doc);
+                }
             }
+
             MAP_DOCS_TO_DOCID.debug("mapDocIdsToDocs({}): {}ms", docIdsList.size(),
                     (System.nanoTime() - start) / 1_000_000.0);
         }
diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/internal/DateCluster.java b/data-store/src/main/java/org/lucares/pdb/datastore/internal/DateCluster.java
index 90df6e7..134e715 100644
--- a/data-store/src/main/java/org/lucares/pdb/datastore/internal/DateCluster.java
+++ b/data-store/src/main/java/org/lucares/pdb/datastore/internal/DateCluster.java
@@ -1,6 +1,6 @@
 package org.lucares.pdb.datastore.internal;
 
-import java.util.List;
+import java.util.Set;
 
 import org.lucares.pdb.api.DateTimeRange;
 
@@ -13,7 +13,7 @@ public class DateCluster implements ClusterIdSource {
     }
 
     @Override
-    public List<ClusterId> toClusterIds() {
-        return DateIndexExtension.toClusterIds(dateRange);
+    public Set<ClusterId> toClusterIds(final Set<ClusterId> availableClusters) {
+        return DateIndexExtension.toClusterIds(dateRange, availableClusters);
     }
 }
diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/internal/DateIndexExtension.java b/data-store/src/main/java/org/lucares/pdb/datastore/internal/DateIndexExtension.java
index f191bfb..8428449 100644
--- a/data-store/src/main/java/org/lucares/pdb/datastore/internal/DateIndexExtension.java
+++ b/data-store/src/main/java/org/lucares/pdb/datastore/internal/DateIndexExtension.java
@@ -5,6 +5,8 @@ import java.time.OffsetDateTime;
 import java.time.ZoneOffset;
 import java.time.format.DateTimeFormatter;
 import java.util.ArrayList;
+import java.util.Collection;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map.Entry;
 import java.util.Set;
@@ -75,7 +77,14 @@
         return result;
     }
 
-    public static List<ClusterId> toClusterIds(final DateTimeRange dateRange) {
+    /**
+     * only for tests, use toClusterIds(final DateTimeRange dateRange, final
+     * Collection availableClusterIds) instead
+     *
+     * @param dateRange
+     * @return
+     */
+    static List<ClusterId> toClusterIds(final DateTimeRange dateRange) {
         final List<ClusterId> result = new ArrayList<>();
 
         OffsetDateTime current = dateRange.getStart();
@@ -93,6 +102,22 @@
         return result;
     }
 
+    public static Set<ClusterId> toClusterIds(final DateTimeRange dateRange,
+            final Collection<ClusterId> availableClusterIds) {
+        final Set<ClusterId> result = new LinkedHashSet<>();
+
+        final ClusterId start = toClusterId(dateRange.getStart().toInstant().toEpochMilli());
+        final ClusterId end = toClusterId(dateRange.getEnd().toInstant().toEpochMilli());
+
+        for (final ClusterId clusterId : availableClusterIds) {
+            if (start.compareTo(clusterId) <= 0 && end.compareTo(clusterId) >= 0) {
+                result.add(clusterId);
+            }
+        }
+
+        return result;
+    }
+
     public static DatePrefixAndRange toDatePrefixAndRange(final long epochMilli) {
         final OffsetDateTime date = Instant.ofEpochMilli(epochMilli).atOffset(ZoneOffset.UTC);
         final OffsetDateTime beginOfMonth = date.withDayOfMonth(1).withHour(0).withMinute(0).withSecond(0).withNano(0);
diff --git a/data-store/src/main/java/org/lucares/pdb/datastore/lang/ExpressionToDocIdVisitor.java b/data-store/src/main/java/org/lucares/pdb/datastore/lang/ExpressionToDocIdVisitor.java
index 4b7eda0..a76cb11 100644
--- a/data-store/src/main/java/org/lucares/pdb/datastore/lang/ExpressionToDocIdVisitor.java
+++ b/data-store/src/main/java/org/lucares/pdb/datastore/lang/ExpressionToDocIdVisitor.java
@@ -3,16 +3,20 @@ package org.lucares.pdb.datastore.lang;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
+import java.util.Set;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
 
 import org.lucares.collections.LongList;
+import org.lucares.pdb.api.DateTimeRange;
 import org.lucares.pdb.api.Tag;
 import org.lucares.pdb.blockstorage.LongStreamFile;
 import org.lucares.pdb.datastore.internal.ClusterId;
 import org.lucares.pdb.datastore.internal.ClusteredDiskStore;
+import org.lucares.pdb.datastore.internal.ClusteredLongList;
 import org.lucares.pdb.datastore.internal.ClusteredPersistentMap;
 import org.lucares.pdb.datastore.internal.DataStore;
+import org.lucares.pdb.datastore.internal.DateCluster;
 import org.lucares.pdb.datastore.lang.Expression.And;
 import org.lucares.pdb.datastore.lang.Expression.Not;
 import org.lucares.pdb.datastore.lang.Expression.Or;
@@ -21,31 +25,31 @@ import org.lucares.utils.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-public class ExpressionToDocIdVisitor extends ExpressionVisitor<LongList> {
+public class ExpressionToDocIdVisitor extends ExpressionVisitor<ClusteredLongList> {
 
     private static final Logger LOGGER = LoggerFactory.getLogger(ExpressionToDocIdVisitor.class);
 
     private final ClusteredPersistentMap keyToValueToDocId;
     private final ClusteredDiskStore diskStorage;
-    private final ClusterId clusterId;
+    private final DateCluster dateCluster;
 
-    public ExpressionToDocIdVisitor(final ClusterId clusterId,
+    public ExpressionToDocIdVisitor(final DateTimeRange dateRange,
             final ClusteredPersistentMap keyToValueToDocsId, final ClusteredDiskStore diskStorage) {
-        this.clusterId = clusterId;
+        this.dateCluster = new DateCluster(dateRange);
         this.keyToValueToDocId = keyToValueToDocsId;
         this.diskStorage = diskStorage;
     }
 
     @Override
-    public LongList visit(final And expression) {
+    public ClusteredLongList visit(final And expression) {
         final Expression left = expression.getLeft();
         final Expression right = expression.getRight();
-        final LongList leftFiles = left.visit(this);
-        final LongList rightFiles = right.visit(this);
+        final ClusteredLongList leftFiles = left.visit(this);
+        final ClusteredLongList rightFiles = right.visit(this);
 
         final long start = System.nanoTime();
-        final LongList result = LongList.intersection(leftFiles, rightFiles);
+        final ClusteredLongList result = ClusteredLongList.intersection(leftFiles, rightFiles);
         LOGGER.trace("and: {} took {} ms results={}", expression, (System.nanoTime() - start) / 1_000_000.0,
                 result.size());
         assert result.isSorted();
@@ -54,14 +58,14 @@
     }
 
     @Override
-    public LongList visit(final Or expression) {
+    public ClusteredLongList visit(final Or expression) {
         final Expression left = expression.getLeft();
         final Expression right = expression.getRight();
-        final LongList leftFiles = left.visit(this);
-        final LongList rightFiles = right.visit(this);
+        final ClusteredLongList leftFiles = left.visit(this);
+        final ClusteredLongList rightFiles = right.visit(this);
 
         final long start = System.nanoTime();
-        final LongList result = LongList.union(leftFiles, rightFiles);
+        final ClusteredLongList result = ClusteredLongList.union(leftFiles, rightFiles);
         LOGGER.trace("or: {} took {} ms results={}", expression, (System.nanoTime() - start) / 1_000_000.0,
                 result.size());
         assert result.isSorted();
@@ -70,13 +74,13 @@
     }
 
     @Override
-    public LongList visit(final Not expression) {
+    public ClusteredLongList visit(final Not expression) {
         final Expression negatedExpression = expression.getExpression();
-        final LongList docIdsToBeNegated = negatedExpression.visit(this);
+        final ClusteredLongList docIdsToBeNegated = negatedExpression.visit(this);
 
         final long start = System.nanoTime();
-        final LongList result = getAllDocIds().clone();
+        final ClusteredLongList result = getAllDocIds();
         result.removeAll(docIdsToBeNegated);
 
         LOGGER.trace("not: {} took {} ms results={}", expression, (System.nanoTime() - start) / 1_000_000.0,
@@ -86,35 +90,34 @@
     }
 
     @Override
-    public LongList visit(final Parentheses parentheses) {
+    public ClusteredLongList visit(final Parentheses parentheses) {
         throw new UnsupportedOperationException(
                 "Parenthesis not supported. The correct order should come from the parser.");
     }
 
     @Override
-    public LongList visit(final Expression.MatchAll expression) {
+    public ClusteredLongList visit(final Expression.MatchAll expression) {
         final long start = System.nanoTime();
-        final LongList result = getAllDocIds();
+        final ClusteredLongList result = getAllDocIds();
 
         LOGGER.trace("matchAll: {} took {} ms results={}", expression, (System.nanoTime() - start) / 1_000_000.0,
                 result.size());
         return result;
     }
 
     @Override
-    public LongList visit(final Expression.InExpression expression) {
+    public ClusteredLongList visit(final Expression.InExpression expression) {
         final long start = System.nanoTime();
         final String propertyName = expression.getProperty();
         final List<String> values = expression.getValues();
 
-        LongList result = new LongList();
+        ClusteredLongList result = new ClusteredLongList();
         for (final String value : values) {
-            final Collection<LongList> docIds = filterByWildcard(propertyName, GloblikePattern.globlikeToRegex(value));
-            final LongList mergedDocIds = merge(docIds);
-            result = LongList.union(result, mergedDocIds);
+            final ClusteredLongList docIds = filterByWildcard(propertyName, GloblikePattern.globlikeToRegex(value));
+            result = ClusteredLongList.union(result, docIds);
         }
 
         LOGGER.trace("in: {} took {} ms results={}", expression, (System.nanoTime() - start) / 1_000_000.0,
@@ -122,44 +125,53 @@
         return result;
     }
 
-    private LongList getAllDocIds() {
+    private ClusteredLongList getAllDocIds() {
+        final ClusteredLongList result = new ClusteredLongList();
+        final Set<ClusterId> availableClusterIds = keyToValueToDocId.getAvailableClusterIds(dateCluster);
+        for (final ClusterId clusterId : availableClusterIds) {
-        final Long blockOffset = keyToValueToDocId.getValue(clusterId, DataStore.TAG_ALL_DOCS);
+            final Long blockOffset = keyToValueToDocId.getValue(clusterId, DataStore.TAG_ALL_DOCS);
 
-        if (blockOffset != null) {
-            final LongStreamFile bsFile = diskStorage.streamExistingFile(blockOffset, clusterId);
-            final LongList longList = bsFile.asLongList();
-
-            return longList;
-        } else {
-            return new LongList(0);
+            if (blockOffset != null) {
+                final LongStreamFile bsFile = diskStorage.streamExistingFile(blockOffset, clusterId);
+                final LongList tmp = bsFile.asLongList();
+                result.put(clusterId, tmp);
+            }
         }
+        return result;
     }
 
-    private List<LongList> filterByWildcard(final String propertyName, final Pattern valuePattern) {
-        final List<LongList> result = new ArrayList<>();
+    private ClusteredLongList filterByWildcard(final String propertyName, final Pattern valuePattern) {
+        final ClusteredLongList result = new ClusteredLongList();
         final long start = System.nanoTime();
-        keyToValueToDocId.visitValues(clusterId, new Tag(propertyName, ""), (tags, blockOffsetToDocIds) -> {
-            if (valuePattern.matcher(tags.getValueAsString()).matches()) {
-                try (final LongStreamFile bsFile = diskStorage.streamExistingFile(blockOffsetToDocIds, clusterId)) {
+        final Set<ClusterId> availableClusterIds = keyToValueToDocId.getAvailableClusterIds(dateCluster);
+        for (final ClusterId clusterId : availableClusterIds) {
+            final List<LongList> docIdsForCluster = new ArrayList<>();
+            keyToValueToDocId.visitValues(clusterId, new Tag(propertyName, ""), (tags, blockOffsetToDocIds) -> {
+                if (valuePattern.matcher(tags.getValueAsString()).matches()) {
+                    try (final LongStreamFile bsFile = diskStorage.streamExistingFile(blockOffsetToDocIds, clusterId)) {
 
-                    // We know that all LongLists coming from a BSFile are sorted, non-overlapping
-                    // and increasing, that means we can just concatenate them and get a sorted
-                    // list.
-                    final List<LongList> longLists = bsFile.streamOfLongLists().collect(Collectors.toList());
-                    final LongList concatenatedLists = concatenateLists(longLists);
+                        // We know that all LongLists coming from a BSFile are sorted, non-overlapping
+                        // and increasing, that means we can just concatenate them and get a sorted
+                        // list.
+                        final List<LongList> longLists = bsFile.streamOfLongLists().collect(Collectors.toList());
+                        final LongList concatenatedLists = concatenateLists(longLists);
 
-                    Preconditions.checkTrue(concatenatedLists.isSorted(),
-                            "The LongLists containing document ids must be sorted, "
-                                    + "non-overlapping and increasing, so that the concatenation "
-                                    + "is sorted. This is guaranteed by the fact that document ids "
-                                    + "are generated in monotonically increasing order.");
+                        Preconditions.checkTrue(concatenatedLists.isSorted(),
+                                "The LongLists containing document ids must be sorted, "
+                                        + "non-overlapping and increasing, so that the concatenation "
+                                        + "is sorted. This is guaranteed by the fact that document ids "
+                                        + "are generated in monotonically increasing order.");
 
-                    result.add(concatenatedLists);
+                        docIdsForCluster.add(concatenatedLists);
+                    }
                 }
-            }
-        });
+            });
+
+            final LongList mergedDocsIdsForCluster = merge(docIdsForCluster);
+            result.put(clusterId, mergedDocsIdsForCluster);
+        }
 
         LOGGER.trace("filterByWildcard: for key {} took {}ms", propertyName,
                 (System.nanoTime() - start) / 1_000_000.0);
diff --git a/data-store/src/test/java/org/lucares/pdb/datastore/internal/DataStoreTest.java b/data-store/src/test/java/org/lucares/pdb/datastore/internal/DataStoreTest.java
index 088762a..a640e8d 100644
--- a/data-store/src/test/java/org/lucares/pdb/datastore/internal/DataStoreTest.java
+++ b/data-store/src/test/java/org/lucares/pdb/datastore/internal/DataStoreTest.java
@@ -312,13 +312,14 @@
         Assert.assertFalse(result.isEmpty(), "The query '" + query + "' must return a result, but didn't.");
     }
 
-    private void assertSearch(final DateTimeRange dateRange, final String query, final Tags... tags) {
-        final List<Doc> actualDocs = dataStore.search(new Query(query, dateRange));
+    private void assertSearch(final DateTimeRange dateRange, final String queryString, final Tags... tags) {
+        final Query query = new Query(queryString, dateRange);
+        final List<Doc> actualDocs = dataStore.search(query);
 
         final List actual = CollectionUtils.map(actualDocs, Doc::getRootBlockNumber);
         final List expectedPaths = CollectionUtils.map(tags, tagsToBlockStorageRootBlockNumber::get);
 
-        Assert.assertEquals(actual, expectedPaths, "Query: " + query + " Found: " + actual);
+        Assert.assertEquals(actual, expectedPaths, "Query: " + queryString + " Found: " + actual);
     }
 }
diff --git a/pdb-api/src/main/java/org/lucares/pdb/api/DateTimeRange.java b/pdb-api/src/main/java/org/lucares/pdb/api/DateTimeRange.java
index e92d745..f68a4ad 100644
--- a/pdb-api/src/main/java/org/lucares/pdb/api/DateTimeRange.java
+++ b/pdb-api/src/main/java/org/lucares/pdb/api/DateTimeRange.java
@@ -2,11 +2,16 @@ package org.lucares.pdb.api;
 
 import java.time.Duration;
 import java.time.OffsetDateTime;
+import java.time.ZoneOffset;
 import java.time.temporal.ChronoUnit;
 import java.time.temporal.TemporalUnit;
 
 public class DateTimeRange {
 
+    private static final DateTimeRange MAX = new DateTimeRange(
+            OffsetDateTime.of(1900, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC),
+            OffsetDateTime.of(2100, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC));
+
     private final OffsetDateTime start;
     private final OffsetDateTime end;
 
@@ -15,6 +20,10 @@ public class DateTimeRange {
         this.end = end;
     }
 
+    public static DateTimeRange max() {
+        return MAX;
+    }
+
     public static DateTimeRange now() {
         return relativeMillis(0);
     }
@@ -101,5 +110,4 @@ public class DateTimeRange {
                 || timeRange.inRange(start)//
                 || timeRange.inRange(end);
     }
-
 }
diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/PdbController.java b/pdb-ui/src/main/java/org/lucares/pdbui/PdbController.java
index 5659c96..386d14a 100644
--- a/pdb-ui/src/main/java/org/lucares/pdbui/PdbController.java
+++ b/pdb-ui/src/main/java/org/lucares/pdbui/PdbController.java
@@ -236,9 +236,7 @@ public class PdbController implements HardcodedValues, PropertyKeys {
     )
     @ResponseBody
     List<String> fields() {
-        // TODO get date range from UI
-        // TODO time range must not be static
-        final DateTimeRange dateTimeRange = DateTimeRange.relativeYears(5);
+        final DateTimeRange dateTimeRange = DateTimeRange.max();
 
         final List<String> fields = db.getFields(dateTimeRange);
         fields.sort(Collator.getInstance(Locale.ENGLISH));
@@ -255,9 +253,7 @@
     SortedSet<String> fields(@PathVariable(name = "fieldName") final String fieldName,
             @RequestParam(name = "query") final String query) {
-        // TODO get date range from UI
-        // TODO time range must not be static
-        final DateTimeRange dateRange = DateTimeRange.relativeYears(5);
+        final DateTimeRange dateRange = DateTimeRange.max();
 
         final Query q = new Query(query, dateRange);
         final SortedSet<String> fields = db.getFieldsValues(q, fieldName);
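
Usage sketch (illustrative, not part of the patch): the new DateIndexExtension.toClusterIds(dateRange, availableClusterIds) overload intersects a query's date range with the monthly clusters that actually exist (as discovered by ClusteredPersistentMap.preload), instead of generating every month in the range as the old single-argument variant did. The snippet below is a minimal example, assuming the DateTimeRange constructor and the ClusterId.of factory used above are accessible to the caller, that cluster ids follow the yyyyMM form mentioned in the ClusterId javadoc (e.g. 201902), and that the usual java.time and java.util imports are present.

    // Clusters present on disk, normally discovered from the per-month directories.
    final Set<ClusterId> available = new LinkedHashSet<>(Arrays.asList(
            ClusterId.of("201812"), ClusterId.of("201902"), ClusterId.of("201907")));

    // Query range 2019-01-01 .. 2019-03-01 maps to the cluster range [201901, 201903].
    final DateTimeRange range = new DateTimeRange(
            OffsetDateTime.of(2019, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC),
            OffsetDateTime.of(2019, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC));

    // Only 201902 is both available and inside the range; 201812 and 201907 are skipped.
    final Set<ClusterId> clustersToQuery = DateIndexExtension.toClusterIds(range, available);

DataStore then builds a single ClusteredLongList keyed by these cluster ids via ExpressionToDocIdVisitor, and intersection, union and removeAll are applied per cluster rather than once per month of the requested range.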