2020-09-28 10:01:20 +00:00
32 changed files with 1436 additions and 2043 deletions

View File

@@ -53,6 +53,7 @@ public class DiskStorage implements AutoCloseable {
try {
LOGGER.trace("read block={} file={}", blockOffset, relativeDatabaseFileForLogging);
DiskStoreStats.incrementDiskRead();
final var byteBuffer = fileChannel.map(MapMode.READ_WRITE, blockOffset, blockSize);
return new DiskBlock(blockOffset, byteBuffer);
@@ -227,6 +228,7 @@ public class DiskStorage implements AutoCloseable {
private FreeListNode readFreeListNode(final long freeListNodePosition) throws IOException {
final var freeListNode = ByteBuffer.allocate(FREE_LIST_NODE_SIZE);
DiskStoreStats.incrementDiskRead();
fileChannel.read(freeListNode, freeListNodePosition);
final long offset = freeListNodePosition;
final long next = freeListNode.getLong(FREE_LIST_NEXT_POINTER);
@@ -250,6 +252,7 @@ public class DiskStorage implements AutoCloseable {
private long readFreeListRootNodePosition() throws IOException {
final var freeListFirstBlock = ByteBuffer.allocate(8);
DiskStoreStats.incrementDiskRead();
fileChannel.read(freeListFirstBlock, FREE_LIST_ROOT_OFFSET);
return freeListFirstBlock.getLong(0);
}

View File

@@ -0,0 +1,24 @@
package org.lucares.pdb.diskstorage;
import java.util.concurrent.atomic.AtomicLong;
public class DiskStoreStats {
private static final AtomicLong diskRead = new AtomicLong(0);
public static void incrementDiskRead() {
diskRead.incrementAndGet();
}
public static void resetDiskRead() {
diskRead.set(0);
}
public static long getDiskRead() {
return diskRead.get();
}
public static long getAndResetDiskRead() {
return diskRead.getAndSet(0);
}
}
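
For reference, a minimal usage sketch of the new counter, mirroring the pattern this commit adds to DataStore.search; the measured work and the logger are placeholders, not part of the commit:

DiskStoreStats.resetDiskRead();
// ... disk-backed work to be measured, e.g. executing a query (placeholder) ...
final long reads = DiskStoreStats.getAndResetDiskRead(); // reads recorded since the reset
LOGGER.info("Disk reads - phase {}", reads);

Because the counter is a single static AtomicLong shared by all threads, such per-phase numbers are only meaningful while one request runs at a time.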

View File

@@ -21,11 +21,11 @@ import org.lucares.utils.byteencoder.VariableByteEncoder;
* <pre>
* Node layout:
*
* ─────── Prefix ──────▶ ◀───────────────── Suffix ──────────────────
* ┏━━━┳━━━━━┳━━━━━┳━━━━━┳━━━┳╸╺╸╺╸╺╸╺┳━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━┓
* ┃ 6 ┃ 5,6 ┃ 3,6 ┃ 3,2 ┃ ∅ ┃ ┃"ba"->"147"┃"foobar"->"467"┃"foobaz"->"value"┃
* ┗━━━┻━━━━━┻━━━━━┻━━━━━┻━━━┻╸╺╸╺╸╺╸╺┻━━━━━━━━━━━┻━━━━━━━━━━━━━━━┻━━━━━━━━━━━━━━━━━┛
* │ │ │ │ │ │ │ └▶ null byte that serves as a separator for the prefix.
* │ │ │ │ │ │ │ └▶ null byte that serves as a terminator for the prefix.
* │ │ │ │ │ │ └▶ size of the third last key ("ba" in this example)
* │ │ │ │ │ └▶ size of the third last value ("147" in this example)
* │ │ │ │ └▶ size of the second last key ("foobar" in this example)
@@ -202,13 +202,13 @@ public class PersistentMapDiskNode {
+ String.join("\n", entries.values().stream().map(NodeEntry::toString).collect(Collectors.toList()));
}
public <K, V> String toString(Function<byte[], K> keyDecoder, Function<byte[], V> valueDecoder) {
StringBuilder result = new StringBuilder();
public <K, V> String toString(final Function<byte[], K> keyDecoder, final Function<byte[], V> valueDecoder) {
final StringBuilder result = new StringBuilder();
result.append("@");
result.append(nodeOffset);
result.append(": ");
for (NodeEntry e : entries.values()) {
String s = e.toString(keyDecoder, valueDecoder);
for (final NodeEntry e : entries.values()) {
final String s = e.toString(keyDecoder, valueDecoder);
result.append("\n");
result.append(s);
}

View File

@@ -14,23 +14,23 @@ buildscript {
dependencies {
// usage: gradlew dependencyUpdates -Drevision=release
classpath 'com.github.ben-manes:gradle-versions-plugin:0.29.0'
classpath 'com.github.ben-manes:gradle-versions-plugin:0.33.0'
}
}
ext {
javaVersion=12
javaVersion=14
version_log4j2= '2.13.3' // keep in sync with spring-boot-starter-log4j2
version_spring = '2.3.2.RELEASE'
version_junit = '5.6.1'
version_junit_platform = '1.6.1'
version_spring = '2.3.4.RELEASE'
version_junit = '5.7.0'
version_junit_platform = '1.7.0'
lib_antlr = "org.antlr:antlr4:4.8-1"
lib_commons_collections4 = 'org.apache.commons:commons-collections4:4.4'
lib_commons_lang3 = 'org.apache.commons:commons-lang3:3.10'
lib_commons_lang3 = 'org.apache.commons:commons-lang3:3.11'
lib_jackson_databind = 'com.fasterxml.jackson.core:jackson-databind:2.11.2'
lib_log4j2_core = "org.apache.logging.log4j:log4j-core:${version_log4j2}"
@@ -92,7 +92,7 @@ subprojects {
// dependencies that all sub-projects have
dependencies {
testImplementation "org.junit.jupiter:junit-jupiter-engine:${version_junit}"
testImplementation("org.junit.jupiter:junit-jupiter-engine:${version_junit}")
testImplementation "org.junit.jupiter:junit-jupiter-params:${version_junit}" // for @ParameterizedTest
testImplementation "org.junit.platform:junit-platform-launcher:${version_junit_platform}" // needed by eclipse
}

View File

@@ -32,6 +32,7 @@ import org.lucares.pdb.datastore.lang.Expression;
import org.lucares.pdb.datastore.lang.ExpressionToDocIdVisitor;
import org.lucares.pdb.datastore.lang.NewProposerParser;
import org.lucares.pdb.datastore.lang.QueryLanguageParser;
import org.lucares.pdb.diskstorage.DiskStoreStats;
import org.lucares.pdb.map.PersistentMap;
import org.lucares.utils.Preconditions;
import org.lucares.utils.cache.HotEntryCache;
@@ -235,11 +236,16 @@ public class DataStore implements AutoCloseable {
public List<Doc> search(final Query query) {
try {
DiskStoreStats.resetDiskRead();
final List<Doc> result = new ArrayList<>();
final PartitionLongList docIdsList = executeQuery(query);
LOGGER.trace("query {} found {} docs", query, docIdsList.size());
DiskStoreStats.incrementDiskRead();
LOGGER.info("Disk reads - search {}", DiskStoreStats.getAndResetDiskRead());
final List<Doc> docs = mapDocIdsToDocs(docIdsList);
LOGGER.info("Disk reads - mappedToDocIds {}", DiskStoreStats.getAndResetDiskRead());
result.addAll(docs);
return result;

View File

@@ -1,6 +1,5 @@
package org.lucares.pdb.datastore.internal;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashSet;
@@ -8,11 +7,12 @@ import java.util.List;
import java.util.Set;
import org.lucares.pdb.api.DateTimeRange;
import org.lucares.utils.DateBucketUnit;
import org.lucares.utils.LongToDateBucket;
public class DateIndexExtension {
private static final LongToDateBucket longToDateBucket = new LongToDateBucket("yyyyMM", ChronoUnit.MONTHS);
private static final LongToDateBucket longToDateBucket = new LongToDateBucket("yyyyMM", DateBucketUnit.MONTH);
static Set<String> toDateIndexPrefix(final DateTimeRange dateRange) {
return longToDateBucket.toDateIndexPrefix(dateRange.getStart(), dateRange.getEnd());
@@ -31,7 +31,7 @@ public class DateIndexExtension {
*/
static List<ParititionId> toPartitionIds(final DateTimeRange dateRange) {
final List<String> partitionIds = longToDateBucket.toPartitionIds(dateRange.getStart(), dateRange.getEnd(),
ChronoUnit.MONTHS);
DateBucketUnit.MONTH);
final List<ParititionId> result = new ArrayList<>();
for (final String partitionId : partitionIds) {

pdb-js/package-lock.json (generated, 3077 changed lines)

File diff suppressed because it is too large

View File

@@ -12,30 +12,30 @@
},
"private": true,
"dependencies": {
"@angular/animations": "^10.0.8",
"@angular/cdk": "^10.1.3",
"@angular/common": "^10.0.8",
"@angular/compiler": "^10.0.8",
"@angular/core": "^10.0.8",
"@angular/forms": "^10.0.8",
"@angular/material": "^10.1.3",
"@angular/platform-browser": "^10.0.8",
"@angular/platform-browser-dynamic": "^10.0.8",
"@angular/router": "^10.0.8",
"moment": "^2.27.0",
"rxjs": "^6.5.5",
"rxjs-compat": "^6.5.5",
"@angular/animations": "^10.1.2",
"@angular/cdk": "^10.2.1",
"@angular/common": "^10.1.2",
"@angular/compiler": "^10.1.2",
"@angular/core": "^10.1.2",
"@angular/forms": "^10.1.2",
"@angular/material": "^10.2.1",
"@angular/platform-browser": "^10.1.2",
"@angular/platform-browser-dynamic": "^10.1.2",
"@angular/router": "^10.1.2",
"moment": "^2.28.0",
"rxjs": "^6.6.3",
"rxjs-compat": "^6.6.3",
"tslib": "^2.0.0",
"zone.js": "~0.10.2"
},
"devDependencies": {
"@angular-devkit/build-angular": "^0.1000.5",
"@angular/cli": "^10.0.5",
"@angular/compiler-cli": "^10.0.8",
"@angular/language-service": "^10.0.8",
"@types/jasmine": "^3.5.10",
"@angular-devkit/build-angular": "^0.1000.8",
"@angular/cli": "^10.1.2",
"@angular/compiler-cli": "^10.1.2",
"@angular/language-service": "^10.1.2",
"@types/jasmine": "^3.5.14",
"@types/jasminewd2": "~2.0.8",
"@types/node": "^12.12.47",
"@types/node": "^12.12.62",
"codelyzer": "^6.0.0",
"jasmine-core": "~3.5.0",
"jasmine-spec-reporter": "~5.0.0",

View File

@@ -202,6 +202,7 @@ export class PlotRequest {
generateThumbnail : boolean;
intervalUnit: string;
intervalValue: number;
renderBarChartTickLabels: boolean = false;
copy(): PlotRequest {
return JSON.parse(JSON.stringify(this));

View File

@@ -36,6 +36,7 @@
<mat-label>Intervals (only bar chart):</mat-label>
<mat-select [(value)]="intervalUnit">
<mat-option value="NO_INTERVAL">-</mat-option>
<mat-option value="SECOND">second</mat-option>
<mat-option value="MINUTE">minute</mat-option>
<mat-option value="HOUR">hour</mat-option>
<mat-option value="DAY">day</mat-option>
@@ -45,6 +46,9 @@
</mat-select>
</mat-form-field>
</div>
<div [hidden]="!selectedPlotTypesContains('BAR')">
<mat-checkbox [(ngModel)]="renderBarChartTickLabels">Show Tick Labels (bar chart)</mat-checkbox>
</div>
<pdb-y-axis-definition #y1AxisDefinitionComponent yIndex="1"></pdb-y-axis-definition>
<pdb-y-axis-definition #y2AxisDefinitionComponent yIndex="2" [hidden]="!y2AxisAvailable"></pdb-y-axis-definition>

View File

@@ -53,6 +53,7 @@ export class VisualizationPageComponent implements OnInit {
intervalUnit = 'NO_INTERVAL';
intervalValue = 1;
renderBarChartTickLabels = false;
constructor(private plotService: PlotService, private snackBar: MatSnackBar) {
}
@@ -172,6 +173,7 @@ export class VisualizationPageComponent implements OnInit {
request.generateThumbnail = this.enableGallery;
request.intervalUnit = this.intervalUnit;
request.intervalValue = this.intervalValue;
request.renderBarChartTickLabels = this.renderBarChartTickLabels;
return request;
}

View File

@@ -9,7 +9,7 @@ import { YAxisDefinition } from '../plot.service';
export class YAxisDefinitionComponent {
yAxisScale: string = "LOG10";
yAxisUnit: string = "SECONDS";
yAxisUnit: string = "MINUTES";
minYValue: number = 0;
maxYValue: number = 300;

View File

@@ -82,7 +82,7 @@ public class BarChartAggregator implements CustomAggregator, IndexedAggregator,
@Override
public String renderLabels(final GnuplotAxis xAxis) {
return String.format("set label at %s %f, %d '%s' center front offset 0,0.3", // front
return String.format(Locale.US, "set label at %s %f, %d '%s' center front offset 0,0.3", // front
xAxis == GnuplotAxis.X1 ? "first" : "second", //
getIndex() + 0.5, //
getCount(), //

View File

@@ -8,6 +8,7 @@ import java.util.UUID;
import java.util.concurrent.atomic.AtomicLong;
import org.lucares.recommind.logs.GnuplotAxis;
import org.lucares.recommind.logs.GnuplotSettings;
public class BarChartAggregatorForIntervals implements CustomAggregator, IndexedAggregator, BarChart {
@@ -24,8 +25,11 @@ public class BarChartAggregatorForIntervals implements CustomAggregator, Indexed
private int count;
public BarChartAggregatorForIntervals(final Interval interval) {
this.interval = interval;
private final PlotSettings settings;
public BarChartAggregatorForIntervals(final PlotSettings settings) {
this.settings = settings;
this.interval = settings.getInterval().get();
buckets = interval.getBuckets();
}
@@ -72,20 +76,35 @@ public class BarChartAggregatorForIntervals implements CustomAggregator, Indexed
final StringBuilder csv = new StringBuilder();
final boolean isMiddleSeries = getIndex() == numberOfDataSeries / 2;
int i = 0;
int offset = 0;
for (final String bucketId : bucketIds()) {
final long count = buckets.get(bucketId).get();
csv.append(String.format(Locale.US, "%f", offset + getIndex() * SPACE_BETWEEN_BARS + 0.5));
csv.append(",");
csv.append(renderLabels && isMiddleSeries ? bucketId : "");
csv.append(renderLabels && isMiddleSeries && showLabel(i, buckets.size()) ? bucketId : "");
csv.append(",");
csv.append(count);
csv.append("\n");
offset += numberOfDataSeries;
i++;
}
return csv.toString();
}
private boolean showLabel(final int index, final int numberOfBuckets) {
final int width = settings.getWidth();
final int widthInPx = width - GnuplotSettings.GNUPLOT_LEFT_RIGHT_MARGIN;
final long maxLabels = Math.max(1, widthInPx / (GnuplotSettings.TICKS_FONT_SIZE * 8));
if (maxLabels >= numberOfBuckets) {
return true;
} else {
return index % (int) Math.ceil(numberOfBuckets / (double) maxLabels) == 0;
}
}
private SortedSet<String> bucketIds() {
return new TreeSet<>(buckets.keySet());
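
For illustration of the thinning rule in showLabel above (the two GnuplotSettings constants are not visible in this diff, so the numbers are assumed): with a plot width of 1200 px, a left/right margin of 200 px and a tick font size of 10, widthInPx is 1000 and maxLabels = max(1, 1000 / (10 * 8)) = 12; with 100 buckets a label is then emitted every ceil(100 / 12.0) = 9 buckets, i.e. at indices 0, 9, 18, …, leaving 12 labels on the axis.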

View File

@@ -56,7 +56,7 @@ public class BarChartHandler extends AggregateHandler {
appendln(result, barAggregator.asCsv(settings.isRenderLabels()));
appendln(result, "EOD");
if (settings.isRenderLabels()) {
if (settings.isRenderLabels() && settings.isRenderBarChartTickLabels()) {
appendfln(result, barAggregator.renderLabels(getxAxis()));
}
@@ -82,7 +82,7 @@ public class BarChartHandler extends AggregateHandler {
CustomAggregator createCustomAggregator(final Path tmpDir, final PlotSettings plotSettings,
final long fromEpochMilli, final long toEpochMilli) {
if (plotSettings.getInterval().isPresent()) {
return new BarChartAggregatorForIntervals(plotSettings.getInterval().get());
return new BarChartAggregatorForIntervals(plotSettings);
} else {
return new BarChartAggregator();
}

View File

@@ -1,6 +1,5 @@
package org.lucares.pdb.plot.api;
import java.time.temporal.ChronoUnit;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -10,38 +9,6 @@ import org.lucares.pdb.api.DateTimeRange;
import org.lucares.utils.LongToDateBucket;
public class Interval {
public enum IntervalTimeUnit {
MINUTE, HOUR, DAY, WEEK, MONTH, YEAR;
public static boolean isValid(final String value) {
for (final IntervalTimeUnit e : values()) {
if (e.name().equals(value)) {
return true;
}
}
return false;
}
public ChronoUnit toChronoUnit() {
switch (this) {
case MINUTE:
return ChronoUnit.MINUTES;
case HOUR:
return ChronoUnit.HOURS;
case DAY:
return ChronoUnit.DAYS;
case WEEK:
return ChronoUnit.WEEKS;
case MONTH:
return ChronoUnit.MONTHS;
case YEAR:
return ChronoUnit.YEARS;
default:
throw new IllegalArgumentException("Unexpected value: " + this);
}
}
}
private final IntervalTimeUnit intervalTimeUnit;
private final int value;
@@ -61,10 +28,12 @@ public class Interval {
private String toDateFormatForBucketer(final IntervalTimeUnit intervalTimeUnit) {
switch (intervalTimeUnit) {
case SECOND:
return "yyyy-MM-dd'\\n'HH:mm:ss";
case MINUTE:
return "yyyy-MM-dd HH:mm";
return "yyyy-MM-dd'\\n'HH:mm";
case HOUR:
return "yyyy-MM-dd HH";
return "yyyy-MM-dd'\\n'HH:00";
case DAY:
return "yyyy-MM-dd";
case WEEK:

View File

@@ -0,0 +1,37 @@
package org.lucares.pdb.plot.api;
import org.lucares.utils.DateBucketUnit;
public enum IntervalTimeUnit {
SECOND, MINUTE, HOUR, DAY, WEEK, MONTH, YEAR;
public static boolean isValid(final String value) {
for (final IntervalTimeUnit e : values()) {
if (e.name().equals(value)) {
return true;
}
}
return false;
}
public DateBucketUnit toChronoUnit() {
switch (this) {
case SECOND:
return DateBucketUnit.SECOND;
case MINUTE:
return DateBucketUnit.MINUTE;
case HOUR:
return DateBucketUnit.HOUR;
case DAY:
return DateBucketUnit.DAY;
case WEEK:
return DateBucketUnit.WEEK;
case MONTH:
return DateBucketUnit.MONTH;
case YEAR:
return DateBucketUnit.YEAR;
default:
throw new IllegalArgumentException("Unexpected value: " + this);
}
}
}
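
A short usage sketch of the extracted enum, with names taken from this diff; the literal "HOUR" stands in for whatever the plot request supplies:

// hedged sketch: validate the request string, then map it to the bucketing unit
if (IntervalTimeUnit.isValid("HOUR")) {
    final DateBucketUnit unit = IntervalTimeUnit.valueOf("HOUR").toChronoUnit(); // DateBucketUnit.HOUR
}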

View File

@@ -45,6 +45,8 @@ public class PlotSettings {
private Interval interval;
private boolean renderBarChartTickLabels;
public String getQuery() {
return query;
}
@@ -197,4 +199,12 @@ public class PlotSettings {
this.interval = interval;
}
public boolean isRenderBarChartTickLabels() {
return renderBarChartTickLabels;
}
public void setRenderBarChartTickLabels(final boolean renderBarChartTickLabels) {
this.renderBarChartTickLabels = renderBarChartTickLabels;
}
}

View File

@@ -35,6 +35,7 @@ public class GnuplotSettings {
private AxisSettings xAxisSettings = new AxisSettings();
private boolean renderLabels = true;
private DateTimeRange dateTimeRange;
private boolean renderBarChartTickLabels;
public GnuplotSettings(final Path output) {
this.output = output;
@@ -151,6 +152,14 @@ public class GnuplotSettings {
}
}
public boolean isRenderBarChartTickLabels() {
return renderBarChartTickLabels;
}
public void setRenderBarChartTickLabels(final boolean renderBarChartTickLabels) {
this.renderBarChartTickLabels = renderBarChartTickLabels;
}
// plot 'sample.txt' using 1:2 title 'Bytes' with linespoints 2
}

View File

@@ -113,6 +113,7 @@ public class Plotter {
gnuplotSettings.setAggregates(plotSettings.getAggregates());
gnuplotSettings.setKeyOutside(plotSettings.isKeyOutside());
gnuplotSettings.setRenderBarChartTickLabels(plotSettings.isRenderBarChartTickLabels());
gnuplot.plot(gnuplotSettings, dataSeries);
}

View File

@@ -26,6 +26,7 @@ dependencies {
testImplementation(lib_spring_boot_test){
exclude module: 'spring-boot-starter-logging'
exclude module: 'junit'
exclude module: 'junit-vintage-engine'
}
}

View File

@@ -35,6 +35,7 @@ class PlotSettingsTransformer {
result.setY2(request.getY2());
result.setAggregates(toAggregateInternal(request.getY1(), request.getY2(), request.getAggregates()));
result.setInterval(Interval.create(request.getIntervalUnit(), request.getIntervalValue(), result.dateRange()));
result.setRenderBarChartTickLabels(request.isRenderBarChartTickLabels());
return result;
}

View File

@@ -37,6 +37,8 @@ public class PlotRequest {
private String intervalUnit;
private int intervalValue;
private boolean renderBarChartTickLabels;
public String getQuery() {
return query;
}
@@ -169,4 +171,12 @@ public class PlotRequest {
public void setIntervalValue(final int intervalValue) {
this.intervalValue = intervalValue;
}
public boolean isRenderBarChartTickLabels() {
return renderBarChartTickLabels;
}
public void setRenderBarChartTickLabels(final boolean renderBarChartTickLabels) {
this.renderBarChartTickLabels = renderBarChartTickLabels;
}
}

View File

@@ -44,9 +44,9 @@
<logger name="org.lucares.metrics.plotter" level="DEBUG" />
<logger name="org.lucares.metrics.gnuplot" level="DEBUG" />
<logger name="org.lucares.metrics.aggregator.parallelRequests" level="DEBUG" />
<logger name="org.lucares.metrics.dataStore" level="DEBUG" />
<!--
<logger name="org.lucares.metrics.ingestion.tagsToFile.newPdbWriter" level="DEBUG" />
<logger name="org.lucares.metrics.dataStore" level="DEBUG" />
<logger name="org.lucares.pdb.datastore.lang.QueryCompletionPdbLangParser" level="TRACE" />
<logger name="org.lucares.pdb.datastore.lang.ExpressionToDocIdVisitor" level="TRACE" />
-->

View File

@@ -44,8 +44,7 @@ public class FastISODateParserTest {
Arguments.of("2018-11-18T14:55:49.Z"), //
Arguments.of("2018-11-18T14:55:49.-01:23"), //
Arguments.of("2018-11-18T14:55:49Z"), //
Arguments.of("2018-11-18T14:55:49-01:23"), //
Arguments.of("2018-11-18 14:55:49,123") //
Arguments.of("2018-11-18T14:55:49-01:23") //
);
}
@@ -76,7 +75,8 @@ public class FastISODateParserTest {
Arguments.of("2018-11-18T14:42:49,123+12:34", "2018-11-18T14:42:49.123+12:34"), // with comma instead of
// dot
Arguments.of("2018-11-18T14:42:49.123", "2018-11-18T14:42:49.123Z"), // without timezone
Arguments.of("2018-11-18T14:42:49,123", "2018-11-18T14:42:49.123Z") // with command, without timezone
Arguments.of("2018-11-18T14:42:49,123", "2018-11-18T14:42:49.123Z"), // with command, without timezone
Arguments.of("2018-11-18 14:42:49,123", "2018-11-18T14:42:49.123Z") // without the 'T'
);
}

View File

@@ -6,9 +6,9 @@ import java.time.temporal.TemporalAdjuster;
public class BeginningOfNextInterval implements TemporalAdjuster {
private final ChronoUnit unit;
private final DateBucketUnit unit;
public BeginningOfNextInterval(final ChronoUnit unit) {
public BeginningOfNextInterval(final DateBucketUnit unit) {
this.unit = unit;
}
@@ -19,27 +19,31 @@ public class BeginningOfNextInterval implements TemporalAdjuster {
result = result.with(startOfInterval);
switch (unit) {
case MINUTES: {
case SECOND: {
result = result.plus(1, ChronoUnit.SECONDS);
break;
}
case MINUTE: {
result = result.plus(1, ChronoUnit.MINUTES);
break;
}
case HOURS: {
case HOUR: {
result = result.plus(1, ChronoUnit.HOURS);
break;
}
case DAYS: {
case DAY: {
result = result.plus(1, ChronoUnit.DAYS);
break;
}
case WEEKS: {
case WEEK: {
result = result.plus(1, ChronoUnit.WEEKS);
break;
}
case MONTHS: {
case MONTH: {
result = result.plus(1, ChronoUnit.MONTHS);
break;
}
case YEARS: {
case YEAR: {
result = result.plus(1, ChronoUnit.YEARS);
break;
}

View File

@@ -0,0 +1,5 @@
package org.lucares.utils;
public enum DateBucketUnit {
SECOND, MINUTE, HOUR, DAY, WEEK, MONTH, YEAR;
}

View File

@@ -6,9 +6,9 @@ import java.time.temporal.TemporalAdjuster;
public class EndOfInterval implements TemporalAdjuster {
private final ChronoUnit unit;
private final DateBucketUnit unit;
public EndOfInterval(final ChronoUnit unit) {
public EndOfInterval(final DateBucketUnit unit) {
this.unit = unit;
}

View File

@@ -4,7 +4,6 @@ import java.time.Instant;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.List;
import java.util.Map.Entry;
@@ -55,14 +54,14 @@ public class LongToDateBucket {
*/
private final DateTimeFormatter datePattern;
ChronoUnit chronoUnit;
DateBucketUnit chronoUnit;
// visible for test
final ConcurrentNavigableMap<Long, DatePrefixAndRange> datePrefixCache = new ConcurrentSkipListMap<>();
private final AtomicReference<DatePrefixAndRange> lastAccessed = new AtomicReference<>(null);
public LongToDateBucket(final String dateFormatPattern, final ChronoUnit chronoUnit) {
public LongToDateBucket(final String dateFormatPattern, final DateBucketUnit chronoUnit) {
this.chronoUnit = chronoUnit;
this.datePattern = DateTimeFormatter.ofPattern(dateFormatPattern);
}
@@ -118,7 +117,7 @@ public class LongToDateBucket {
* @return
*/
public List<String> toPartitionIds(final OffsetDateTime start, final OffsetDateTime end,
final ChronoUnit chronoUnit) {
final DateBucketUnit chronoUnit) {
final List<String> result = new ArrayList<>();
OffsetDateTime current = start;

View File

@@ -1,75 +1,69 @@
package org.lucares.utils;
import java.time.temporal.ChronoField;
import java.time.temporal.ChronoUnit;
import java.time.temporal.Temporal;
import java.time.temporal.TemporalAdjuster;
public class StartOfInterval implements TemporalAdjuster {
private final ChronoUnit unit;
private final DateBucketUnit unit;
public StartOfInterval(final ChronoUnit unit) {
public StartOfInterval(final DateBucketUnit unit) {
this.unit = unit;
}
@Override
public Temporal adjustInto(final Temporal temporal) {
Temporal result = temporal;
for (final ChronoUnit chronoUnit : ChronoUnit.values()) {
if (chronoUnit.compareTo(unit) >= 0) {
result = result.with(ChronoField.NANO_OF_SECOND, 0);
result = result.with(ChronoField.MICRO_OF_SECOND, 0);
result = result.with(ChronoField.MILLI_OF_SECOND, 0);
for (final DateBucketUnit dateBucketUnit : DateBucketUnit.values()) {
if (dateBucketUnit.compareTo(unit) >= 0) {
break;
}
switch (chronoUnit) {
case NANOS: {
result = result.with(ChronoField.NANO_OF_SECOND, 0);
break;
}
case MICROS: {
result = result.with(ChronoField.MICRO_OF_SECOND, 0);
break;
}
case MILLIS: {
result = result.with(ChronoField.MILLI_OF_SECOND, 0);
break;
}
case SECONDS: {
switch (dateBucketUnit) {
case SECOND: {
result = result.with(ChronoField.SECOND_OF_MINUTE, 0);
break;
}
case MINUTES: {
case MINUTE: {
result = result.with(ChronoField.MINUTE_OF_HOUR, 0);
break;
}
case HOURS: {
case HOUR: {
result = result.with(ChronoField.HOUR_OF_DAY, 0);
break;
}
case DAYS: {
case DAY: {
switch (unit) {
case WEEKS: {
case WEEK: {
result = result.with(ChronoField.DAY_OF_WEEK, 1);
break;
}
case MONTHS: {
case MONTH: {
result = result.with(ChronoField.DAY_OF_MONTH, 1);
break;
}
case YEAR: {
result = result.with(ChronoField.MONTH_OF_YEAR, 1);
break;
}
default:
throw new IllegalArgumentException("Unexpected value: " + unit);
}
break;
}
case MONTHS: {
case MONTH: {
result = result.with(ChronoField.MONTH_OF_YEAR, 1);
break;
}
case HALF_DAYS:
case WEEKS:
case WEEK:
break;
default:
throw new IllegalArgumentException("Unexpected value: " + chronoUnit);
throw new IllegalArgumentException("Unexpected value: " + dateBucketUnit);
}
}
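
A hedged usage sketch of the rewritten adjuster, assuming StartOfInterval and DateBucketUnit are on the classpath and the loop zeroes every field below the requested unit as shown:

import java.time.OffsetDateTime;
import java.time.ZoneOffset;

final OffsetDateTime t = OffsetDateTime.of(2020, 9, 28, 10, 1, 20, 0, ZoneOffset.UTC);
final OffsetDateTime bucketStart = t.with(new StartOfInterval(DateBucketUnit.MONTH));
// expected: 2020-09-01T00:00Z, the start of the month bucket containing t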

View File

@@ -1,13 +0,0 @@
package org.lucares.utils.cache;
import java.util.concurrent.ExecutionException;
public class RuntimeExcecutionException extends RuntimeException {
private static final long serialVersionUID = -3626851728980513527L;
public RuntimeExcecutionException(final ExecutionException e) {
super(e);
}
}

View File

@@ -2,7 +2,6 @@ package org.lucares.utils;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.DoubleSummaryStatistics;
@@ -49,61 +48,42 @@ public class LongToDateBucketTest {
@MethodSource("provider")
public void test(final OffsetDateTime start, final OffsetDateTime end, final Set<String> expected) {
final Set<String> actual = new LongToDateBucket("yyyyMM", ChronoUnit.MONTHS).toDateIndexPrefix(start, end);
final Set<String> actual = new LongToDateBucket("yyyyMM", DateBucketUnit.MONTH).toDateIndexPrefix(start, end);
Assertions.assertEquals(expected, actual);
}
// @Test
// public void testDateToDateIndexPrefix() {
//
// final long mid_201711 = OffsetDateTime.of(2017, 11, 23, 2, 2, 2, 0, ZoneOffset.UTC).toInstant().toEpochMilli();
// final long mid_201712 = OffsetDateTime.of(2017, 12, 7, 1, 1, 1, 0, ZoneOffset.UTC).toInstant().toEpochMilli();
// final long min_201801 = OffsetDateTime.of(2018, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant().toEpochMilli();
// final long max_201801 = OffsetDateTime.of(2018, 1, 31, 23, 59, 59, 999_999_999, ZoneOffset.UTC).toInstant()
// .toEpochMilli();
// final LongToDateBucket longToDateBucket = new LongToDateBucket("yyyyMM");
//
// Assertions.assertEquals("201712", longToDateBucket.toDateIndexPrefix(mid_201712));
// Assertions.assertEquals("201801", longToDateBucket.toDateIndexPrefix(min_201801));
// Assertions.assertEquals("201801", longToDateBucket.toDateIndexPrefix(max_201801));
// Assertions.assertEquals("201711", longToDateBucket.toDateIndexPrefix(mid_201711));
// }
@Test
public void testDateRanges() {
final OffsetDateTime mid_201712 = OffsetDateTime.of(2017, 12, 7, 1, 1, 1, 0, ZoneOffset.UTC)
.withOffsetSameInstant(ZoneOffset.ofHours(-2));
final OffsetDateTime min_201801 = OffsetDateTime.of(2018, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)
.withOffsetSameInstant(ZoneOffset.ofHours(-8));
final OffsetDateTime min_201802 = OffsetDateTime.of(2018, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC)
.withOffsetSameInstant(ZoneOffset.ofHours(12));
final OffsetDateTime mid_201712 = OffsetDateTime.of(2017, 12, 7, 1, 1, 1, 0, ZoneOffset.UTC);
final OffsetDateTime min_201801 = OffsetDateTime.of(2018, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
final OffsetDateTime min_201802 = OffsetDateTime.of(2018, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
final LongToDateBucket longToDateBucket = new LongToDateBucket("yyyyMM", ChronoUnit.MONTHS);
final LongToDateBucket longToDateBucket = new LongToDateBucket("yyyyMM", DateBucketUnit.MONTH);
final List<String> dateIndexPrefixesWithEmptyCache = longToDateBucket.toPartitionIds(mid_201712, min_201802,
ChronoUnit.MONTHS);
DateBucketUnit.MONTH);
Assertions.assertEquals(Arrays.asList("201712", "201801", "201802"), dateIndexPrefixesWithEmptyCache);
final List<String> dateIndexPrefixesWithFilledCache = longToDateBucket.toPartitionIds(mid_201712, min_201801,
ChronoUnit.MONTHS);
DateBucketUnit.MONTH);
Assertions.assertEquals(Arrays.asList("201712", "201801"), dateIndexPrefixesWithFilledCache);
final List<String> dateIndexPrefixesOneMonth = longToDateBucket.toPartitionIds(mid_201712, mid_201712,
ChronoUnit.MONTHS);
DateBucketUnit.MONTH);
Assertions.assertEquals(Arrays.asList("201712"), dateIndexPrefixesOneMonth);
}
@Test
public void testDateRangeToEpochMilli() {
final OffsetDateTime mid_201712 = OffsetDateTime.of(2017, 12, 7, 1, 1, 1, 0, ZoneOffset.ofHours(3));
final OffsetDateTime min_201802 = OffsetDateTime.of(2018, 2, 15, 0, 0, 0, 0, ZoneOffset.ofHours(7));
final OffsetDateTime mid_201712 = OffsetDateTime.of(2017, 12, 7, 1, 1, 1, 0, ZoneOffset.UTC);
final OffsetDateTime min_201802 = OffsetDateTime.of(2018, 2, 15, 0, 0, 0, 0, ZoneOffset.UTC);
final long exp_201712 = OffsetDateTime.of(2017, 12, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant().toEpochMilli();
final long exp_201801 = OffsetDateTime.of(2018, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant().toEpochMilli();
final long exp_201802 = OffsetDateTime.of(2018, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant().toEpochMilli();
final LongToDateBucket longToDateBucket = new LongToDateBucket("yyyyMM", ChronoUnit.MONTHS);
final LongToDateBucket longToDateBucket = new LongToDateBucket("yyyyMM", DateBucketUnit.MONTH);
final List<Long> dateIndexEpochMillis = longToDateBucket.toDateIndexEpochMillis(mid_201712, min_201802);
Assertions.assertEquals(Arrays.asList(exp_201712, exp_201801, exp_201802), dateIndexEpochMillis);
@@ -121,7 +101,7 @@ public class LongToDateBucketTest {
final int warmup = 20 * factor;
final int rounds = warmup + 20;
final LongToDateBucket longToDateBucket = new LongToDateBucket("yyyyMM", ChronoUnit.MONTHS);
final LongToDateBucket longToDateBucket = new LongToDateBucket("yyyyMM", DateBucketUnit.MONTH);
// fill the cache
for (long i = min; i < max; i += 3600 * 24 * 28) {