add interval splitting for bar charts

This commit is contained in:
2020-04-05 08:14:09 +02:00
parent 75391f21ff
commit 50f555d23c
19 changed files with 600 additions and 80 deletions

View File

@@ -1,5 +1,6 @@
package org.lucares.pdb.datastore.internal;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashSet;
@@ -11,7 +12,7 @@ import org.lucares.utils.LongToDateBucket;
public class DateIndexExtension {
private static final LongToDateBucket longToDateBucket = new LongToDateBucket("yyyyMM");
private static final LongToDateBucket longToDateBucket = new LongToDateBucket("yyyyMM", ChronoUnit.MONTHS);
static Set<String> toDateIndexPrefix(final DateTimeRange dateRange) {
return longToDateBucket.toDateIndexPrefix(dateRange.getStart(), dateRange.getEnd());
@@ -21,10 +22,6 @@ public class DateIndexExtension {
return new ParititionId(longToDateBucket.toPartitionId(epochMilli));
}
public static String toDateIndexPrefix(final long epochMilli) {
return longToDateBucket.toDateIndexPrefix(epochMilli);
}
/**
* only for tests, use toPartitionIds(final DateTimeRange dateRange,final
* Collection<? extends PartitionId> availablePartitionIds) instead
@@ -33,7 +30,8 @@ public class DateIndexExtension {
* @return
*/
static List<ParititionId> toPartitionIds(final DateTimeRange dateRange) {
final List<String> partitionIds = longToDateBucket.toPartitionIds(dateRange.getStart(), dateRange.getEnd());
final List<String> partitionIds = longToDateBucket.toPartitionIds(dateRange.getStart(), dateRange.getEnd(),
ChronoUnit.MONTHS);
final List<ParititionId> result = new ArrayList<>();
for (final String partitionId : partitionIds) {

View File

@@ -200,6 +200,8 @@ export class PlotRequest {
aggregates : Array<string>;
keyOutside : boolean = false;
generateThumbnail : boolean;
intervalUnit: string;
intervalValue: number;
copy(): PlotRequest {
return JSON.parse(JSON.stringify(this));

View File

@@ -32,6 +32,19 @@
<pdb-limit-by #limitbycomponent></pdb-limit-by>
<mat-form-field>
<mat-label>Intervals:</mat-label>
<mat-select [(value)]="intervalUnit">
<mat-option value="NO_INTERVAL">-</mat-option>
<mat-option value="MINUTE">minute</mat-option>
<mat-option value="HOUR">hour</mat-option>
<mat-option value="DAY">day</mat-option>
<mat-option value="WEEK">week</mat-option>
<mat-option value="MONTH">month</mat-option>
<mat-option value="YEAR">year</mat-option>
</mat-select>
</mat-form-field>
<pdb-y-axis-definition #y1AxisDefinitionComponent yIndex="1"></pdb-y-axis-definition>
<pdb-y-axis-definition #y2AxisDefinitionComponent yIndex="2" *ngIf="y2AxisAvailable"></pdb-y-axis-definition>

View File

@@ -51,6 +51,9 @@ export class VisualizationPageComponent implements OnInit {
splitBy = null;
y2AxisAvailable = false;
intervalUnit = 'NO_INTERVAL';
intervalValue = 1;
constructor(private plotService: PlotService, private snackBar: MatSnackBar) {
}
@@ -163,6 +166,8 @@ export class VisualizationPageComponent implements OnInit {
request.aggregates = aggregates;
request.keyOutside = false;
request.generateThumbnail = this.enableGallery;
request.intervalUnit = this.intervalUnit;
request.intervalValue = this.intervalValue;
return request;
}

View File

@@ -126,7 +126,7 @@ public class AggregateHandlerCollection {
if (optionalAggregator.isPresent()) {
final CustomAggregator aggregator = optionalAggregator.get();
if (aggregator instanceof IndexedAggregator) {
((IndexedAggregator) aggregator).setIndex(index);
((IndexedAggregator) aggregator).setIndex(index, dataSeries.size());
index++;
}

View File

@@ -0,0 +1,20 @@
package org.lucares.pdb.plot.api;
import org.lucares.recommind.logs.GnuplotAxis;
/**
 * A bar-chart aggregation that can serialize itself into gnuplot input.
 *
 * Implementations produce an inline gnuplot datablock (see {@link #getDataName()}
 * and {@link #asCsv()}) plus optional "set label" commands for the bar values.
 */
public interface BarChart extends IndexedAggregator {

    /**
     * Serializes the aggregated bars as CSV lines (one line per bar) for use
     * inside a gnuplot heredoc datablock.
     */
    String asCsv();

    /**
     * Name of the gnuplot inline datablock holding this chart's data,
     * e.g. {@code $data<uuid>} — unique per aggregator instance.
     */
    String getDataName();

    /**
     * Total number of values over all intervals.
     *
     * @return total value count
     */
    long getCount();

    /**
     * Renders gnuplot {@code set label} commands that print each bar's count
     * above the bar, positioned on the given x axis.
     */
    String renderLabels(GnuplotAxis xAxis);
}

View File

@@ -5,17 +5,20 @@ import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Locale;
import java.util.UUID;
import org.lucares.pdb.api.RuntimeIOException;
import org.lucares.recommind.logs.GnuplotAxis;
public class BarChartAggregator implements CustomAggregator, IndexedAggregator {
public class BarChartAggregator implements CustomAggregator, IndexedAggregator, BarChart {
long count = 0;
private final Path tmpDir;
private Long index = null;
private Long numberOfDataSeries;
private final String dataName = "$data" + UUID.randomUUID().toString().replace("-", "");
@@ -25,8 +28,9 @@ public class BarChartAggregator implements CustomAggregator, IndexedAggregator {
}
@Override
public void setIndex(final long index) {
public void setIndex(final long index, final long numberOfDataSeries) {
this.index = index;
this.numberOfDataSeries = numberOfDataSeries;
}
@Override
@@ -37,6 +41,15 @@ public class BarChartAggregator implements CustomAggregator, IndexedAggregator {
return this.index;
}
@Override
public long getNumberOfDataSeries() throws IllegalStateException {
if (this.numberOfDataSeries == null) {
throw new IllegalStateException("index was not set");
}
return this.numberOfDataSeries;
}
@Override
public long getCount() {
return count;
}
@@ -46,6 +59,7 @@ public class BarChartAggregator implements CustomAggregator, IndexedAggregator {
count++;
}
@Override
public String asCsv() {
final StringBuilder csv = new StringBuilder();
@@ -77,9 +91,19 @@ public class BarChartAggregator implements CustomAggregator, IndexedAggregator {
return Aggregate.BAR;
}
@Override
public String getDataName() {
return dataName;
}
@Override
public String renderLabels(final GnuplotAxis xAxis) {
return String.format("set label at %s %f, %d '%s' center front offset 0,0.3", // front
xAxis == GnuplotAxis.X1 ? "first" : "second", //
getIndex() + 0.5, //
getCount(), //
String.format(Locale.US, "%,d", getCount()));
}
}

View File

@@ -0,0 +1,145 @@
package org.lucares.pdb.plot.api;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Locale;
import java.util.Map;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicLong;
import org.lucares.pdb.api.RuntimeIOException;
import org.lucares.recommind.logs.GnuplotAxis;
/**
 * Bar-chart aggregator that splits the counted values into time-interval
 * buckets (e.g. one bar per day) instead of a single bar per data series.
 *
 * The buckets are precomputed from the plot's date range via
 * {@link Interval#getBuckets()}; each incoming value increments the counter
 * of the bucket its timestamp falls into.
 */
public class BarChartAggregatorForIntervals implements CustomAggregator, IndexedAggregator, BarChart {

    private final Path tmpDir;

    // Index of this data series among all series of the plot; null until setIndex is called.
    private Long index = null;
    private Long numberOfDataSeries;

    // Unique gnuplot datablock name for this aggregator instance.
    private final String dataName = "$data" + UUID.randomUUID().toString().replace("-", "");

    private final Interval interval;
    private final Map<String, AtomicLong> buckets;

    // Total number of values over all buckets; long so it matches getCount()
    // and cannot overflow on large plots.
    private long count;

    public BarChartAggregatorForIntervals(final Path tmpDir, final Interval interval) {
        this.tmpDir = tmpDir;
        this.interval = interval;
        buckets = interval.getBuckets();
    }

    @Override
    public void setIndex(final long index, final long numberOfDataSeries) {
        this.index = index;
        this.numberOfDataSeries = numberOfDataSeries;
    }

    @Override
    public long getCount() {
        return count;
    }

    @Override
    public long getIndex() throws IllegalStateException {
        if (this.index == null) {
            throw new IllegalStateException("index was not set");
        }
        return this.index;
    }

    @Override
    public long getNumberOfDataSeries() throws IllegalStateException {
        if (this.numberOfDataSeries == null) {
            throw new IllegalStateException("numberOfDataSeries was not set");
        }
        return this.numberOfDataSeries;
    }

    @Override
    public void addValue(final long epochMilli, final long value) {
        final String bucketId = interval.toBucket(epochMilli);
        // Buckets are precomputed from the plot's date range, but a timestamp
        // just outside that range can still arrive. Create its bucket on
        // demand instead of dropping the value (previously a swallowed NPE).
        buckets.computeIfAbsent(bucketId, key -> new AtomicLong()).incrementAndGet();
        count++;
    }

    @Override
    public String asCsv() {
        // One CSV line per bucket: xPosition,bucketId,count. Buckets of the
        // different data series are interleaved on the x axis, hence the
        // per-bucket offset of numberOfDataSeries.
        final StringBuilder csv = new StringBuilder();
        long offset = 0;
        for (final String bucketId : bucketIds()) {
            final long bucketCount = buckets.get(bucketId).get();
            csv.append(getIndex() + 0.5 + offset);
            csv.append(",");
            csv.append(bucketId);
            csv.append(",");
            csv.append(bucketCount);
            csv.append("\n");
            offset += getNumberOfDataSeries();
        }
        return csv.toString();
    }

    // Bucket ids in ascending (chronological, since ids are date prefixes) order.
    private SortedSet<String> bucketIds() {
        return new TreeSet<>(buckets.keySet());
    }

    @Override
    public AggregatedData getAggregatedData() {
        try {
            // Persist the CSV to a temp file for the plot pipeline.
            final File dataFile = File.createTempFile("bar", ".dat", tmpDir.toFile());
            final String csv = asCsv();
            Files.writeString(dataFile.toPath(), csv, StandardCharsets.UTF_8);
            return new AggregatedData("label", dataFile);
        } catch (final IOException e) {
            throw new RuntimeIOException(e);
        }
    }

    @Override
    public Aggregate getType() {
        return Aggregate.BAR;
    }

    @Override
    public String getDataName() {
        return dataName;
    }

    @Override
    public String renderLabels(final GnuplotAxis xAxis) {
        // One "set label" command per bucket, printing the bucket's count
        // centered above its bar; same x-offset scheme as asCsv().
        final StringBuilder result = new StringBuilder();
        long offset = 0;
        for (final String bucketId : bucketIds()) {
            final long bucketCount = buckets.get(bucketId).get();
            final String label = String.format("set label at %s %f, %d '%s' center front offset 0,0.3\n", // front
                    xAxis == GnuplotAxis.X1 ? "first" : "second", //
                    getIndex() + 0.5 + offset, //
                    bucketCount, //
                    String.format(Locale.US, "%,d", bucketCount));
            result.append(label);
            offset += getNumberOfDataSeries();
        }
        return result.toString();
    }
}

View File

@@ -4,7 +4,6 @@ import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Locale;
import java.util.Optional;
import org.lucares.recommind.logs.AxisSettings;
@@ -43,8 +42,9 @@ public class BarChartHandler extends AggregateHandler {
result.setType(Type.Group);
result.setAxis(getxAxis());
result.setTicsEnabled(false);
// TODO revert next two lines
result.setFrom("0");
result.setTo(String.valueOf(dataSeries.size()));
// result.setTo(String.valueOf(dataSeries.size()));
final List<String> ticsLabels = new ArrayList<>();
int index = 1;
@@ -60,18 +60,14 @@ public class BarChartHandler extends AggregateHandler {
String beforePlot(final CustomAggregator aggregator, final GnuplotSettings settings) {
final StringBuilder result = new StringBuilder();
final BarChartAggregator barAggregator = (BarChartAggregator) aggregator;
final BarChart barAggregator = (BarChart) aggregator;
appendfln(result, "%s <<EOD", barAggregator.getDataName());
appendln(result, barAggregator.asCsv());
appendln(result, "EOD");
if (settings.isRenderLabels()) {
appendfln(result, "set label at %s %f, %d '%s' center front offset 0,0.3", // front
getxAxis() == GnuplotAxis.X1 ? "first" : "second", //
barAggregator.getIndex() + 0.5, //
barAggregator.getCount(), //
String.format(Locale.US, "%,d", barAggregator.getCount()));
appendfln(result, barAggregator.renderLabels(getxAxis()));
}
return result.toString();
@@ -80,20 +76,7 @@ public class BarChartHandler extends AggregateHandler {
@Override
String addPlot(final CustomAggregator aggregator, final LineStyle lineStyle, final Optional<String> title) {
final BarChartAggregator barAggregator = (BarChartAggregator) aggregator;
/*
* appendfln(result,
* "'%s' using 1:3:xtic(2) notitle with %s axes %s fs solid %s, \\", //
* aggregatedData.getDataFile(), // GnuplotLineType.Bar, // gnuplotXYAxis(), //
* lineStyle// );
*/
// return formatln("'%s' using 1:3:xtic(2) %s with %s axes %s fs solid %s, \\", //
// barAggregator.getDataName(), //
// gnuplotTitle(title), //
// GnuplotLineType.Bar, //
// gnuplotXYAxis(), //
// lineStyle.asGnuplotLineStyleBright()//
// );
final BarChart barAggregator = (BarChart) aggregator;
return formatln("'%s' using 1:3:%stic(2) %s with %s axes %s fs solid %s, \\", //
barAggregator.getDataName(), //
@@ -108,7 +91,11 @@ public class BarChartHandler extends AggregateHandler {
@Override
CustomAggregator createCustomAggregator(final Path tmpDir, final PlotSettings plotSettings,
final long fromEpochMilli, final long toEpochMilli) {
if (plotSettings.getInterval().isPresent()) {
return new BarChartAggregatorForIntervals(tmpDir, plotSettings.getInterval().get());
} else {
return new BarChartAggregator(tmpDir);
}
}
}

View File

@@ -4,8 +4,10 @@ public interface IndexedAggregator {
/**
* Set the index of this {@link CustomAggregator}.
*
* @param numberOfDataSeries
*/
public void setIndex(long index);
public void setIndex(long index, long numberOfDataSeries);
/**
* Returns the index.
@@ -13,4 +15,12 @@ public interface IndexedAggregator {
* @throws IllegalStateException if the index was not set
*/
public long getIndex() throws IllegalStateException;
/**
* Returns the number of dataSeries.
*
* @return number of data series
* @throws IllegalStateException if the number of data series was not set
*/
public long getNumberOfDataSeries() throws IllegalStateException;
}

View File

@@ -0,0 +1,117 @@
package org.lucares.pdb.plot.api;
import java.time.temporal.ChronoUnit;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import org.lucares.pdb.api.DateTimeRange;
import org.lucares.utils.LongToDateBucket;
/**
 * Describes how a plot's date range is split into equally sized time buckets
 * (e.g. one bucket per hour or per month).
 *
 * NOTE(review): the {@code value} (interval multiplier, e.g. "2" hours) is
 * stored but never used by the bucketing itself — confirm whether multi-unit
 * intervals are intended to work yet.
 */
public class Interval {

    /** Time units by which a date range can be split. */
    public enum IntervalTimeUnit {
        MINUTE, HOUR, DAY, WEEK, MONTH, YEAR;

        /** Returns true iff {@code value} names one of this enum's constants. */
        public static boolean isValid(final String value) {
            for (final IntervalTimeUnit candidate : values()) {
                if (candidate.name().equals(value)) {
                    return true;
                }
            }
            return false;
        }

        /** Translates this unit into the corresponding {@link ChronoUnit}. */
        public ChronoUnit toChronoUnit() {
            final ChronoUnit result;
            switch (this) {
            case MINUTE:
                result = ChronoUnit.MINUTES;
                break;
            case HOUR:
                result = ChronoUnit.HOURS;
                break;
            case DAY:
                result = ChronoUnit.DAYS;
                break;
            case WEEK:
                result = ChronoUnit.WEEKS;
                break;
            case MONTH:
                result = ChronoUnit.MONTHS;
                break;
            case YEAR:
                result = ChronoUnit.YEARS;
                break;
            default:
                throw new IllegalArgumentException("Unexpected value: " + this);
            }
            return result;
        }
    }

    private final IntervalTimeUnit intervalTimeUnit;
    private final int value;
    private final DateTimeRange dateTimeRange;
    private final LongToDateBucket bucketer;

    private Interval(final String intervalTimeUnit, final int value, final DateTimeRange dateTimeRange) {
        this.dateTimeRange = dateTimeRange;
        this.intervalTimeUnit = IntervalTimeUnit.valueOf(intervalTimeUnit);
        this.value = value;
        this.bucketer = new LongToDateBucket(toDateFormatForBucketer(this.intervalTimeUnit),
                this.intervalTimeUnit.toChronoUnit());
    }

    /**
     * Date pattern whose formatted output uniquely identifies one bucket of
     * the given unit.
     */
    private String toDateFormatForBucketer(final IntervalTimeUnit intervalTimeUnit) {
        final String pattern;
        switch (intervalTimeUnit) {
        case MINUTE:
            pattern = "yyyyMMddHHmm";
            break;
        case HOUR:
            pattern = "yyyyMMddHH";
            break;
        case DAY:
            pattern = "yyyyMMdd";
            break;
        case WEEK:
            // use week based year! Otherwise intervals over the year boundary will be wrong
            pattern = "YYYYww";
            break;
        case MONTH:
            pattern = "yyyyMM";
            break;
        case YEAR:
            pattern = "yyyy";
            break;
        default:
            throw new IllegalArgumentException("Unexpected value: " + intervalTimeUnit);
        }
        return pattern;
    }

    /** Maps an epoch-millisecond timestamp to the id of its bucket. */
    public String toBucket(final long epochMilli) {
        return bucketer.toPartitionId(epochMilli);
    }

    public IntervalTimeUnit getIntervalTimeUnit() {
        return intervalTimeUnit;
    }

    public int getValue() {
        return value;
    }

    @Override
    public String toString() {
        return value + " " + intervalTimeUnit;
    }

    /**
     * Factory that returns {@code null} when {@code intervalUnit} is not a
     * valid {@link IntervalTimeUnit} name (e.g. the UI's "NO_INTERVAL").
     */
    public static Interval create(final String intervalUnit, final int intervalValue,
            final DateTimeRange dateTimeRange) {
        if (!IntervalTimeUnit.isValid(intervalUnit)) {
            return null;
        }
        return new Interval(intervalUnit, intervalValue, dateTimeRange);
    }

    /**
     * Pre-creates one zeroed counter per bucket covering the whole date range.
     */
    public Map<String, AtomicLong> getBuckets() {
        final List<String> bucketIds = bucketer.toPartitionIds(dateTimeRange.getStart(), dateTimeRange.getEnd(),
                intervalTimeUnit.toChronoUnit());
        final Map<String, AtomicLong> result = new HashMap<>();
        for (final String bucketId : bucketIds) {
            result.put(bucketId, new AtomicLong(0));
        }
        return result;
    }
}

View File

@@ -5,6 +5,7 @@ import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.Optional;
import java.util.regex.Pattern;
import org.lucares.pdb.api.DateTimeRange;
@@ -42,6 +43,8 @@ public class PlotSettings {
private boolean generateThumbnail;
private Interval interval;
public String getQuery() {
return query;
}
@@ -184,6 +187,14 @@ public class PlotSettings {
default:
throw new IllegalArgumentException("Unexpected value: " + yAxis);
}
}
public Optional<Interval> getInterval() {
return Optional.ofNullable(interval);
}
public void setInterval(final Interval interval) {
this.interval = interval;
}
}

View File

@@ -7,6 +7,7 @@ import org.lucares.pdb.plot.api.AggregateHandlerCollection;
import org.lucares.pdb.plot.api.BarChartHandler;
import org.lucares.pdb.plot.api.CumulativeDistributionHandler;
import org.lucares.pdb.plot.api.HistogramHandler;
import org.lucares.pdb.plot.api.Interval;
import org.lucares.pdb.plot.api.ParallelRequestsAggregate;
import org.lucares.pdb.plot.api.PlotSettings;
import org.lucares.pdb.plot.api.ScatterAggregateHandler;
@@ -33,6 +34,7 @@ class PlotSettingsTransformer {
result.setY1(request.getY1());
result.setY2(request.getY2());
result.setAggregates(toAggregateInternal(request.getY1(), request.getY2(), request.getAggregates()));
result.setInterval(Interval.create(request.getIntervalUnit(), request.getIntervalValue(), result.dateRange()));
return result;
}

View File

@@ -34,6 +34,8 @@ public class PlotRequest {
private boolean keyOutside;
private boolean generateThumbnail;
private String intervalUnit;
private int intervalValue;
public String getQuery() {
return query;
@@ -151,4 +153,20 @@ public class PlotRequest {
public void setY2(final YAxisDefinition y2) {
this.y2 = y2;
}
public String getIntervalUnit() {
return intervalUnit;
}
public void setIntervalUnit(final String intervalUnit) {
this.intervalUnit = intervalUnit;
}
public int getIntervalValue() {
return intervalValue;
}
public void setIntervalValue(final int intervalValue) {
this.intervalValue = intervalValue;
}
}

View File

@@ -0,0 +1,53 @@
package org.lucares.utils;
import java.time.temporal.ChronoUnit;
import java.time.temporal.Temporal;
import java.time.temporal.TemporalAdjuster;
public class BeginningOfNextInterval implements TemporalAdjuster {
private final ChronoUnit unit;
public BeginningOfNextInterval(final ChronoUnit unit) {
this.unit = unit;
}
@Override
public Temporal adjustInto(final Temporal temporal) {
Temporal result = temporal;
final StartOfInterval startOfInterval = new StartOfInterval(unit);
result = result.with(startOfInterval);
switch (unit) {
case MINUTES: {
result = result.plus(1, ChronoUnit.MINUTES);
break;
}
case HOURS: {
result = result.plus(1, ChronoUnit.HOURS);
break;
}
case DAYS: {
result = result.plus(1, ChronoUnit.DAYS);
break;
}
case WEEKS: {
result = result.plus(1, ChronoUnit.WEEKS);
break;
}
case MONTHS: {
result = result.plus(1, ChronoUnit.MONTHS);
break;
}
case YEARS: {
result = result.plus(1, ChronoUnit.YEARS);
break;
}
default:
throw new IllegalArgumentException("Unexpected value: " + unit);
}
return result;
}
}

View File

@@ -0,0 +1,25 @@
package org.lucares.utils;
import java.time.temporal.ChronoUnit;
import java.time.temporal.Temporal;
import java.time.temporal.TemporalAdjuster;
public class EndOfInterval implements TemporalAdjuster {
private final ChronoUnit unit;
public EndOfInterval(final ChronoUnit unit) {
this.unit = unit;
}
@Override
public Temporal adjustInto(final Temporal temporal) {
Temporal result = temporal;
final BeginningOfNextInterval beginningOfnextInterval = new BeginningOfNextInterval(unit);
result = result.with(beginningOfnextInterval);
result = result.minus(1, ChronoUnit.NANOS);
return result;
}
}

View File

@@ -4,6 +4,7 @@ import java.time.Instant;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.List;
import java.util.Map.Entry;
@@ -54,12 +55,15 @@ public class LongToDateBucket {
*/
private final DateTimeFormatter datePattern;
ChronoUnit chronoUnit;
// visible for test
final ConcurrentNavigableMap<Long, DatePrefixAndRange> datePrefixCache = new ConcurrentSkipListMap<>();
private final AtomicReference<DatePrefixAndRange> lastAccessed = new AtomicReference<>(null);
public LongToDateBucket(final String dateFormatPattern) {
public LongToDateBucket(final String dateFormatPattern, final ChronoUnit chronoUnit) {
this.chronoUnit = chronoUnit;
this.datePattern = DateTimeFormatter.ofPattern(dateFormatPattern);
}
@@ -104,40 +108,43 @@ public class LongToDateBucket {
return result;
}
public String toDateIndexPrefix(final long epochMilli) {
final Entry<Long, DatePrefixAndRange> value = datePrefixCache.floorEntry(epochMilli);
String result;
if (value == null || !value.getValue().contains(epochMilli)) {
final DatePrefixAndRange newValue = toDatePrefixAndRange(epochMilli);
datePrefixCache.put(newValue.getMinEpochMilli(), newValue);
result = newValue.getDatePrefix();
} else {
result = value.getValue().getDatePrefix();
}
return result;
}
// public String toDateIndexPrefix(final long epochMilli) {
//
// final Entry<Long, DatePrefixAndRange> value = datePrefixCache.floorEntry(epochMilli);
//
// String result;
// if (value == null || !value.getValue().contains(epochMilli)) {
// final DatePrefixAndRange newValue = toDatePrefixAndRange(epochMilli);
// datePrefixCache.put(newValue.getMinEpochMilli(), newValue);
// result = newValue.getDatePrefix();
// } else {
// result = value.getValue().getDatePrefix();
// }
//
// return result;
// }
/**
* only for tests, use toPartitionIds(final DateTimeRange dateRange,final
* Collection<? extends PartitionId> availablePartitionIds) instead
*
* @param chronoUnit
*
* @param dateRange
* @return
*/
public List<String> toPartitionIds(final OffsetDateTime start, final OffsetDateTime end) {
public List<String> toPartitionIds(final OffsetDateTime start, final OffsetDateTime end,
final ChronoUnit chronoUnit) {
final List<String> result = new ArrayList<>();
OffsetDateTime current = start;
current = current.withOffsetSameInstant(ZoneOffset.UTC).withDayOfMonth(1).withHour(0).withMinute(0)
.withSecond(0).withNano(0);
current = current.with(new StartOfInterval(chronoUnit));
while (!current.isAfter(end)) {
final String id = toDateIndexPrefix(current);
result.add(id);
current = current.plusMonths(1);
current = current.with(new BeginningOfNextInterval(chronoUnit));
}
return result;
@@ -145,12 +152,12 @@ public class LongToDateBucket {
private DatePrefixAndRange toDatePrefixAndRange(final long epochMilli) {
final OffsetDateTime date = Instant.ofEpochMilli(epochMilli).atOffset(ZoneOffset.UTC);
final OffsetDateTime beginOfMonth = date.withDayOfMonth(1).withHour(0).withMinute(0).withSecond(0).withNano(0);
final OffsetDateTime endOfMonth = beginOfMonth.plusMonths(1).minusNanos(1);
final OffsetDateTime begin = date.with(new StartOfInterval(chronoUnit));
final OffsetDateTime end = begin.with(new EndOfInterval(chronoUnit));
final String datePrefix = date.format(datePattern);
final long minEpochMilli = beginOfMonth.toInstant().toEpochMilli();
final long maxEpochMilli = endOfMonth.toInstant().toEpochMilli();
final long minEpochMilli = begin.toInstant().toEpochMilli();
final long maxEpochMilli = end.toInstant().toEpochMilli();
return new DatePrefixAndRange(datePrefix, minEpochMilli, maxEpochMilli);
}

View File

@@ -0,0 +1,79 @@
package org.lucares.utils;
import java.time.temporal.ChronoField;
import java.time.temporal.ChronoUnit;
import java.time.temporal.Temporal;
import java.time.temporal.TemporalAdjuster;
/**
 * {@link TemporalAdjuster} that moves a temporal to the first instant of the
 * interval of the configured unit that contains it (e.g. for HOURS: the same
 * hour with minutes/seconds/nanos zeroed; for WEEKS: Monday 00:00 of the ISO
 * week).
 *
 * Fix: the original inner DAYS switch only handled WEEKS and MONTHS, so
 * adjusting with {@code ChronoUnit.YEARS} threw IllegalArgumentException even
 * though YEAR is a supported interval unit. YEARS now resets the day of month
 * to 1 as well (the month itself is reset to January by the MONTHS arm).
 */
public class StartOfInterval implements TemporalAdjuster {

    private final ChronoUnit unit;

    public StartOfInterval(final ChronoUnit unit) {
        this.unit = unit;
    }

    @Override
    public Temporal adjustInto(final Temporal temporal) {
        Temporal result = temporal;
        // Walk the ChronoUnits from smallest (NANOS) upwards, zeroing each
        // field finer than the target unit; stop once the target is reached.
        for (final ChronoUnit chronoUnit : ChronoUnit.values()) {
            if (chronoUnit.compareTo(unit) >= 0) {
                break;
            }
            switch (chronoUnit) {
            case NANOS: {
                result = result.with(ChronoField.NANO_OF_SECOND, 0);
                break;
            }
            case MICROS: {
                result = result.with(ChronoField.MICRO_OF_SECOND, 0);
                break;
            }
            case MILLIS: {
                result = result.with(ChronoField.MILLI_OF_SECOND, 0);
                break;
            }
            case SECONDS: {
                result = result.with(ChronoField.SECOND_OF_MINUTE, 0);
                break;
            }
            case MINUTES: {
                result = result.with(ChronoField.MINUTE_OF_HOUR, 0);
                break;
            }
            case HOURS: {
                result = result.with(ChronoField.HOUR_OF_DAY, 0);
                break;
            }
            case DAYS: {
                // "First day" depends on the target unit: Monday for weekly
                // intervals, the 1st of the month for monthly and yearly ones.
                switch (unit) {
                case WEEKS: {
                    result = result.with(ChronoField.DAY_OF_WEEK, 1);
                    break;
                }
                case MONTHS:
                case YEARS: {
                    result = result.with(ChronoField.DAY_OF_MONTH, 1);
                    break;
                }
                default:
                    throw new IllegalArgumentException("Unexpected value: " + unit);
                }
                break;
            }
            case MONTHS: {
                // Only reached for unit == YEARS: reset to January.
                result = result.with(ChronoField.MONTH_OF_YEAR, 1);
                break;
            }
            case HALF_DAYS:
            case WEEKS:
                // Nothing to zero for these pseudo-levels.
                break;
            default:
                throw new IllegalArgumentException("Unexpected value: " + chronoUnit);
            }
        }
        return result;
    }
}

View File

@@ -2,6 +2,7 @@ package org.lucares.utils;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.DoubleSummaryStatistics;
@@ -48,26 +49,26 @@ public class LongToDateBucketTest {
@MethodSource("provider")
public void test(final OffsetDateTime start, final OffsetDateTime end, final Set<String> expected) {
final Set<String> actual = new LongToDateBucket("yyyyMM").toDateIndexPrefix(start, end);
final Set<String> actual = new LongToDateBucket("yyyyMM", ChronoUnit.MONTHS).toDateIndexPrefix(start, end);
Assertions.assertEquals(expected, actual);
}
@Test
public void testDateToDateIndexPrefix() {
final long mid_201711 = OffsetDateTime.of(2017, 11, 23, 2, 2, 2, 0, ZoneOffset.UTC).toInstant().toEpochMilli();
final long mid_201712 = OffsetDateTime.of(2017, 12, 7, 1, 1, 1, 0, ZoneOffset.UTC).toInstant().toEpochMilli();
final long min_201801 = OffsetDateTime.of(2018, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant().toEpochMilli();
final long max_201801 = OffsetDateTime.of(2018, 1, 31, 23, 59, 59, 999_999_999, ZoneOffset.UTC).toInstant()
.toEpochMilli();
final LongToDateBucket longToDateBucket = new LongToDateBucket("yyyyMM");
Assertions.assertEquals("201712", longToDateBucket.toDateIndexPrefix(mid_201712));
Assertions.assertEquals("201801", longToDateBucket.toDateIndexPrefix(min_201801));
Assertions.assertEquals("201801", longToDateBucket.toDateIndexPrefix(max_201801));
Assertions.assertEquals("201711", longToDateBucket.toDateIndexPrefix(mid_201711));
}
// @Test
// public void testDateToDateIndexPrefix() {
//
// final long mid_201711 = OffsetDateTime.of(2017, 11, 23, 2, 2, 2, 0, ZoneOffset.UTC).toInstant().toEpochMilli();
// final long mid_201712 = OffsetDateTime.of(2017, 12, 7, 1, 1, 1, 0, ZoneOffset.UTC).toInstant().toEpochMilli();
// final long min_201801 = OffsetDateTime.of(2018, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant().toEpochMilli();
// final long max_201801 = OffsetDateTime.of(2018, 1, 31, 23, 59, 59, 999_999_999, ZoneOffset.UTC).toInstant()
// .toEpochMilli();
// final LongToDateBucket longToDateBucket = new LongToDateBucket("yyyyMM");
//
// Assertions.assertEquals("201712", longToDateBucket.toDateIndexPrefix(mid_201712));
// Assertions.assertEquals("201801", longToDateBucket.toDateIndexPrefix(min_201801));
// Assertions.assertEquals("201801", longToDateBucket.toDateIndexPrefix(max_201801));
// Assertions.assertEquals("201711", longToDateBucket.toDateIndexPrefix(mid_201711));
// }
@Test
public void testDateRanges() {
@@ -78,15 +79,18 @@ public class LongToDateBucketTest {
final OffsetDateTime min_201802 = OffsetDateTime.of(2018, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC)
.withOffsetSameInstant(ZoneOffset.ofHours(12));
final LongToDateBucket longToDateBucket = new LongToDateBucket("yyyyMM");
final LongToDateBucket longToDateBucket = new LongToDateBucket("yyyyMM", ChronoUnit.MONTHS);
final List<String> dateIndexPrefixesWithEmptyCache = longToDateBucket.toPartitionIds(mid_201712, min_201802);
final List<String> dateIndexPrefixesWithEmptyCache = longToDateBucket.toPartitionIds(mid_201712, min_201802,
ChronoUnit.MONTHS);
Assertions.assertEquals(Arrays.asList("201712", "201801", "201802"), dateIndexPrefixesWithEmptyCache);
final List<String> dateIndexPrefixesWithFilledCache = longToDateBucket.toPartitionIds(mid_201712, min_201801);
final List<String> dateIndexPrefixesWithFilledCache = longToDateBucket.toPartitionIds(mid_201712, min_201801,
ChronoUnit.MONTHS);
Assertions.assertEquals(Arrays.asList("201712", "201801"), dateIndexPrefixesWithFilledCache);
final List<String> dateIndexPrefixesOneMonth = longToDateBucket.toPartitionIds(mid_201712, mid_201712);
final List<String> dateIndexPrefixesOneMonth = longToDateBucket.toPartitionIds(mid_201712, mid_201712,
ChronoUnit.MONTHS);
Assertions.assertEquals(Arrays.asList("201712"), dateIndexPrefixesOneMonth);
}
@@ -99,7 +103,7 @@ public class LongToDateBucketTest {
final long exp_201801 = OffsetDateTime.of(2018, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant().toEpochMilli();
final long exp_201802 = OffsetDateTime.of(2018, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant().toEpochMilli();
final LongToDateBucket longToDateBucket = new LongToDateBucket("yyyyMM");
final LongToDateBucket longToDateBucket = new LongToDateBucket("yyyyMM", ChronoUnit.MONTHS);
final List<Long> dateIndexEpochMillis = longToDateBucket.toDateIndexEpochMillis(mid_201712, min_201802);
Assertions.assertEquals(Arrays.asList(exp_201712, exp_201801, exp_201802), dateIndexEpochMillis);
@@ -117,7 +121,7 @@ public class LongToDateBucketTest {
final int warmup = 20 * factor;
final int rounds = warmup + 20;
final LongToDateBucket longToDateBucket = new LongToDateBucket("yyyyMM");
final LongToDateBucket longToDateBucket = new LongToDateBucket("yyyyMM", ChronoUnit.MONTHS);
// fill the cache
for (long i = min; i < max; i += 3600 * 24 * 28) {