Merge remote-tracking branch 'origin/master'
@@ -18,7 +18,9 @@ public enum Aggregate {
     */
    CUM_DISTRIBUTION("Cumulative Distribution"),

    HISTOGRAM("Histogram");
    HISTOGRAM("Histogram"),

    BOX("Box");

    private final String axisLabel;
@@ -30,7 +30,7 @@ public class BarChartAggregatorForIntervals implements CustomAggregator, Indexed
    public BarChartAggregatorForIntervals(final PlotSettings settings) {
        this.settings = settings;
        this.interval = settings.getInterval().get();
        buckets = interval.getBuckets();
        buckets = interval.getBuckets(AtomicLong::new);
    }

    @Override
@@ -0,0 +1,76 @@
package org.lucares.pdb.plot.api;

import java.util.Locale;
import java.util.UUID;

import org.lucares.collections.LongObjHashMap;
import org.lucares.recommind.logs.GnuplotAxis;

public class BoxAggregator implements CustomAggregator {
    private final String dataName = "$data" + UUID.randomUUID().toString().replace("-", "");

    private final Interval interval;

    private final LongObjHashMap<PercentilesAggregator> buckets;

    public BoxAggregator(final PlotSettings settings) {

        this.interval = settings.getInterval().get();
        this.buckets = interval.getMiddleTimeBuckets(PercentilesAggregator::new);
    }

    @Override
    public void addValue(final long epochMilli, final long value) {

        final long bucketId = interval.toBucketMiddleTime(epochMilli);
        final PercentilesAggregator bucket = buckets.get(bucketId);
        bucket.addValue(epochMilli, value);
    }

    @Override
    public AggregatedData getAggregatedData() {
        // not needed - usually this method is used to write the data to file, but bar
        // charts use inline data
        return null;
    }

    @Override
    public Aggregate getType() {
        return Aggregate.BOX;
    }

    public Object getDataName() {
        return dataName;
    }

    public Interval getInterval() {
        return interval;
    }

    public String asCsv(final boolean renderLabels) {
        final StringBuilder csv = new StringBuilder();

        buckets.forEachOrdered((final long bucketId, final PercentilesAggregator percentilesAggregator) -> {
            final Percentiles percentiles = percentilesAggregator.getPercentiles();
            if (percentiles.get("25.000") != null) {
                csv.append(String.format(Locale.US, "%d,%d,%d,%d,%d,%d", //
                        bucketId / 1000, //
                        percentiles.get("0.000"), //
                        percentiles.get("25.000"), //
                        percentiles.get("50.000"), //
                        percentiles.get("75.000"), //
                        percentiles.get("100.000")//
                ));
                csv.append("\n");
            }
        });

        return csv.toString();
    }

    public String renderLabels(final GnuplotAxis xAxis) {
        final StringBuilder result = new StringBuilder();
        return result.toString();
    }

}
@@ -0,0 +1,99 @@
package org.lucares.pdb.plot.api;

import java.nio.file.Path;
import java.util.Collection;
import java.util.Optional;

import org.lucares.recommind.logs.AxisSettings;
import org.lucares.recommind.logs.AxisTime;
import org.lucares.recommind.logs.DataSeries;
import org.lucares.recommind.logs.GnuplotAxis;
import org.lucares.recommind.logs.GnuplotLineType;
import org.lucares.recommind.logs.GnuplotSettings;
import org.lucares.recommind.logs.LineStyle;
import org.lucares.recommind.logs.Type;

public class BoxChartHandler extends AggregateHandler {

    @Override
    Type getAxisType(final GnuplotAxis axis) {
        switch (axis) {
        case X1:
        case X2:
            return Type.Time;
        case Y1:
        case Y2:
            return Type.Duration;
        default:
            throw new IllegalArgumentException("Unexpected value: " + axis);
        }
    }

    @Override
    Aggregate getAggregateType() {
        return Aggregate.BOX;
    }

    @Override
    AxisSettings createXAxisSettings(final GnuplotSettings settings, final Collection<DataSeries> dataSeries) {
        final AxisSettings result = AxisTime.createXAxis(settings);
        result.setAxis(getxAxis());
        result.setShowGrid(getxAxis() == GnuplotAxis.X1);

        return result;
    }

    @Override
    String beforePlot(final CustomAggregator aggregator, final GnuplotSettings settings) {
        final StringBuilder result = new StringBuilder();

        final BoxAggregator boxAggregator = (BoxAggregator) aggregator;

        appendfln(result, "%s <<EOD", boxAggregator.getDataName());
        appendln(result, boxAggregator.asCsv(settings.isRenderLabels()));
        appendln(result, "EOD");

        if (settings.isRenderLabels() && settings.isRenderBarChartTickLabels()) {
            appendfln(result, boxAggregator.renderLabels(getxAxis()));
        }

        return result.toString();
    }

    @Override
    String addPlot(final CustomAggregator aggregator, final LineStyle lineStyle, final Optional<String> title) {
        final BoxAggregator boxAggregator = (BoxAggregator) aggregator;

        final String candlestick = formatln(
                "'%s' using 1:3:2:6:5:(%.1f) %s axes %s with %s whiskerbars 0.5 fs empty %s linewidth 1, \\", //
                boxAggregator.getDataName(), //
                width(boxAggregator.getInterval().getIntervalTimeUnit()), //
                gnuplotTitle(title), //
                gnuplotXYAxis(), //
                GnuplotLineType.BOX, //
                lineStyle.asGnuplotLineStyle()//
        );
        final String median = formatln(
                "'%s' using 1:4:4:4:4:(%.1f) axes %s with candlesticks notitle fs empty %s linewidth 2, \\", //
                boxAggregator.getDataName(), //
                width(boxAggregator.getInterval().getIntervalTimeUnit()), //
                gnuplotXYAxis(), //
                lineStyle.asGnuplotLineStyle());
        return candlestick + median;
    }

    private double width(final IntervalTimeUnit intervalTimeUnit) {
        return intervalTimeUnit.toMillis() / 1000;
    }

    @Override
    CustomAggregator createCustomAggregator(final Path tmpDir, final PlotSettings plotSettings,
            final long fromEpochMilli, final long toEpochMilli) {
        if (plotSettings.getInterval().isPresent()) {
            return new BoxAggregator(plotSettings);
        } else {
            return null;
        }
    }

}
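A worked illustration (values invented) of how one asCsv row from BoxAggregator feeds the candlestick spec above; the mapping assumes gnuplot's usual candlesticks column order x:box_min:whisker_min:whisker_max:box_high plus an optional width column:

    // asCsv emits epoch seconds plus min, p25, p50, p75, max per bucket, e.g.
    //   1600000110,12,30,55,80,140
    // "using 1:3:2:6:5:(width)" then reads col1 as x, col3 (p25) as the box bottom,
    // col2 (min) as the lower whisker, col6 (max) as the upper whisker and
    // col5 (p75) as the box top; the second plot command draws the median (col4)
    // as a degenerate candlestick on top.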
@@ -8,90 +8,27 @@ import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.Locale;

import org.lucares.collections.LongLongConsumer;
import org.lucares.collections.LongLongHashMap;
import org.lucares.pdb.api.RuntimeIOException;

public class CumulativeDistributionCustomAggregator implements CustomAggregator {

    private final static int POINTS = 500;

    private static final class ToPercentiles implements LongLongConsumer {

        private long cumulativeCount = 0;

        private long maxValue = 0;

        private final Percentiles percentiles = new Percentiles(POINTS);

        private final double stepSize;

        private double lastPercentile;
        private double nextPercentile;

        private final long totalValues;

        public ToPercentiles(final long totalValues) {
            this.totalValues = totalValues;
            stepSize = 100.0 / POINTS;
            nextPercentile = stepSize;
        }

        @Override
        public void accept(final long duration, final long count) {
            maxValue = duration;

            cumulativeCount += count;
            final double newPercentile = cumulativeCount * 100.0 / totalValues;

            if (newPercentile >= nextPercentile) {
                double currentPercentile = lastPercentile + stepSize;
                while (currentPercentile <= newPercentile) {
                    final String percentile = String.format(Locale.US, "%.3f", currentPercentile);
                    percentiles.put(percentile, duration);
                    currentPercentile += stepSize;
                }
                nextPercentile = currentPercentile;
                lastPercentile = currentPercentile - stepSize;
            }
        }

        public Percentiles getPercentiles() {
            return percentiles;
        }

        public void collect(final LongLongHashMap map) {
            map.forEachOrdered(this);
            percentiles.put("100.000", maxValue);
        }

    }

    // the rather large initial capacity should prevent too many grow&re-hash phases
    private final LongLongHashMap map = new LongLongHashMap(5_000, 0.75);

    private long totalValues = 0;

    private final Path tmpDir;

    private final PercentilesAggregator percentilesAggregator;

    public CumulativeDistributionCustomAggregator(final Path tmpDir) {
        this.tmpDir = tmpDir;
        percentilesAggregator = new PercentilesAggregator();
    }

    @Override
    public void addValue(final long epochMilli, final long value) {
        map.compute(value, 0, (__, l) -> l + 1);
        totalValues++;
        percentilesAggregator.addValue(epochMilli, value);
    }

    public Percentiles getPercentiles() {
        final ToPercentiles toPercentiles = new ToPercentiles(totalValues);
        toPercentiles.collect(map);

        final Percentiles result = toPercentiles.getPercentiles();
        return result;
        return percentilesAggregator.getPercentiles();
    }

    @Override
@@ -100,17 +37,14 @@ public class CumulativeDistributionCustomAggregator implements CustomAggregator
        final char separator = ',';
        final char newline = '\n';

        final ToPercentiles toPercentiles = new ToPercentiles(totalValues);
        toPercentiles.collect(map);

        final File dataFile = File.createTempFile("data", ".dat", tmpDir.toFile());
        try (final Writer output = new BufferedWriter(
                new OutputStreamWriter(new FileOutputStream(dataFile), StandardCharsets.US_ASCII));) {

            final StringBuilder data = new StringBuilder();
            if (map.size() > 0) {
            if (percentilesAggregator.hasValues()) {
                // compute the percentiles
                toPercentiles.getPercentiles().forEach((percentile, value) -> {
                percentilesAggregator.getPercentiles().forEach((percentile, value) -> {

                    data.append(percentile);
                    data.append(separator);
@@ -3,8 +3,9 @@ package org.lucares.pdb.plot.api;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Supplier;

import org.lucares.collections.LongObjHashMap;
import org.lucares.pdb.api.DateTimeRange;
import org.lucares.pdb.datastore.internal.LongToDateBucket;

@@ -51,6 +52,30 @@ public class Interval {
        return bucketer.toPartitionId(epochMilli);
    }

    public long toBucketMiddleTime(final long epochMilli) {
        switch (intervalTimeUnit) {
        case SECOND:
            return epochMilli - epochMilli % 1000 + 500;
        case MINUTE:
            return epochMilli - epochMilli % 60000 + 30000;
        case HOUR:
            return epochMilli - epochMilli % 3600000 + 1800000;
        case DAY:
            return epochMilli - epochMilli % 86400000 + 43200000;
        case WEEK:
            return epochMilli - epochMilli % (7 * 24 * 3600 * 1000) + 7 * 24 * 3600 * 500; // use week based year!
                                                                                            // Otherwise intervals over
                                                                                            // the year boundary will be
                                                                                            // wrong
        case MONTH:
            return epochMilli - epochMilli % (30 * 24 * 3600 * 1000L) + 30 * 24 * 3600 * 500L;
        case YEAR:
            return epochMilli - epochMilli % (365 * 24 * 3600 * 1000L) + 365 * 24 * 3600 * 500L;
        default:
            throw new IllegalArgumentException("Unexpected value: " + intervalTimeUnit);
        }
    }

    public IntervalTimeUnit getIntervalTimeUnit() {
        return intervalTimeUnit;
    }
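A quick arithmetic check of the rounding above, for the MINUTE case (the input timestamp is arbitrary):

    // epochMilli          = 1_600_000_123_456
    // epochMilli % 60_000 =            43_456
    // 1_600_000_123_456 - 43_456 + 30_000 = 1_600_000_110_000
    // i.e. the middle of the minute bucket that starts at 1_600_000_080_000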
@@ -72,13 +97,30 @@ public class Interval {
        return null;
    }

    public Map<String, AtomicLong> getBuckets() {
        final Map<String, AtomicLong> result = new HashMap<>();
    public <T> Map<String, T> getBuckets(final Supplier<T> initialValueSupplier) {
        final Map<String, T> result = new HashMap<>();
        final List<String> bucketIds = bucketer.toPartitionIds(dateTimeRange.getStart(), dateTimeRange.getEnd(),
                intervalTimeUnit.toChronoUnit());

        for (final String bucketId : bucketIds) {
            result.put(bucketId, new AtomicLong(0));
            result.put(bucketId, initialValueSupplier.get());
        }

        return result;
    }

    public <T> LongObjHashMap<T> getMiddleTimeBuckets(final Supplier<T> initialValueSupplier) {
        final LongObjHashMap<T> result = new LongObjHashMap<>();

        long current = dateTimeRange.getStart().toInstant().toEpochMilli();
        final long end = dateTimeRange.getEnd().toInstant().toEpochMilli() + intervalTimeUnit.toMillis();

        while (current <= end) {

            final long id = toBucketMiddleTime(current);
            System.out.println("add bucket: " + id);
            result.put(id, initialValueSupplier.get());
            current += intervalTimeUnit.toMillis();
        }

        return result;
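For context, the two call sites elsewhere in this commit that the generified factories serve, sketched in isolation:

    // bar charts count values per bucket id
    final Map<String, AtomicLong> counts = interval.getBuckets(AtomicLong::new);

    // box charts keep one percentile aggregator per bucket middle time
    final LongObjHashMap<PercentilesAggregator> boxes = interval.getMiddleTimeBuckets(PercentilesAggregator::new);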
@@ -34,4 +34,25 @@ public enum IntervalTimeUnit {
            throw new IllegalArgumentException("Unexpected value: " + this);
        }
    }

    public long toMillis() {
        switch (this) {
        case SECOND:
            return 1000;
        case MINUTE:
            return 60 * 1000;
        case HOUR:
            return 3600 * 1000;
        case DAY:
            return 24 * 3600 * 1000;
        case WEEK:
            return 7 * 24 * 3600 * 1000;
        case MONTH:
            return 30 * 24 * 3600 * 1000L;
        case YEAR:
            return 365 * 24 * 3600 * 1000L;
        default:
            throw new IllegalArgumentException("Unexpected value: " + this);
        }
    }
}
@@ -0,0 +1,91 @@
package org.lucares.pdb.plot.api;

import java.util.Locale;

import org.lucares.collections.LongLongConsumer;
import org.lucares.collections.LongLongHashMap;

public class PercentilesAggregator {
    private final static int POINTS = 500;

    private static final class ToPercentiles implements LongLongConsumer {

        private long cumulativeCount = 0;

        private long minValue = Long.MAX_VALUE;
        private long maxValue = 0;

        private final Percentiles percentiles = new Percentiles(POINTS);

        private final double stepSize;

        private double lastPercentile;
        private double nextPercentile;

        private final long totalValues;

        public ToPercentiles(final long totalValues) {
            this.totalValues = totalValues;
            stepSize = 100.0 / POINTS;
            nextPercentile = 0;
        }

        @Override
        public void accept(final long duration, final long count) {
            minValue = Math.min(minValue, duration);
            maxValue = duration;

            cumulativeCount += count;
            final double newPercentile = cumulativeCount * 100.0 / totalValues;

            if (newPercentile >= nextPercentile) {
                double currentPercentile = lastPercentile + stepSize;
                while (currentPercentile <= newPercentile) {
                    final String percentile = String.format(Locale.US, "%.3f", currentPercentile);
                    percentiles.put(percentile, duration);
                    currentPercentile += stepSize;
                }
                nextPercentile = currentPercentile;
                lastPercentile = currentPercentile - stepSize;
            }
        }

        public Percentiles getPercentiles() {
            return percentiles;
        }

        public void collect(final LongLongHashMap map) {
            percentiles.put("0.000", 0L); // make sure "0.000" is the first element in the sorted percentiles. Will be
                                          // overwritten with the correct value later
            map.forEachOrdered(this);
            percentiles.put("0.000", minValue);
            percentiles.put("100.000", maxValue);
        }

    }

    // the rather large initial capacity should prevent too many grow&re-hash phases
    private final LongLongHashMap map = new LongLongHashMap(5_000, 0.75);

    private long totalValues = 0;

    public PercentilesAggregator() {
    }

    public void addValue(final long epochMilli, final long value) {
        map.compute(value, 0, (__, l) -> l + 1);
        totalValues++;
    }

    public Percentiles getPercentiles() {
        final ToPercentiles toPercentiles = new ToPercentiles(totalValues);
        toPercentiles.collect(map);

        final Percentiles result = toPercentiles.getPercentiles();
        return result;
    }

    public boolean hasValues() {
        return map.size() > 0;
    }
}
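A minimal usage sketch of the new class, mirroring how BoxAggregator drives it (sample values invented; the lookup keys follow the "%.3f" format used above):

    final PercentilesAggregator aggregator = new PercentilesAggregator();
    aggregator.addValue(epochMilli, 42); // the timestamp is currently ignored, only the value is counted
    aggregator.addValue(epochMilli, 17);
    if (aggregator.hasValues()) {
        final Percentiles percentiles = aggregator.getPercentiles();
        System.out.println(percentiles.get("50.000")); // keys such as "0.000", "50.000", "100.000"
    }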
@@ -66,6 +66,7 @@ public class GnuplotFileGenerator implements Appender {

        // appendfln(result, "set xrange [-1:1]");
        appendfln(result, "set boxwidth 0.5");
        // appendfln(result, "set boxwidth 3600");

        appendfln(result, "set style fill transparent solid 0.5");

@@ -75,7 +76,7 @@ public class GnuplotFileGenerator implements Appender {
        // render images even when there are no data points on them.
        appendf(result, "-1 with lines notitle");

        LOGGER.debug("{}", result);
        LOGGER.info("{}", result);

        return result.toString();
    }
@@ -5,6 +5,8 @@ public enum GnuplotLineType {

    Bar("boxes"),

    BOX("candlesticks"),

    Points("points");

    private String gnuplotLineType;
@@ -13,13 +13,7 @@ public class LineStyle {
    }

    private String asGnuplotLineStyle(final String colorHex) {
        // TODO revert
        // return String.format("lt rgb \"#%s\" dt %s ", //
        // colorHex, //
        // dashType.toGnuplotDashType()//
        // );

        return String.format("lt rgb \"#%s\" ", //
        return String.format("linetype rgb \"#%s\" ", //
                colorHex//
        );
    }
@@ -195,7 +195,7 @@ public class Plotter {

        METRICS_LOGGER.debug("wrote {} values to csv in: {}ms (ignored {} values) use millis: {}, grouping={}",
                plottedValues, (System.nanoTime() - start) / 1_000_000.0, ignoredValues, Boolean.toString(useMillis),
                groupResult.getGroupedBy().asString());
                Tags.STRING_COMPRESSOR.asString(groupResult.getGroupedBy()));
        return new CsvSummary(count, statsMaxValue, statsCurrentAverage, aggregator);

    }
@@ -208,7 +208,7 @@ public class Plotter {
    static String title(final Tags tags, final CsvSummary csvSummary) {
        // TODO title must be computed by the AggregateHandler, because it is the only
        // one knowing how many values are plotted
        final StringBuilder result = new StringBuilder(tags.asValueString());
        final StringBuilder result = new StringBuilder(Tags.STRING_COMPRESSOR.asValueString(tags));

        final int values = csvSummary.getValues();
        result.append(" (");