add plots for percentiles

ahr
2017-11-06 16:57:22 +01:00
parent 92dde94443
commit 64db4c48a2
19 changed files with 301 additions and 115 deletions

View File

@@ -2,17 +2,21 @@ package org.lucares.recommind.logs;
import java.io.File;
import org.lucares.pdb.plot.api.AggregatedData;
class CsvSummary {
private final int values;
private long maxValue;
private File dataFile;
private AggregatedData aggregatedData;
public CsvSummary(File dataFile, final int values, long maxValue) {
public CsvSummary(File dataFile, final int values, long maxValue, AggregatedData aggregatedData) {
super();
this.dataFile = dataFile;
this.values = values;
this.maxValue = maxValue;
this.aggregatedData = aggregatedData;
}
public File getDataFile() {
@@ -26,4 +30,8 @@ class CsvSummary {
public long getMaxValue() {
return maxValue;
}
public AggregatedData getAggregatedData() {
return aggregatedData;
}
}
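CsvSummary now also carries the AggregatedData produced while the CSV is written, so the percentile information can travel from the export step to plot generation. A minimal usage sketch of the extended constructor (the helper below is hypothetical; the real call site is in Plotter.toCsv() further down):

package org.lucares.recommind.logs;

import java.io.File;

import org.lucares.pdb.plot.api.AggregatedData;

class CsvSummarySketch {
    // Hypothetical helper mirroring the end of Plotter.toCsv(): the aggregated
    // (percentile) data is handed over together with the plain CSV summary.
    static CsvSummary summarize(final File dataFile, final int count, final long maxValue,
            final AggregatedData aggregated) {
        return new CsvSummary(dataFile, count, maxValue, aggregated);
    }
}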

View File

@@ -1,76 +1,70 @@
package org.lucares.recommind.logs;
import java.io.File;
import java.util.Comparator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
public class DataSeries {
public static final Comparator<? super DataSeries> BY_NUMBER_OF_VALUES = (a, b) -> {
return a.getValues() - b.getValues();
};
public static final Comparator<? super DataSeries> BY_MAX_VALUE = (a, b) -> {
final long result = a.getMaxValue() - b.getMaxValue();
return result <0 ? -1 : (result > 0 ? 1 : 0);
};
private final File dataFile;
private final String title;
private final GnuplotColor color;
private final Integer pointType;
private final int values;
private long maxValue;
public DataSeries(final File dataFile, final String title, final int values, long maxValue) {
super();
this.dataFile = dataFile;
this.title = title;
this.values = values;
this.maxValue = maxValue;
this.color = null;
this.pointType = null;
}
public GnuplotColor getColor() {
return color;
}
public Integer getPointType() {
return pointType;
}
public File getDataFile() {
return dataFile;
}
public String getTitle() {
return title;
}
public int getValues() {
return values;
}
public long getMaxValue() {
return maxValue;
}
public static Map<String, Integer> toMap(final List<DataSeries> dataSeries) {
final Map<String, Integer> result = new LinkedHashMap<>();
for (final DataSeries dataSerie : dataSeries) {
result.put(dataSerie.getTitle(), dataSerie.values);
}
return result;
}
}
package org.lucares.recommind.logs;
import java.io.File;
import java.util.Comparator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.lucares.pdb.plot.api.AggregatedData;
public class DataSeries {
public static final Comparator<? super DataSeries> BY_NUMBER_OF_VALUES = (a, b) -> {
return a.getValues() - b.getValues();
};
public static final Comparator<? super DataSeries> BY_MAX_VALUE = (a, b) -> {
final long result = a.getMaxValue() - b.getMaxValue();
return result < 0 ? -1 : (result > 0 ? 1 : 0);
};
private final String title;
private CsvSummary csvSummary;
private String id;
public DataSeries(String id, String title, CsvSummary csvSummary) {
this.id = id;
this.title = title;
this.csvSummary = csvSummary;
}
public String getId() {
return id;
}
public File getDataFile() {
return csvSummary.getDataFile();
}
public String getTitle() {
return title;
}
public int getValues() {
return csvSummary.getValues();
}
public long getMaxValue() {
return csvSummary.getMaxValue();
}
public AggregatedData getAggregatedData() {
return csvSummary.getAggregatedData();
}
public static Map<String, Integer> toMap(final List<DataSeries> dataSeries) {
final Map<String, Integer> result = new LinkedHashMap<>();
for (final DataSeries dataSerie : dataSeries) {
result.put(dataSerie.getTitle(), dataSerie.getValues());
}
return result;
}
}
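DataSeries is now a thin view over CsvSummary: value count, maximum and the new AggregatedData are all delegated. The comparators and toMap() keep working against the delegating accessors; a small sketch, assuming a series list as built by Plotter:

package org.lucares.recommind.logs;

import java.util.List;
import java.util.Map;

final class DataSeriesSketch {
    // Sorts the series by their largest value (delegated to CsvSummary.getMaxValue())
    // and builds the title -> value-count map used for labelling.
    static Map<String, Integer> sortAndSummarize(final List<DataSeries> series) {
        series.sort(DataSeries.BY_MAX_VALUE);
        return DataSeries.toMap(series);
    }
}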

View File

@@ -2,7 +2,6 @@ package org.lucares.recommind.logs;
import java.util.Collection;
import org.lucares.pdb.plot.api.AggreateInternal;
import org.lucares.pdb.plot.api.AxisScale;
public class GnuplotFileGenerator {
@@ -18,7 +17,7 @@ public class GnuplotFileGenerator {
appendfln(result, "set datafile separator \"%s\"", settings.getDatafileSeparator());
settings.getAggregate().addStats(result, dataSeries);
settings.getAggregate().addGnuplotDefinitions(result, settings.getDatafileSeparator(), dataSeries);
appendfln(result, "set timefmt '%s'", settings.getTimefmt());
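The generator no longer calls addStats(); instead the configured aggregate handler writes its own gnuplot definitions into the script, which is where the new percentile plot lines come from. The contract below is only pieced together from the call sites visible in this commit (the argument types are assumptions; the real interface lives in org.lucares.pdb.plot.api and may differ):

package org.lucares.pdb.plot.api;

import java.util.Collection;

// Reconstructed sketch of the handler contract, inferred from GnuplotFileGenerator
// and Plotter in this commit; not the actual source.
interface AggregateHandlerSketch {
    // Appends aggregate-specific gnuplot statements (e.g. percentile series) to the script.
    void addGnuplotDefinitions(StringBuilder script, String datafileSeparator, Collection<?> dataSeries);

    // Creates the per-export aggregator that collects values between the two timestamps.
    CustomAggregator createCustomAggregator(long fromEpochMilli, long toEpochMilli);
}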

View File

@@ -2,7 +2,6 @@ package org.lucares.recommind.logs;
import java.nio.file.Path;
import org.lucares.pdb.plot.api.AggreateInternal;
import org.lucares.pdb.plot.api.AggregateHandler;
import org.lucares.pdb.plot.api.AxisScale;

View File

@@ -26,8 +26,7 @@ import org.lucares.pdb.api.Entry;
import org.lucares.pdb.api.GroupResult;
import org.lucares.pdb.api.Result;
import org.lucares.pdb.api.Tags;
import org.lucares.pdb.plot.api.AggreateInternal;
import org.lucares.pdb.plot.api.AggregateHandler;
import org.lucares.pdb.plot.api.CustomAggregator;
import org.lucares.pdb.plot.api.Limit;
import org.lucares.pdb.plot.api.PlotSettings;
import org.lucares.performance.db.PerformanceDb;
@@ -92,6 +91,7 @@ public class Plotter {
final Result result = db.get(query, groupBy);
int idCounter = 0;
for (final GroupResult groupResult : result.getGroups()) {
final Stream<Entry> entries = groupResult.asStream();
@@ -99,10 +99,11 @@ public class Plotter {
final CsvSummary csvSummary = toCsv(entries, tmpDir, dateFrom, dateTo, plotSettings);
final String title = title(groupResult.getGroupedBy(), csvSummary.getValues());
final DataSeries dataSerie = new DataSeries(csvSummary.getDataFile(), title, csvSummary.getValues(), csvSummary.getMaxValue());
final DataSeries dataSerie = new DataSeries("id"+idCounter, title, csvSummary);
if (dataSerie.getValues() > 0) {
dataSeries.add(dataSerie);
}
idCounter++;
}
if (dataSeries.isEmpty()) {
@@ -233,8 +234,7 @@ public class Plotter {
final long fromEpochMilli = dateFrom.toInstant().toEpochMilli();
final long toEpochMilli = dateTo.toInstant().toEpochMilli();
final boolean useMillis = (toEpochMilli - fromEpochMilli) < TimeUnit.MINUTES.toMillis(5);
final AggregateHandler aggregate = plotSettings.getAggregate();
final CustomAggregator aggregator = plotSettings.getAggregate().createCustomAggregator(fromEpochMilli, toEpochMilli);
long maxValue = 0;
long ignoredValues = 0;
@@ -248,26 +248,30 @@ public class Plotter {
while (it.hasNext()) {
final Entry entry = it.next();
if (fromEpochMilli <= entry.getEpochMilli() && entry.getEpochMilli() <= toEpochMilli) {
long epochMilli = entry.getEpochMilli();
if (fromEpochMilli <= epochMilli && epochMilli <= toEpochMilli) {
final String value = longToString(entry.getValue());
long value = entry.getValue();
final String stringValue = longToString(value);
final String formattedDate;
if (useMillis){
formattedDateBuilder.delete(0, formattedDateBuilder.length());
formatter.format("%.3f", entry.getEpochMilli() / 1000.0);
formatter.format("%.3f", epochMilli / 1000.0);
formattedDate = formattedDateBuilder.toString();
}else {
formattedDate = String.valueOf(entry.getEpochMilli() / 1000);
formattedDate = String.valueOf(epochMilli / 1000);
}
output.write(formattedDate);
output.write(separator);
output.write(value);
output.write(stringValue);
output.write(newline);
aggregator.addValue(epochMilli, value);
count++;
maxValue = Math.max(maxValue, entry.getValue());
maxValue = Math.max(maxValue, value);
}else {
ignoredValues++;
}
@@ -275,7 +279,7 @@ public class Plotter {
}
METRICS_LOGGER.debug("wrote {} values to csv in: {}ms (ignored {} values) use millis: {}", count, (System.nanoTime() - start) / 1_000_000.0, ignoredValues, Boolean.toString(useMillis));
return new CsvSummary(dataFile, count, maxValue);
return new CsvSummary(dataFile, count, maxValue, aggregator.getAggregatedData());
}
private static String longToString(final long value){
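Net effect of the Plotter changes: while each entry inside the time window is streamed to the CSV file, it is also fed into a CustomAggregator created from the plot settings, and the resulting AggregatedData travels back through CsvSummary into DataSeries. A condensed sketch of that flow using only the calls visible in this diff (entries are simplified to {epochMilli, value} pairs; file writing and date formatting are left out):

package org.lucares.recommind.logs;

import java.io.File;
import java.util.List;

import org.lucares.pdb.plot.api.CustomAggregator;
import org.lucares.pdb.plot.api.PlotSettings;

final class AggregationFlowSketch {
    // Condensed Plotter.toCsv(): every value inside [from, to] is handed to the
    // aggregator; its AggregatedData ends up inside the returned CsvSummary.
    static CsvSummary aggregate(final File dataFile, final PlotSettings plotSettings,
            final long fromEpochMilli, final long toEpochMilli, final List<long[]> entries) {
        final CustomAggregator aggregator =
                plotSettings.getAggregate().createCustomAggregator(fromEpochMilli, toEpochMilli);
        int count = 0;
        long maxValue = 0;
        for (final long[] entry : entries) {
            final long epochMilli = entry[0];
            final long value = entry[1];
            if (fromEpochMilli <= epochMilli && epochMilli <= toEpochMilli) {
                aggregator.addValue(epochMilli, value);
                maxValue = Math.max(maxValue, value);
                count++;
            }
        }
        return new CsvSummary(dataFile, count, maxValue, aggregator.getAggregatedData());
    }
}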