add trace logging
@@ -182,6 +182,7 @@ public class DataStore {
     public List<Doc> search(final String query) {
 
         final IntList docIdsList = executeQuery(query);
+        LOGGER.trace("query {} found {} docs", query, docIdsList.size());
         final List<Doc> result = mapDocIdsToDocs(docIdsList);
         return result;
     }
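The new trace call relies on slf4j's parameterized logging, so the "{}" placeholders are only substituted when TRACE is actually enabled; the arguments themselves (query and docIdsList.size()) are still evaluated, which is cheap here. A minimal sketch of the logger setup this call assumes, since the LOGGER field itself is not part of the diff and its exact declaration is an assumption:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class DataStore {
        // assumed declaration; the diff only shows the call site
        private static final Logger LOGGER = LoggerFactory.getLogger(DataStore.class);

        // an explicit isTraceEnabled() guard is only worth it when computing the arguments is expensive
        void logSearch(final String query, final int docCount) {
            if (LOGGER.isTraceEnabled()) {
                LOGGER.trace("query {} found {} docs", query, docCount);
            }
        }
    }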
@@ -24,6 +24,7 @@ import java.util.concurrent.atomic.AtomicInteger;
 import java.util.stream.Stream;
 
 import org.lucares.pdb.api.Entry;
+import org.lucares.pdb.api.GroupResult;
 import org.lucares.pdb.api.Result;
 import org.lucares.pdb.api.Tags;
 import org.lucares.pdb.plot.api.CustomAggregator;
@@ -96,9 +97,9 @@ public class Plotter {
         final AtomicInteger idCounter = new AtomicInteger(0);
         result.getGroups().stream().parallel().forEach(groupResult -> {
             try{
-                final Stream<Entry> entries = groupResult.asStream();
 
-                final CsvSummary csvSummary = toCsv(entries, tmpDir, dateFrom, dateTo, plotSettings);
+                final CsvSummary csvSummary = toCsv(groupResult, tmpDir, dateFrom, dateTo, plotSettings);
 
                 final int id = idCounter.getAndIncrement();
                 final String title = title(groupResult.getGroupedBy(), csvSummary.getValues());
@@ -232,11 +233,12 @@ public class Plotter {
 
     }
 
-    private static CsvSummary toCsv(final Stream<Entry> entries, final Path tmpDir, final OffsetDateTime dateFrom,
+    private static CsvSummary toCsv(final GroupResult groupResult, final Path tmpDir, final OffsetDateTime dateFrom,
             final OffsetDateTime dateTo, PlotSettings plotSettings) throws IOException {
 
         final File dataFile = File.createTempFile("data", ".dat", tmpDir.toFile());
         final long start = System.nanoTime();
+        final Stream<Entry> entries = groupResult.asStream();
         int count = 0;
         final long fromEpochMilli = dateFrom.toInstant().toEpochMilli();
         final long toEpochMilli = dateTo.toInstant().toEpochMilli();
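With the new signature, toCsv receives the GroupResult itself and opens the Stream<Entry> internally, so the code that consumes the stream also creates it, and the group's metadata stays available for the enriched METRICS_LOGGER line in the next hunk. A small self-contained sketch of that "pass the source, not the stream" refactor, using hypothetical stand-in types rather than the project's real GroupResult and Entry API:

    import java.util.List;
    import java.util.stream.Stream;

    class ToCsvSketch {
        // hypothetical stand-in for GroupResult: entries plus grouping metadata
        record Group(String groupedBy, List<Long> values) {
            Stream<Long> asStream() { return values.stream(); }
        }

        // before: write(Stream<Long> entries, ...) forced the caller to create the stream;
        // now creation, consumption and logging live in the same method
        static long write(final Group group) {
            final Stream<Long> entries = group.asStream();
            final long count = entries.count(); // placeholder for the real CSV-writing loop
            System.out.printf("wrote %d values, grouping=%s%n", count, group.groupedBy());
            return count;
        }

        public static void main(final String[] args) {
            write(new Group("host=web01", List.of(1L, 2L, 3L)));
        }
    }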
@@ -285,7 +287,7 @@ public class Plotter {
             }
         }
 
-        METRICS_LOGGER.debug("wrote {} values to csv in: {}ms (ignored {} values) use millis: {}", count, (System.nanoTime() - start) / 1_000_000.0, ignoredValues, Boolean.toString(useMillis));
+        METRICS_LOGGER.debug("wrote {} values to csv in: {}ms (ignored {} values) use millis: {}, grouping={}, file={}", count, (System.nanoTime() - start) / 1_000_000.0, ignoredValues, Boolean.toString(useMillis), groupResult.getGroupedBy(),dataFile);
         return new CsvSummary(dataFile, count, maxValue, aggregator.getAggregatedData());
     }
 
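Since the groups are rendered with a parallel forEach (see the -96,9 hunk above), debug lines from different groups can interleave; adding grouping= and file= to the message presumably makes each timing attributable to a specific group and its temporary data file.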
@@ -26,4 +26,9 @@ class Group {
     public void addFile(final PdbFile file) {
         files.add(file);
     }
+
+    @Override
+    public String toString() {
+        return tags + ": " + files.size()+" files";
+    }
 }
@@ -8,8 +8,11 @@ import java.util.List;
 import java.util.Map;
 
 import org.lucares.pdb.api.Tags;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class Grouping {
+    private static final Logger LOGGER = LoggerFactory.getLogger(Grouping.class);
 
     public static final List<String> NO_GROUPING = Collections.emptyList();
 
@@ -42,6 +45,7 @@ public class Grouping {
             }
             result = new Grouping(grouping.values());
         }
+        LOGGER.trace("grouped {} files by {}: {}", pdbFiles.size(), groupByField, result);
         return result;
     }
 
@@ -60,4 +64,9 @@ public class Grouping {
     public Collection<Group> getGroups() {
         return groups;
     }
+
+    @Override
+    public String toString() {
+        return String.valueOf(groups);
+    }
 }
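Taken together, the last three hunks make the new trace output in Grouping readable: slf4j renders the result argument via its toString(), Grouping.toString() delegates to String.valueOf(groups), and each Group now prints as its tags and a file count instead of the default Object representation.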