@@ -166,8 +166,6 @@ public final class CsvReaderSettings {
 
     private String comment = "#";
 
-    private String indexId = "default";
-
     public CsvReaderSettings() {
         this("@timestamp", "duration", ",", new ColumnDefinitions());
     }
@@ -236,14 +234,6 @@ public final class CsvReaderSettings {
        return bytes[0];
    }
 
-    public void setIndexId(final String indexId) {
-        this.indexId = indexId;
-    }
-
-    public String getIndexId() {
-        return indexId;
-    }
-
    public void putAdditionalTag(final String field, final String value) {
        additionalTags.put(field, value);
    }
@@ -263,4 +253,5 @@ public final class CsvReaderSettings {
    public void setColumnDefinitions(final ColumnDefinitions columnDefinitions) {
        this.columnDefinitions = columnDefinitions;
    }
+
 }

@@ -14,12 +14,11 @@ import java.util.function.Function;
 import org.lucares.collections.IntList;
 import org.lucares.pdb.api.Tags;
 import org.lucares.pdb.api.TagsBuilder;
+import org.lucares.pdb.datastore.Entries;
 import org.lucares.pdb.datastore.Entry;
-import org.lucares.pdb.datastore.PdbIndexId;
 import org.lucares.pdbui.CsvReaderSettings.ColumnDefinitions;
 import org.lucares.pdbui.CsvReaderSettings.PostProcessors;
 import org.lucares.pdbui.date.FastISODateParser;
-import org.lucares.performance.db.Entries;
 import org.lucares.utils.CollectionUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -44,13 +43,11 @@ class CsvToEntryTransformer {
 
    void readCSV(final InputStream in) throws IOException, InterruptedException, TimeoutException {
        final int chunksize = 1000;
-        PdbIndexId indexId = new PdbIndexId(settings.getIndexId());
-        Entries entries = new Entries(indexId, chunksize);
+        Entries entries = new Entries(chunksize);
 
        final byte newline = '\n';
        final byte separator = settings.separatorByte();
        final byte comment = settings.commentByte();
-        final byte indexIdLinePrefix = 0x01; // Start of Heading (ASCII)
        final byte[] line = new byte[64 * 1024]; // max line length
        int offsetInLine = 0;
        int offsetInBuffer = 0;
@@ -76,22 +73,18 @@ class CsvToEntryTransformer {
                bytesInLine = offsetInLine + length;
                separatorPositions.add(offsetInLine + i - offsetInBuffer);
 
-                if (line[0] == indexIdLinePrefix) {
-                    queue.put(entries);
-                    indexId = new PdbIndexId(new String(line, 1, bytesInLine - 1, StandardCharsets.UTF_8));
-                    entries = new Entries(indexId, chunksize);
-                } else if (line[0] == comment) {
+                if (line[0] == comment) {
                    // ignore
                } else if (compressedHeaders != null) {
 
                    final Entry entry = handleCsvLine(line, bytesInLine, separatorPositions, keyTimestamp,
-                            keyDuration, dateParser, additionalTags, indexId);
+                            keyDuration, dateParser, additionalTags);
                    if (entry != null) {
                        entries.add(entry);
                    }
                    if (entries.size() >= chunksize) {
                        queue.put(entries);
-                        entries = new Entries(indexId, chunksize);
+                        entries = new Entries(chunksize);
                    }
                } else {
                    handleCsvHeaderLine(line, bytesInLine, separatorPositions);
@@ -115,7 +108,7 @@ class CsvToEntryTransformer {
            }
        }
        final Entry entry = handleCsvLine(line, bytesInLine, separatorPositions, keyTimestamp, keyDuration, dateParser,
-                additionalTags, indexId);
+                additionalTags);
        if (entry != null) {
            entries.add(entry);
        }
@@ -173,7 +166,7 @@ class CsvToEntryTransformer {
 
    private Entry handleCsvLine(final byte[] line, final int bytesInLine, final IntList separatorPositions,
            final int keyTimestamp, final int keyDuration, final FastISODateParser dateParser,
-            final Tags additionalTags, final PdbIndexId indexId) {
+            final Tags additionalTags) {
        try {
            final int[] columns = compressedHeaders;
            if (separatorPositions.size() != columns.length) {

@@ -12,7 +12,7 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
 
-import org.lucares.performance.db.Entries;
+import org.lucares.pdb.datastore.Entries;
 import org.lucares.performance.db.PerformanceDb;
 import org.lucares.utils.file.FileUtils;
 import org.slf4j.Logger;
@@ -50,7 +50,6 @@ public class CsvUploadHandler implements PropertyKeys, DisposableBean {
        synchronized (this) {
            final CsvToEntryTransformer csvToEntryTransformer = new CsvToEntryTransformer(queue, settings);
            try (InputStream in = file.getInputStream()) {
-
                csvToEntryTransformer.readCSV(in);
            } catch (final Exception e) {
                LOGGER.error("csv ingestion failed", e);

@@ -0,0 +1,127 @@
+package org.lucares.pdbui;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.ArrayBlockingQueue;
+import java.util.regex.Pattern;
+
+import org.lucares.pdb.api.Tags;
+import org.lucares.pdb.api.TagsBuilder;
+import org.lucares.pdb.datastore.Entries;
+import org.lucares.pdb.datastore.Entry;
+import org.lucares.performance.db.PdbExport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * File format goals: minimal size / minimal repetition, while also providing a
+ * file format that can be used for "normal" ingestion, not just backup/restore.
+ * It should be easy to implement in any language. It should be easy to debug.
+ * <p>
+ * Note: Line breaks are written as {@code \n}.
+ *
+ * <pre>
+ * #                              // # is the magic byte for the file format, used to detect this format
+ * $123:key1=value1,key2=value2\n // $ marks the beginning of a dictionary entry that says: the following number
+ *                                // will be used to refer to the following tags. In this case the tags
+ *                                // key1=value1,key2=value2 will be identified by 123.
+ *                                // The newline is used as an end marker.
+ * 1534567890,456,123\n           // Defines an entry with timestamp 1534567890, duration 456 and tags key1=value1,key2=value2.
+ * 1,789,123\n                    // Timestamps are encoded using delta encoding. That means this triple defines
+ *                                // an entry with timestamp 1534567891, duration 789 and tags key1=value1,key2=value2.
+ * -2,135,123\n                   // Timestamp delta encoding can contain negative numbers. This triple defines an entry
+ *                                // with timestamp 1534567889, duration 135 and tags key1=value1,key2=value2.
+ * </pre>
+ */
+public class CustomExportFormatToEntryTransformer {
+
+    private static final int ENTRY_BUFFER_SIZE = 100;
+
+    private static final Logger LOGGER = LoggerFactory.getLogger(CustomExportFormatToEntryTransformer.class);
+
+    private final Pattern splitByComma = Pattern.compile(",");
+
+    private final Map<Long, Tags> tagsDictionary = new HashMap<>();
+
+    private long lastEpochMilli;
+
+    public void read(final BufferedReader in, final ArrayBlockingQueue<Entries> queue) throws IOException {
+
+        Entries bufferedEntries = new Entries(ENTRY_BUFFER_SIZE);
+
+        try {
+            String line;
+            while ((line = in.readLine()) != null) {
+                try {
+                    if (line.startsWith(PdbExport.MARKER_DICT_ENTRY)) {
+                        readDictionaryEntry(line);
+                    } else {
+                        final Entry entry = readEntry(line);
+                        if (entry != null) {
+                            bufferedEntries.add(entry);
+
+                            if (bufferedEntries.size() == ENTRY_BUFFER_SIZE) {
+                                queue.put(bufferedEntries);
+                                bufferedEntries = new Entries(ENTRY_BUFFER_SIZE);
+                            }
+                        }
+                    }
+                } catch (final Exception e) {
+                    LOGGER.error("ignoring line '{}'", line, e);
+                }
+            }
+            // flush the remaining buffered entries once the stream ends
+            queue.put(bufferedEntries);
+        } catch (final InterruptedException e) {
+            Thread.currentThread().interrupt();
+            LOGGER.info("aborting because of interruption");
+        }
+    }
+
+    private Entry readEntry(final String line) {
+
+        final String[] timeValueTags = splitByComma.split(line);
+
+        final long timeDelta = Long.parseLong(timeValueTags[0]);
+        final long value = Long.parseLong(timeValueTags[1]);
+        final long tagsId = Long.parseLong(timeValueTags[2]);
+
+        lastEpochMilli = lastEpochMilli + timeDelta;
+
+        final Tags tags = tagsDictionary.get(tagsId);
+        if (tags == null) {
+            LOGGER.info("no tags available for tagsId {}. Ignoring line '{}'", tagsId, line);
+            return null;
+        }
+
+        return new Entry(lastEpochMilli, value, tags);
+    }
+
+    private void readDictionaryEntry(final String line) {
+        final String[] tagsIdToSerializedTags = line.split(Pattern.quote(PdbExport.SEPARATOR_TAG_ID));
+
+        final Long tagId = Long.parseLong(tagsIdToSerializedTags[0], 1, tagsIdToSerializedTags[0].length(), 10);
+        final Tags tags = tagsFromCsv(tagsIdToSerializedTags[1]);
+        tagsDictionary.put(tagId, tags);
+    }
+
+    public static Tags tagsFromCsv(final String line) {
+
+        final TagsBuilder tagsBuilder = new TagsBuilder();
+        final String[] tagsAsString = line.split(Pattern.quote(","));
+
+        for (final String tagAsString : tagsAsString) {
+            final String[] keyValue = tagAsString.split(Pattern.quote("="));
+
+            final int key = Tags.STRING_COMPRESSOR.put(keyValue[0]);
+            final int value = Tags.STRING_COMPRESSOR.put(keyValue[1]);
+            tagsBuilder.add(key, value);
+        }
+
+        return tagsBuilder.build();
+    }
+}
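
For illustration, here is a minimal sketch of the writer side of this format (the change itself only adds the reader). CustomExportFormatWriter is a hypothetical helper, not part of this diff; it assumes tag sets are already serialized as key1=value1,key2=value2 and mirrors the dictionary and delta encoding that CustomExportFormatToEntryTransformer reverses:

    import java.io.IOException;
    import java.io.Writer;
    import java.util.HashMap;
    import java.util.Map;

    // Hypothetical writer sketch for the export format documented above.
    public class CustomExportFormatWriter {

        private final Writer out;
        private final Map<String, Long> tagsToId = new HashMap<>(); // serialized tags -> dictionary id
        private long nextTagsId;
        private long lastEpochMilli; // 0 before the first entry, so the first "delta" is the absolute timestamp

        public CustomExportFormatWriter(final Writer out) throws IOException {
            this.out = out;
            out.write('#'); // magic byte used to detect the format
        }

        public void write(final long epochMilli, final long duration, final String serializedTags) throws IOException {
            Long tagsId = tagsToId.get(serializedTags);
            if (tagsId == null) {
                // first occurrence of this tag set: emit a dictionary entry "$<id>:<tags>\n"
                tagsId = nextTagsId++;
                tagsToId.put(serializedTags, tagsId);
                out.write("$" + tagsId + ":" + serializedTags + "\n");
            }
            // entries are "<timestampDelta>,<duration>,<tagsId>\n", delta encoded against the previous entry
            final long delta = epochMilli - lastEpochMilli;
            lastEpochMilli = epochMilli;
            out.write(delta + "," + duration + "," + tagsId + "\n");
        }
    }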

@@ -1,18 +1,26 @@
 package org.lucares.pdbui;
 
 import java.io.BufferedInputStream;
+import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStream;
+import java.io.InputStreamReader;
 import java.io.PrintWriter;
 import java.net.Socket;
 import java.net.SocketAddress;
+import java.nio.charset.StandardCharsets;
+import java.util.Optional;
 import java.util.concurrent.ArrayBlockingQueue;
 import java.util.concurrent.Callable;
 import java.util.concurrent.TimeoutException;
 import java.util.zip.GZIPInputStream;
 
+import org.lucares.pdb.datastore.Entries;
+import org.lucares.pdb.datastore.Entry;
 import org.lucares.pdbui.CsvReaderSettings.ColumnDefinitions;
-import org.lucares.performance.db.Entries;
+import org.lucares.performance.db.PdbExport;
+
+import com.fasterxml.jackson.core.JsonParseException;
 
 public final class IngestionHandler implements Callable<Void> {
 
@@ -47,7 +55,12 @@ public final class IngestionHandler implements Callable<Void> {
    private void handleInputStream(final InputStream in) throws IOException, InterruptedException, TimeoutException {
        in.mark(1);
        final byte firstByte = (byte) in.read();
-        if (isGZIP(firstByte)) {
+        if (firstByte == '{') {
+            in.reset();
+            readJSON(in);
+        } else if (firstByte == PdbExport.MAGIC_BYTE) {
+            readCustomExportFormat(in);
+        } else if (isGZIP(firstByte)) {
            in.reset();
            final GZIPInputStream gzip = new GZIPInputStream(in);
 
@@ -66,4 +79,50 @@ public final class IngestionHandler implements Callable<Void> {
        // I am cheap and only check the first byte
        return firstByte == 0x1f;
    }
+
+    private void readCustomExportFormat(final InputStream in) throws IOException {
+
+        final CustomExportFormatToEntryTransformer transformer = new CustomExportFormatToEntryTransformer();
+
+        final BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
+        transformer.read(reader, queue);
+    }
+
+    private void readJSON(final InputStream in) throws IOException, InterruptedException {
+        final int chunksize = 100;
+        Entries entries = new Entries(chunksize);
+
+        final BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
+
+        String line = reader.readLine();
+
+        final JsonToEntryTransformer transformer = new JsonToEntryTransformer();
+        final Optional<Entry> firstEntry = transformer.toEntry(line);
+        if (firstEntry.isPresent()) {
+            TcpIngestor.LOGGER.debug("adding entry to queue: {}", firstEntry);
+            entries.add(firstEntry.get());
+        }
+
+        while ((line = reader.readLine()) != null) {
+            try {
+                final Optional<Entry> entry = transformer.toEntry(line);
+                if (entry.isPresent()) {
+                    TcpIngestor.LOGGER.debug("adding entry to queue: {}", entry);
+                    entries.add(entry.get());
+                }
+            } catch (final JsonParseException e) {
+                TcpIngestor.LOGGER.info("json parse error in line '" + line + "'", e);
+            }
+
+            if (entries.size() == chunksize) {
+                queue.put(entries);
+                entries = new Entries(chunksize);
+            }
+        }
+        queue.put(entries);
+    }
+}

@@ -0,0 +1,97 @@
+package org.lucares.pdbui;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.Optional;
+
+import org.lucares.pdb.api.Tags;
+import org.lucares.pdb.api.TagsBuilder;
+import org.lucares.pdb.datastore.Entry;
+import org.lucares.pdbui.date.FastISODateParser;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
+
+public class JsonToEntryTransformer implements LineToEntryTransformer {
+    private static final Logger LOGGER = LoggerFactory.getLogger(JsonToEntryTransformer.class);
+
+    private final TypeReference<Map<String, Object>> typeReferenceForMap = new TypeReference<Map<String, Object>>() {
+    };
+
+    private final ObjectMapper objectMapper = new ObjectMapper();
+    private final ObjectReader objectReader = objectMapper.readerFor(typeReferenceForMap);
+    private final FastISODateParser fastISODateParser = new FastISODateParser();
+
+    @Override
+    public Optional<Entry> toEntry(final String line) throws IOException {
+
+        final Map<String, Object> object = objectReader.readValue(line);
+
+        final Optional<Entry> entry = createEntry(object);
+
+        return entry;
+    }
+
+    public Optional<Entry> createEntry(final Map<String, Object> map) {
+        try {
+            if (map.containsKey("duration") && map.containsKey("@timestamp")) {
+                final long epochMilli = getDate(map);
+                final long duration = (int) map.get("duration");
+
+                final Tags tags = createTags(map);
+
+                final Entry entry = new Entry(epochMilli, duration, tags);
+                return Optional.of(entry);
+            } else {
+                LOGGER.info("Skipping invalid entry: " + map);
+                return Optional.empty();
+            }
+        } catch (final Exception e) {
+            LOGGER.error("Failed to create entry from map: " + map, e);
+            return Optional.empty();
+        }
+    }
+
+    private Tags createTags(final Map<String, Object> map) {
+        final TagsBuilder tags = TagsBuilder.create();
+        for (final java.util.Map.Entry<String, Object> e : map.entrySet()) {
+
+            final String key = e.getKey();
+            final Object value = e.getValue();
+
+            switch (key) {
+            case "@timestamp":
+            case "duration":
+                // these fields are not tags
+                break;
+            case "tags":
+                // ignore: we only support key/value tags
+                break;
+            default:
+                final int keyAsInt = Tags.STRING_COMPRESSOR.put(key);
+                final int valueAsInt;
+                if (value instanceof String) {
+                    valueAsInt = Tags.STRING_COMPRESSOR.put((String) value);
+                } else if (value != null) {
+                    valueAsInt = Tags.STRING_COMPRESSOR.put(String.valueOf(value));
+                } else {
+                    continue;
+                }
+                tags.add(keyAsInt, valueAsInt);
+                break;
+            }
+        }
+        return tags.build();
+    }
+
+    private long getDate(final Map<String, Object> map) {
+        final String timestamp = (String) map.get("@timestamp");
+
+        return fastISODateParser.parseAsEpochMilli(timestamp);
+    }
+}
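
For reference, a minimal usage sketch of the transformer added above, on one line of an entry-per-line JSON stream (the field values here are illustrative, not taken from this change):

    final JsonToEntryTransformer transformer = new JsonToEntryTransformer();
    final Optional<Entry> entry = transformer
            .toEntry("{\"@timestamp\":\"2019-01-01T00:00:00.000Z\",\"duration\":42,\"host\":\"web-1\"}");
    // "@timestamp" and "duration" become the entry's time and value; every other
    // key/value pair ("host" here) is stored as a tag.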

@@ -15,22 +15,16 @@ import java.util.concurrent.TimeoutException;
 import java.util.concurrent.locks.ReentrantLock;
 import java.util.regex.Pattern;
 
-import javax.websocket.server.PathParam;
-
 import org.apache.commons.lang3.StringUtils;
 import org.lucares.pdb.api.DateTimeRange;
 import org.lucares.pdb.api.QueryWithCaretMarker;
 import org.lucares.pdb.api.QueryWithCaretMarker.ResultMode;
-import org.lucares.pdb.datastore.PdbIndex;
-import org.lucares.pdb.datastore.PdbIndexId;
 import org.lucares.pdb.datastore.Proposal;
 import org.lucares.pdb.plot.api.PlotSettings;
 import org.lucares.pdbui.domain.AutocompleteProposal;
 import org.lucares.pdbui.domain.AutocompleteProposalByValue;
 import org.lucares.pdbui.domain.AutocompleteResponse;
 import org.lucares.pdbui.domain.FilterDefaults;
-import org.lucares.pdbui.domain.Index;
-import org.lucares.pdbui.domain.IndexesResponse;
 import org.lucares.pdbui.domain.PlotRequest;
 import org.lucares.pdbui.domain.PlotResponse;
 import org.lucares.pdbui.domain.PlotResponseStats;
@@ -90,39 +84,16 @@ public class PdbController implements HardcodedValues, PropertyKeys {
        this.csvUploadHandler = csvUploadHandler;
    }
 
-    @RequestMapping(path = "/indexes", //
+    @RequestMapping(path = "/plots", //
            method = RequestMethod.POST, //
            consumes = MediaType.APPLICATION_JSON_VALUE, //
            produces = MediaType.APPLICATION_JSON_VALUE //
    )
    @ResponseBody
-    public IndexesResponse getIndexes() {
-        final List<Index> indexes = new ArrayList<>();
+    ResponseEntity<PlotResponse> createPlot(@RequestBody final PlotRequest request)
+            throws InternalPlottingException, InterruptedException {
 
-        final List<PdbIndex> availableIndexes = db.getIndexes();
-        for (final PdbIndex pdbIndex : availableIndexes) {
-
-            final String id = pdbIndex.getId().getId();
-            final String name = pdbIndex.getName();
-            final String description = pdbIndex.getDescription();
-
-            indexes.add(new Index(id, name, description));
-        }
-
-        final IndexesResponse result = new IndexesResponse(indexes);
-        return result;
-    }
-
-    @RequestMapping(path = "/indexes/{index}/plots", //
-            method = RequestMethod.POST, //
-            consumes = MediaType.APPLICATION_JSON_VALUE, //
-            produces = MediaType.APPLICATION_JSON_VALUE //
-    )
-    @ResponseBody
-    ResponseEntity<PlotResponse> createPlot(@PathVariable("index") final String index,
-            @RequestBody final PlotRequest request) throws InternalPlottingException, InterruptedException {
-
-        final PlotSettings plotSettings = PlotSettingsTransformer.toSettings(index, request);
+        final PlotSettings plotSettings = PlotSettingsTransformer.toSettings(request);
        if (StringUtils.isBlank(plotSettings.getQuery())) {
            throw new BadRequest("The query must not be empty!");
        }
@@ -213,20 +184,19 @@ public class PdbController implements HardcodedValues, PropertyKeys {
      * } else { throw new
      * ServiceUnavailableException("Too many parallel requests!"); } }; }
      */
-    @RequestMapping(path = "/indexes/{index}/autocomplete", //
+    @RequestMapping(path = "/autocomplete", //
            method = RequestMethod.GET, //
            produces = MediaType.APPLICATION_JSON_VALUE //
    )
    @ResponseBody
-    AutocompleteResponse autocomplete(@PathParam("index") final String index,
-            @RequestParam(name = "query") final String query, @RequestParam(name = "caretIndex") final int caretIndex,
+    AutocompleteResponse autocomplete(@RequestParam(name = "query") final String query,
+            @RequestParam(name = "caretIndex") final int caretIndex,
            @RequestParam(name = "resultMode", defaultValue = "CUT_AT_DOT") final ResultMode resultMode) {
 
        // TODO get date range from UI
        final DateTimeRange dateRange = DateTimeRange.max();
        final int zeroBasedCaretIndex = caretIndex - 1;
-        final QueryWithCaretMarker q = new QueryWithCaretMarker(query, dateRange, zeroBasedCaretIndex, resultMode,
-                index);
+        final QueryWithCaretMarker q = new QueryWithCaretMarker(query, dateRange, zeroBasedCaretIndex, resultMode);
 
        final AutocompleteResponse result = new AutocompleteResponse();
 
@@ -257,29 +227,28 @@ public class PdbController implements HardcodedValues, PropertyKeys {
        return result;
    }
 
-    @RequestMapping(path = "/indexes/{index}/fields", //
+    @RequestMapping(path = "/fields", //
            method = RequestMethod.GET, //
            // consumes = MediaType.APPLICATION_JSON_UTF8_VALUE, //
            produces = MediaType.APPLICATION_JSON_VALUE //
    )
    @ResponseBody
-    List<String> fields(@PathVariable("index") final String index) {
+    List<String> fields() {
        final DateTimeRange dateTimeRange = DateTimeRange.max();
-        final List<String> fields = db.getFields(dateTimeRange, new PdbIndexId(index));
+        final List<String> fields = db.getFields(dateTimeRange);
 
        fields.sort(Collator.getInstance(Locale.ENGLISH));
 
        return fields;
    }
 
-    @RequestMapping(path = "/indexes/{index}/fields/{fieldName}/values", //
+    @RequestMapping(path = "/fields/{fieldName}/values", //
            method = RequestMethod.GET, //
            consumes = MediaType.APPLICATION_JSON_VALUE, //
            produces = MediaType.APPLICATION_JSON_VALUE //
    )
    @ResponseBody
-    SortedSet<String> fields(@PathVariable("index") final String index,
-            @PathVariable(name = "fieldName") final String fieldName,
+    SortedSet<String> fields(@PathVariable(name = "fieldName") final String fieldName,
            @RequestParam(name = "query") final String query) {
 
        // TODO get date range from UI
@@ -289,7 +258,7 @@ public class PdbController implements HardcodedValues, PropertyKeys {
        final int zeroBasedCaretIndex = q.length();
        final DateTimeRange dateRange = DateTimeRange.max();
        final QueryWithCaretMarker autocompleteQuery = new QueryWithCaretMarker(q, dateRange, zeroBasedCaretIndex,
-                ResultMode.FULL_VALUES, index);
+                ResultMode.FULL_VALUES);
 
        final List<Proposal> result = db.autocomplete(autocompleteQuery);
 
@@ -298,14 +267,14 @@ public class PdbController implements HardcodedValues, PropertyKeys {
        return fields;
    }
 
-    @RequestMapping(path = "/indexes/{index}/filters/defaults", //
+    @RequestMapping(path = "/filters/defaults", //
            method = RequestMethod.GET, //
            produces = MediaType.APPLICATION_JSON_VALUE //
    )
    @ResponseBody
-    public FilterDefaults getFilterDefaults(@PathVariable("index") final String index) {
+    public FilterDefaults getFilterDefaults() {
        final Set<String> groupBy = defaultsGroupBy.isBlank() ? Set.of() : Set.of(defaultsGroupBy.split("\\s*,\\s*"));
-        final List<String> fields = fields(index);
+        final List<String> fields = fields();
        return new FilterDefaults(fields, groupBy, defaultsSplitBy);
    }
 

@@ -2,7 +2,6 @@ package org.lucares.pdbui;
 
 import java.util.List;
 
-import org.lucares.pdb.datastore.PdbIndexId;
 import org.lucares.pdb.plot.api.Aggregate;
 import org.lucares.pdb.plot.api.AggregateHandlerCollection;
 import org.lucares.pdb.plot.api.BarChartHandler;
@@ -16,12 +15,11 @@ import org.lucares.pdb.plot.api.YAxisDefinition;
 import org.lucares.pdbui.domain.PlotRequest;
 
 class PlotSettingsTransformer {
-    static PlotSettings toSettings(final String index, final PlotRequest request) {
+    static PlotSettings toSettings(final PlotRequest request) {
 
        final PlotSettings result = new PlotSettings();
 
        result.setQuery(request.getQuery());
-        result.setIndex(new PdbIndexId(index));
        result.setGroupBy(request.getGroupBy());
        result.setHeight(request.getHeight());
        result.setWidth(request.getWidth());

@@ -15,7 +15,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
 
 import javax.annotation.PreDestroy;
 
-import org.lucares.performance.db.Entries;
+import org.lucares.pdb.datastore.Entries;
 import org.lucares.performance.db.PerformanceDb;
 import org.lucares.recommind.logs.Config;
 import org.slf4j.Logger;

@@ -1,74 +0,0 @@
-package org.lucares.pdbui.domain;
-
-import java.util.Objects;
-
-public class Index {
-    private String id;
-    private String name;
-    private String description;
-
-    public Index() {
-        super();
-    }
-
-    public Index(final String id, final String name, final String description) {
-        this.id = id;
-        this.name = name;
-        this.description = description;
-    }
-
-    public String getId() {
-        return id;
-    }
-
-    public void setId(final String id) {
-        this.id = id;
-    }
-
-    public void setName(final String name) {
-        this.name = name;
-    }
-
-    public String getName() {
-        return name;
-    }
-
-    public void setDescription(final String description) {
-        this.description = description;
-    }
-
-    public String getDescription() {
-        return description;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(id, description, name);
-    }
-
-    @Override
-    public boolean equals(final Object obj) {
-        if (this == obj)
-            return true;
-        if (obj == null)
-            return false;
-        if (getClass() != obj.getClass())
-            return false;
-        final Index other = (Index) obj;
-        return Objects.equals(id, other.id) && Objects.equals(description, other.description)
-                && Objects.equals(name, other.name);
-    }
-
-    @Override
-    public String toString() {
-        final StringBuilder builder = new StringBuilder();
-        builder.append("Index [id=");
-        builder.append(id);
-        builder.append(", name=");
-        builder.append(name);
-        builder.append(", description=");
-        builder.append(description);
-        builder.append("]");
-        return builder.toString();
-    }
-}

@@ -1,20 +0,0 @@
-package org.lucares.pdbui.domain;
-
-import java.util.List;
-
-public class IndexesResponse {
-    private List<Index> indexes;
-
-    public IndexesResponse(final List<Index> indexes) {
-        super();
-        this.indexes = indexes;
-    }
-
-    public void setIndexes(final List<Index> indexes) {
-        this.indexes = indexes;
-    }
-
-    public List<Index> getIndexes() {
-        return indexes;
-    }
-}

@@ -19,9 +19,8 @@ import org.junit.jupiter.api.Test;
 import org.lucares.collections.LongList;
 import org.lucares.pdb.api.DateTimeRange;
 import org.lucares.pdb.api.Query;
-import org.lucares.pdb.datastore.PdbIndexId;
+import org.lucares.pdb.datastore.Entries;
 import org.lucares.pdbui.CsvReaderSettings.ColumnDefinitions;
-import org.lucares.performance.db.Entries;
 import org.lucares.performance.db.PerformanceDb;
 import org.lucares.utils.file.FileUtils;
 
@@ -44,14 +43,9 @@ public class CsvToEntryTransformerTest {
        final OffsetDateTime dateA = OffsetDateTime.now();
        final OffsetDateTime dateB = OffsetDateTime.now();
 
-        final String index = "test";
-        final PdbIndexId indexId = new PdbIndexId(index);
        try (final PerformanceDb db = new PerformanceDb(dataDirectory)) {
 
-            db.createIndex(indexId, "test", "");
-
-            final String csv = "\u0001" + index + "\n" //
-                    + "@timestamp,duration,tag\n"//
+            final String csv = "@timestamp,duration,tag\n"//
                    + dateA.format(DateTimeFormatter.ISO_ZONED_DATE_TIME) + ",1,tagValue\n"//
                    + dateB.format(DateTimeFormatter.ISO_ZONED_DATE_TIME) + ",2,tagValue\n";
 
@@ -64,8 +58,7 @@ public class CsvToEntryTransformerTest {
        }
 
        try (PerformanceDb db = new PerformanceDb(dataDirectory)) {
-            final LongList result = db.get(new Query("tag=tagValue", DateTimeRange.max(), indexId.getId()))
-                    .singleGroup().flatMap();
+            final LongList result = db.get(new Query("tag=tagValue", DateTimeRange.max())).singleGroup().flatMap();
            Assertions.assertEquals(result.size(), 4);
 
            Assertions.assertEquals(result.get(0), dateA.toInstant().toEpochMilli());
@@ -90,13 +83,9 @@ public class CsvToEntryTransformerTest {
    @Test
    public void testIgnoreColumns() throws IOException, InterruptedException, TimeoutException {
 
-        final String index = "test";
-        final PdbIndexId indexId = new PdbIndexId(index);
        try (final PerformanceDb db = new PerformanceDb(dataDirectory)) {
-            db.createIndex(indexId, "test", "");
 
-            final String csv = "\u0001" + index + "\n"//
-                    + "@timestamp,duration,ignoredColumn,-otherIgnoredColumn,tag\n"//
+            final String csv = "@timestamp,duration,ignoredColumn,-otherIgnoredColumn,tag\n"//
                    + "2000-01-01T00:00:00.000Z,1,ignoreValue,ignoreValue,tagValue\n"//
                    + "2000-01-01T00:00:00.001Z,2,ignoreValue,ignoreValue,tagValue\n";
 
@@ -111,7 +100,7 @@ public class CsvToEntryTransformerTest {
        }
 
        try (PerformanceDb db = new PerformanceDb(dataDirectory)) {
-            final List<String> availableFields = db.getFields(DateTimeRange.max(), indexId);
+            final List<String> availableFields = db.getFields(DateTimeRange.max());
            Assertions.assertEquals(List.of("tag").toString(), availableFields.toString(),
                    "the ignored field is not returned");
        }
 
@@ -14,7 +14,6 @@ import org.junit.jupiter.api.extension.ExtendWith;
 import org.lucares.collections.LongList;
 import org.lucares.pdb.api.DateTimeRange;
 import org.lucares.pdb.api.Query;
-import org.lucares.pdb.datastore.PdbIndexId;
 import org.lucares.pdbui.CsvReaderSettings.ColumnDefinitions;
 import org.lucares.pdbui.CsvReaderSettings.PostProcessors;
 import org.lucares.performance.db.PerformanceDb;
@@ -51,9 +50,6 @@ public class PdbControllerTest {
    @Test
    public void testUploadCsv() throws InterruptedException {
 
-        final PdbIndexId indexId = new PdbIndexId("test");
-        performanceDb.createIndex(indexId, "test", "");
-
        final String additionalColumn = "additionalColumn";
        final String additionalValue = "additionalValue";
        final String ignoredColumn = "ignoredColumn";
@@ -63,7 +59,6 @@ public class PdbControllerTest {
        final OffsetDateTime dateB = OffsetDateTime.now();
 
        final String csv = "# first line is a comment\n"//
-                + "\u0001" + indexId.getId() + "\n"//
                + timeColumn + "," + valueColumn + ",tag," + ignoredColumn + "\n"//
                + dateA.format(DateTimeFormatter.ISO_ZONED_DATE_TIME) + ",1,tagVALUE,ignoredValue\n"//
                + dateB.format(DateTimeFormatter.ISO_ZONED_DATE_TIME) + ",2,TAGvalue,ignoredValue\n";
@@ -75,12 +70,11 @@ public class PdbControllerTest {
        settings.putAdditionalTag(additionalColumn, additionalValue);
        uploadCsv(settings, csv);
        {
-            final LongList resultTagValue = performanceDb
-                    .get(new Query("tag=tagvalue", DateTimeRange.ofDay(dateA), indexId.getId())).singleGroup()
-                    .flatMap();
-            final LongList resultAdditionalValue = performanceDb.get(
-                    new Query(additionalColumn + "=" + additionalValue, DateTimeRange.ofDay(dateA), indexId.getId()))
+            final LongList resultTagValue = performanceDb.get(new Query("tag=tagvalue", DateTimeRange.ofDay(dateA)))
+                    .singleGroup().flatMap();
+            final LongList resultAdditionalValue = performanceDb
+                    .get(new Query(additionalColumn + "=" + additionalValue, DateTimeRange.ofDay(dateA))).singleGroup()
                    .flatMap();
            System.out.println(PdbTestUtil.timeValueLongListToString(resultTagValue));
 
            Assertions.assertEquals(resultTagValue, resultAdditionalValue,
@@ -96,7 +90,7 @@ public class PdbControllerTest {
            Assertions.assertEquals(2, resultTagValue.get(3));
        }
        {
-            final List<String> fields = performanceDb.getFields(DateTimeRange.max(), indexId);
+            final List<String> fields = performanceDb.getFields(DateTimeRange.max());
            Assertions.assertTrue(!fields.contains(ignoredColumn), "ignoredColumn not in fields. fields: " + fields);
            Assertions.assertTrue(fields.contains(additionalColumn),
                    additionalColumn + " expected in fields. Fields were: " + fields);
 
@@ -20,39 +20,55 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.LinkedBlockingDeque;
 import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
 
 import org.lucares.collections.LongList;
-import org.lucares.pdb.datastore.PdbIndexId;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 
 public class PdbTestUtil {
    private static final Logger LOGGER = LoggerFactory.getLogger(PdbTestUtil.class);
 
    static final Map<String, Object> POISON = new HashMap<>();
 
-    @SafeVarargs
-    public static final void sendAsCsv(final PdbIndexId indexId, final int port, final Map<String, Object>... entries)
+    public static final void send(final String format, final Collection<Map<String, Object>> entries, final int port)
            throws IOException, InterruptedException {
-        sendAsCsv(indexId, Arrays.asList(entries), port);
+        switch (format) {
+        case "csv":
+            sendAsCsv(entries, port);
+            break;
+        case "json":
+            sendAsJson(entries, port);
+            break;
+        default:
+            throw new IllegalStateException("unhandled format: " + format);
+        }
    }
 
-    public static final void sendAsCsv(final PdbIndexId indexId, final Collection<Map<String, Object>> entries,
-            final int port) throws IOException, InterruptedException {
+    @SafeVarargs
+    public static final void sendAsCsv(final int port, final Map<String, Object>... entries)
+            throws IOException, InterruptedException {
+        sendAsCsv(Arrays.asList(entries), port);
+    }
+
+    public static final void sendAsCsv(final Collection<Map<String, Object>> entries, final int port)
+            throws IOException, InterruptedException {
 
        final Set<String> keys = entries.stream().map(Map::keySet).flatMap(Set::stream).collect(Collectors.toSet());
 
-        sendAsCsv(indexId, keys, entries, port);
+        sendAsCsv(keys, entries, port);
    }
 
-    public static final void sendAsCsv(final PdbIndexId indexId, final Collection<String> keys,
-            final Collection<Map<String, Object>> entries, final int port) throws IOException, InterruptedException {
+    public static final void sendAsCsv(final Collection<String> keys, final Collection<Map<String, Object>> entries,
+            final int port) throws IOException, InterruptedException {
 
        final StringBuilder csv = new StringBuilder();
 
-        csv.append("\u0001" + indexId.getId());
        csv.append(String.join(",", keys));
        csv.append("\n");
 
@@ -69,6 +85,48 @@ public class PdbTestUtil {
        send(csv.toString(), port);
    }
 
+    @SafeVarargs
+    public static final void sendAsJson(final int port, final Map<String, Object>... entries)
+            throws IOException, InterruptedException {
+
+        sendAsJson(Arrays.asList(entries), port);
+    }
+
+    public static final void sendAsJson(final Collection<Map<String, Object>> entries, final int port)
+            throws IOException, InterruptedException {
+        final LinkedBlockingDeque<Map<String, Object>> queue = new LinkedBlockingDeque<>(entries);
+        queue.put(POISON);
+        sendAsJson(queue, port);
+    }
+
+    public static final void sendAsJson(final BlockingQueue<Map<String, Object>> aEntriesSupplier, final int port)
+            throws IOException {
+
+        final ObjectMapper mapper = new ObjectMapper();
+        final SocketChannel channel = connect(port);
+
+        Map<String, Object> entry;
+        while ((entry = aEntriesSupplier.poll()) != POISON) {
+
+            final StringBuilder streamData = new StringBuilder();
+            streamData.append(mapper.writeValueAsString(entry));
+            streamData.append("\n");
+
+            final ByteBuffer src = ByteBuffer.wrap(streamData.toString().getBytes(StandardCharsets.UTF_8));
+            channel.write(src);
+        }
+
+        try {
+            // ugly workaround: the channel was closed too early and not all
+            // data was received
+            TimeUnit.MILLISECONDS.sleep(10);
+        } catch (final InterruptedException e) {
+            throw new IllegalStateException(e);
+        }
+        channel.close();
+        LOGGER.trace("closed sender connection");
+    }
+
    public static final void send(final String data, final int port) throws IOException {
 
        final SocketChannel channel = connect(port);
 
@@ -20,10 +20,13 @@ import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.ValueSource;
 import org.lucares.collections.LongList;
 import org.lucares.pdb.api.DateTimeRange;
 import org.lucares.pdb.api.Query;
-import org.lucares.pdb.datastore.PdbIndexId;
+import org.lucares.pdb.datastore.internal.DataStore;
+import org.lucares.performance.db.PdbExport;
 import org.lucares.performance.db.PerformanceDb;
 import org.lucares.utils.file.FileUtils;
 import org.slf4j.Logger;
@@ -54,13 +57,10 @@ public class TcpIngestorTest {
        final OffsetDateTime dateB = OffsetDateTime.now();
        final String host = "someHost";
 
-        final PdbIndexId indexId = new PdbIndexId("test");
        try (TcpIngestor ingestor = new TcpIngestor(dataDirectory)) {
            ingestor.useRandomPort();
            ingestor.start();
-
-            ingestor.getDb().createIndex(indexId, "test", "");
 
            final Map<String, Object> entryA = new HashMap<>();
            entryA.put("duration", 1);
            entryA.put("@timestamp", dateA.format(DateTimeFormatter.ISO_ZONED_DATE_TIME));
@@ -73,15 +73,15 @@ public class TcpIngestorTest {
            entryB.put("host", host);
            entryB.put("tags", Collections.emptyList());
 
-            PdbTestUtil.sendAsCsv(indexId, ingestor.getPort(), entryA, entryB);
+            PdbTestUtil.sendAsJson(ingestor.getPort(), entryA, entryB);
        } catch (final Exception e) {
            LOGGER.error("", e);
            throw e;
        }
 
        try (PerformanceDb db = new PerformanceDb(dataDirectory)) {
-            final LongList result = db.get(new Query("host=" + host, DateTimeRange.ofDay(dateA), indexId.getId()))
-                    .singleGroup().flatMap();
+            final LongList result = db.get(new Query("host=" + host, DateTimeRange.ofDay(dateA))).singleGroup()
+                    .flatMap();
            Assertions.assertEquals(4, result.size());
 
            Assertions.assertEquals(dateA.toInstant().toEpochMilli(), result.get(0));
@@ -92,6 +92,66 @@ public class TcpIngestorTest {
        }
    }
 
+    @Test
+    public void testIngestDataViaTcpStream_CustomFormat() throws Exception {
+
+        final long dateA = Instant.now().toEpochMilli();
+        final long dateB = Instant.now().toEpochMilli() + 1;
+        final long dateC = Instant.now().toEpochMilli() - 1;
+        final DateTimeRange dateRange = DateTimeRange.relativeMinutes(1);
+        final String host = "someHost";
+
+        // 1. insert some data
+        try (TcpIngestor ingestor = new TcpIngestor(dataDirectory)) {
+            ingestor.useRandomPort();
+            ingestor.start();
+
+            final long deltaEpochMilliB = dateB - dateA;
+            final long deltaEpochMilliC = dateC - dateB;
+
+            final String data = "#$0:host=someHost,pod=somePod\n"//
+                    + dateA + ",1,0\n"// previous date is 0, therefore the delta is dateA / using tags with id 0
+                    + "$1:host=someHost,pod=otherPod\n" //
+                    + deltaEpochMilliB + ",2,1\n" // each date is the delta to the previous date / using tags with id 1
+                    + deltaEpochMilliC + ",3,0"; // each date is the delta to the previous date / using tags with id 0
+
+            PdbTestUtil.send(data, ingestor.getPort());
+        } catch (final Exception e) {
+            LOGGER.error("", e);
+            throw e;
+        }
+
+        // 2. export the data
+        final List<Path> exportFiles = PdbExport.export(dataDirectory, dataDirectory.resolve("export"));
+
+        // 3. delete database
+        FileUtils.delete(dataDirectory.resolve(DataStore.SUBDIR_STORAGE));
+
+        // 4. create a new database
+        try (TcpIngestor ingestor = new TcpIngestor(dataDirectory)) {
+            ingestor.useRandomPort();
+            ingestor.start();
+            for (final Path exportFile : exportFiles) {
+                PdbTestUtil.send(exportFile, ingestor.getPort());
+            }
+        }
+
+        // 5. check that the data is correctly inserted
+        try (PerformanceDb db = new PerformanceDb(dataDirectory)) {
+            final LongList result = db.get(new Query("host=" + host, dateRange)).singleGroup().flatMap();
+            Assertions.assertEquals(6, result.size());
+
+            Assertions.assertEquals(dateA, result.get(0));
+            Assertions.assertEquals(1, result.get(1));
+
+            Assertions.assertEquals(dateC, result.get(2));
+            Assertions.assertEquals(3, result.get(3));
+
+            Assertions.assertEquals(dateB, result.get(4));
+            Assertions.assertEquals(2, result.get(5));
+        }
+    }
+
    @Test
    public void testIngestionThreadDoesNotDieOnErrors() throws Exception {
        final OffsetDateTime dateA = OffsetDateTime.now().minusMinutes(1);
@@ -99,13 +159,10 @@ public class TcpIngestorTest {
        final DateTimeRange dateRange = new DateTimeRange(dateA, dateB);
        final String host = "someHost";
 
-        final PdbIndexId indexId = new PdbIndexId("test");
        try (TcpIngestor tcpIngestor = new TcpIngestor(dataDirectory)) {
            tcpIngestor.useRandomPort();
            tcpIngestor.start();
-
-            tcpIngestor.getDb().createIndex(indexId, "test", "");
 
            // has a negative epoch time milli and negative value
            final Map<String, Object> entryA = new HashMap<>();
            entryA.put("duration", 1);
@@ -135,8 +192,7 @@ public class TcpIngestorTest {
        }
 
        try (PerformanceDb db = new PerformanceDb(dataDirectory)) {
-            final LongList result = db.get(new Query("host=" + host, dateRange, indexId.getId())).singleGroup()
-                    .flatMap();
+            final LongList result = db.get(new Query("host=" + host, dateRange)).singleGroup().flatMap();
            Assertions.assertEquals(4, result.size());
 
            Assertions.assertEquals(dateA.toInstant().truncatedTo(ChronoUnit.MILLIS).toEpochMilli(), result.get(0));
@@ -147,7 +203,9 @@ public class TcpIngestorTest {
        }
    }
 
-    public void testRandomOrder() throws Exception {
+    @ParameterizedTest
+    @ValueSource(strings = { "csv", "json" })
+    public void testRandomOrder(final String format) throws Exception {
 
        final ThreadLocalRandom rnd = ThreadLocalRandom.current();
        final String host = "someHost";
@@ -157,13 +215,10 @@ public class TcpIngestorTest {
 
        final LongList expected = new LongList();
 
-        final PdbIndexId indexId = new PdbIndexId("test");
        try (TcpIngestor ingestor = new TcpIngestor(dataDirectory)) {
            ingestor.useRandomPort();
            ingestor.start();
-
-            ingestor.getDb().createIndex(indexId, "test", "");
 
            final LinkedBlockingDeque<Map<String, Object>> queue = new LinkedBlockingDeque<>();
 
            for (int i = 0; i < 103; i++) // use number of rows that is not a multiple of a page size
@@ -183,15 +238,14 @@ public class TcpIngestorTest {
                expected.addAll(timestamp, duration);
            }
 
-            PdbTestUtil.sendAsCsv(indexId, queue, ingestor.getPort());
+            PdbTestUtil.send(format, queue, ingestor.getPort());
        } catch (final Exception e) {
            LOGGER.error("", e);
            throw e;
        }
 
        try (PerformanceDb db = new PerformanceDb(dataDirectory)) {
-            final LongList result = db.get(new Query("host=" + host, dateRange, indexId.getId())).singleGroup()
-                    .flatMap();
+            final LongList result = db.get(new Query("host=" + host, dateRange)).singleGroup().flatMap();
            Assertions.assertEquals(LongPair.fromLongList(expected), LongPair.fromLongList(result));
        }
    }
@@ -199,13 +253,10 @@ public class TcpIngestorTest {
    @Test
    public void testCsvIngestorIgnoresColumns() throws Exception {
 
-        final PdbIndexId indexId = new PdbIndexId("test");
        try (TcpIngestor ingestor = new TcpIngestor(dataDirectory)) {
            ingestor.useRandomPort();
            ingestor.start();
-
-            ingestor.getDb().createIndex(indexId, "test", "");
 
            final Map<String, Object> entry = new HashMap<>();
            entry.put("@timestamp",
                    Instant.ofEpochMilli(1).atOffset(ZoneOffset.UTC).format(DateTimeFormatter.ISO_ZONED_DATE_TIME));
@@ -213,14 +264,14 @@ public class TcpIngestorTest {
            entry.put("host", "someHost");
            entry.put(CsvToEntryTransformer.COLUM_IGNORE_PREFIX + "ignored", "ignoredValue");
 
-            PdbTestUtil.sendAsCsv(indexId, ingestor.getPort(), entry);
+            PdbTestUtil.sendAsCsv(ingestor.getPort(), entry);
        } catch (final Exception e) {
            LOGGER.error("", e);
            throw e;
        }
 
        try (PerformanceDb db = new PerformanceDb(dataDirectory)) {
-            final List<String> availableFields = db.getFields(DateTimeRange.max(), indexId);
+            final List<String> availableFields = db.getFields(DateTimeRange.max());
            Assertions.assertEquals(List.of("host").toString(), availableFields.toString(),
                    "the ignored field is not returned");
        }
@@ -232,15 +283,10 @@ public class TcpIngestorTest {
        final String host = "someHost";
        final long value1 = 222;
        final long value2 = 1;
-
-        final PdbIndexId indexId = new PdbIndexId("test");
-
        try (TcpIngestor ingestor = new TcpIngestor(dataDirectory)) {
            ingestor.useRandomPort();
            ingestor.start();
-
-            ingestor.getDb().createIndex(indexId, "test", "");
 
            final Map<String, Object> entry1 = new HashMap<>();
            entry1.put("@timestamp",
                    Instant.ofEpochMilli(1).atOffset(ZoneOffset.UTC).format(DateTimeFormatter.ISO_ZONED_DATE_TIME));
@@ -253,7 +299,7 @@ public class TcpIngestorTest {
            entry2.put("host", host);
            entry2.put("duration", value2);
 
-            PdbTestUtil.sendAsCsv(indexId, List.of("@timestamp", "host", "duration"), List.of(entry1, entry2),
+            PdbTestUtil.sendAsCsv(List.of("@timestamp", "host", "duration"), List.of(entry1, entry2),
                    ingestor.getPort());
        } catch (final Exception e) {
            LOGGER.error("", e);
@@ -261,8 +307,7 @@ public class TcpIngestorTest {
        }
 
        try (PerformanceDb db = new PerformanceDb(dataDirectory)) {
-            final LongList result = db.get(new Query("host=" + host, DateTimeRange.max(), indexId.getId()))
-                    .singleGroup().flatMap();
+            final LongList result = db.get(new Query("host=" + host, DateTimeRange.max())).singleGroup().flatMap();
            Assertions.assertEquals(4, result.size());
 
            Assertions.assertEquals(value1, result.get(1));
@@ -280,14 +325,10 @@ public class TcpIngestorTest {
        final OffsetDateTime dateNovember = OffsetDateTime.of(2019, 11, 30, 23, 59, 59, 999, ZoneOffset.UTC);
        final OffsetDateTime dateDecember = OffsetDateTime.of(2019, 12, 1, 0, 0, 0, 0, ZoneOffset.UTC);
 
-        final PdbIndexId indexId = new PdbIndexId("test");
-
        try (TcpIngestor ingestor = new TcpIngestor(dataDirectory)) {
            ingestor.useRandomPort();
            ingestor.start();
-
-            ingestor.getDb().createIndex(indexId, "test", "");
 
            final Map<String, Object> entry1 = new HashMap<>();
            entry1.put("@timestamp", dateNovember.format(DateTimeFormatter.ISO_ZONED_DATE_TIME));
            entry1.put("host", host);
@@ -298,7 +339,7 @@ public class TcpIngestorTest {
            entry2.put("host", host);
            entry2.put("duration", value2);
 
-            PdbTestUtil.sendAsCsv(indexId, List.of("@timestamp", "host", "duration"), List.of(entry1, entry2),
+            PdbTestUtil.sendAsCsv(List.of("@timestamp", "host", "duration"), List.of(entry1, entry2),
                    ingestor.getPort());
        } catch (final Exception e) {
            LOGGER.error("", e);
@@ -307,15 +348,13 @@ public class TcpIngestorTest {
 
        try (PerformanceDb db = new PerformanceDb(dataDirectory)) {
            final DateTimeRange rangeNovember = new DateTimeRange(dateNovember, dateNovember);
-            final LongList resultNovember = db.get(new Query("host=" + host, rangeNovember, indexId.getId()))
-                    .singleGroup().flatMap();
+            final LongList resultNovember = db.get(new Query("host=" + host, rangeNovember)).singleGroup().flatMap();
            Assertions.assertEquals(2, resultNovember.size());
            Assertions.assertEquals(dateNovember.toInstant().toEpochMilli(), resultNovember.get(0));
            Assertions.assertEquals(value1, resultNovember.get(1));
 
            final DateTimeRange rangeDecember = new DateTimeRange(dateDecember, dateDecember);
-            final LongList resultDecember = db.get(new Query("host=" + host, rangeDecember, indexId.getId()))
-                    .singleGroup().flatMap();
+            final LongList resultDecember = db.get(new Query("host=" + host, rangeDecember)).singleGroup().flatMap();
            Assertions.assertEquals(2, resultDecember.size());
            Assertions.assertEquals(dateDecember.toInstant().toEpochMilli(), resultDecember.get(0));
            Assertions.assertEquals(value2, resultDecember.get(1));
 