add second parser that uses a standard CSV reader
@@ -0,0 +1,76 @@
package org.lucares.pdbui;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.OffsetDateTime;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoUnit;
import java.util.concurrent.ArrayBlockingQueue;

import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.lucares.collections.LongList;
import org.lucares.pdb.api.DateTimeRange;
import org.lucares.pdb.api.Query;
import org.lucares.pdb.datastore.Entries;
import org.lucares.pdbui.CsvReaderSettings.ColumnDefinitions;
import org.lucares.performance.db.PerformanceDb;
import org.lucares.utils.file.FileUtils;

public class CsvReaderCsvToEntryTransformerTest {

    private Path dataDirectory;

    @BeforeEach
    public void beforeMethod() throws IOException {
        dataDirectory = Files.createTempDirectory("pdb");
    }

    @AfterEach
    public void afterMethod() throws IOException {
        FileUtils.delete(dataDirectory);
    }

    @Test
    public void test() throws Exception {

        final OffsetDateTime dateA = OffsetDateTime.now();
        final OffsetDateTime dateB = OffsetDateTime.now();

        try (final PerformanceDb db = new PerformanceDb(dataDirectory)) {

            final String csv = "#comment line\n"//
                    + "@timestamp,duration,tag,ignored\n"//
                    + dateA.format(DateTimeFormatter.ISO_ZONED_DATE_TIME) + ",1,\"tagValue\",ignored\n"//
                    + dateB.format(DateTimeFormatter.ISO_ZONED_DATE_TIME) + ",2,\"tagValue\",ignored\n";

            final ArrayBlockingQueue<Entries> queue = db.getQueue();
            final ColumnDefinitions columnDefinitions = new ColumnDefinitions();
            columnDefinitions.ignoreColumn("ignored");

            final CsvReaderSettings settings = CsvReaderSettings.create("@timestamp", "duration", ",",
                    columnDefinitions);

            final CsvReaderCsvToEntryTransformer transformer = new CsvReaderCsvToEntryTransformer(queue, settings);
            transformer.readCSV(new ByteArrayInputStream(csv.getBytes(StandardCharsets.UTF_8)));
            queue.put(Entries.POISON);
        }

        try (PerformanceDb db = new PerformanceDb(dataDirectory)) {
            final LongList result = db.get(new Query("tag=tagValue", DateTimeRange.max())).singleGroup().flatMap();
            Assertions.assertEquals(result.size(), 4);

            Assertions.assertEquals(result.get(0), dateA.toInstant().toEpochMilli());
            Assertions.assertEquals(result.get(1), 1);

            Assertions.assertEquals(result.get(2), dateB.toInstant().truncatedTo(ChronoUnit.MILLIS).toEpochMilli());
            Assertions.assertEquals(result.get(3), 2);
        }

    }
}
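The diff adds only the test; the CsvReaderCsvToEntryTransformer itself is not part of this excerpt. As a rough illustration of what "uses a standard CSV reader" could look like, here is a minimal, self-contained sketch that parses the same CSV layout with Apache Commons CSV. The library choice, the class name CsvReaderSketch, and the println output are assumptions made for illustration only; the real transformer turns each row into Entries and puts them on the PerformanceDb queue, as the test above exercises.

// Sketch only: parses the test's CSV layout ("#comment line", a header row,
// then @timestamp,duration,tag,ignored rows) with Apache Commons CSV.
// The project's Entries/queue handling is omitted here.
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.time.OffsetDateTime;
import java.time.format.DateTimeFormatter;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;

public class CsvReaderSketch {

    public static void readCsv(final InputStream in) throws Exception {
        final CSVFormat format = CSVFormat.DEFAULT.builder()
                .setHeader()                // take column names from the first record
                .setSkipHeaderRecord(true)
                .setCommentMarker('#')      // skip the leading "#comment line"
                .build();
        try (CSVParser parser = CSVParser.parse(in, StandardCharsets.UTF_8, format)) {
            for (final CSVRecord record : parser) {
                final long timestampMillis = OffsetDateTime
                        .parse(record.get("@timestamp"), DateTimeFormatter.ISO_ZONED_DATE_TIME)
                        .toInstant()
                        .toEpochMilli();
                final long duration = Long.parseLong(record.get("duration"));
                final String tag = record.get("tag");
                // The real transformer would build Entries from these values and
                // enqueue them; the sketch just prints one line per CSV row.
                System.out.println(timestampMillis + " " + duration + " tag=" + tag);
            }
        }
    }
}

The rename hunks below keep the existing hand-rolled parser under the name NoCopyCsvToEntryTransformer, which suggests the new variant trades copy-avoiding custom parsing for a standard reader's quoting and comment handling.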
@@ -24,7 +24,7 @@ import org.lucares.pdbui.CsvReaderSettings.ColumnDefinitions;
import org.lucares.performance.db.PerformanceDb;
import org.lucares.utils.file.FileUtils;

-public class CsvToEntryTransformerTest {
+public class NoCopyCsvToEntryTransformerTest {

    private Path dataDirectory;

@@ -52,7 +52,7 @@ public class CsvToEntryTransformerTest {
            final ArrayBlockingQueue<Entries> queue = db.getQueue();
            final CsvReaderSettings settings = CsvReaderSettings.create("@timestamp", "duration", ",",
                    new ColumnDefinitions());
-           final CsvToEntryTransformer csvToEntryTransformer = new CsvToEntryTransformer(queue, settings);
+           final NoCopyCsvToEntryTransformer csvToEntryTransformer = new NoCopyCsvToEntryTransformer(queue, settings);
            csvToEntryTransformer.readCSV(new ByteArrayInputStream(csv.getBytes(StandardCharsets.UTF_8)));
            queue.put(Entries.POISON);
        }
@@ -94,7 +94,7 @@ public class CsvToEntryTransformerTest {
            columnDefinitions.ignoreColumn("ignoredColumn");
            final CsvReaderSettings settings = CsvReaderSettings.create("@timestamp", "duration", ",",
                    columnDefinitions);
-           final CsvToEntryTransformer csvToEntryTransformer = new CsvToEntryTransformer(queue, settings);
+           final NoCopyCsvToEntryTransformer csvToEntryTransformer = new NoCopyCsvToEntryTransformer(queue, settings);
            csvToEntryTransformer.readCSV(new ByteArrayInputStream(csv.getBytes(StandardCharsets.UTF_8)));
            queue.put(Entries.POISON);
        }
@@ -200,7 +200,7 @@ public class TcpIngestorTest {
                    Instant.ofEpochMilli(1).atOffset(ZoneOffset.UTC).format(DateTimeFormatter.ISO_ZONED_DATE_TIME));
            entry.put("duration", 1);
            entry.put("host", "someHost");
-           entry.put(CsvToEntryTransformer.COLUM_IGNORE_PREFIX + "ignored", "ignoredValue");
+           entry.put(NoCopyCsvToEntryTransformer.COLUM_IGNORE_PREFIX + "ignored", "ignoredValue");

            PdbTestUtil.sendAsCsv(ingestor.getPort(), entry);
        } catch (final Exception e) {