apply new code formatter and save action
@@ -14,88 +14,88 @@ import java.util.List;
public class CsvToEntryTransformerPerformanceTest {

    private static final byte NEWLINE = '\n';

    @SuppressWarnings("unused")
    public static void main(final String[] args) throws Exception {
        // final Path csvFile =
        // Paths.get("/home/andi/ws/performanceDb/data/production/1k.csv");
        final Path csvFile = Paths.get("/home/andi/ws/performanceDb/data/production/logs_2018-09-05_2018-09-05.csv");

        final int skip = 0;

        final List<Double> times = new ArrayList<>();
        for (int i = 0; i < 105; i++) {
            final long start = System.nanoTime();
            runtest(csvFile);
            final double duration = (System.nanoTime() - start) / 1_000_000.0;
            times.add(duration);
            // System.out.println("duration: " + duration + "ms");
            if (i >= skip) {
                System.out.println((int) Math.round(duration * 1000));
            }
        }

        final DoubleSummaryStatistics summaryStatisticsPut = times.stream().skip(skip).mapToDouble(d -> (double) d)
                .summaryStatistics();
        // System.out.println("summary: " + summaryStatisticsPut);
    }

    @SuppressWarnings("unused")
    private static void runtest(final Path csvFile) throws IOException, FileNotFoundException {
        final byte newline = NEWLINE;

        byte[] line = new byte[4096]; // max line length
        int offsetInLine = 0;
        int offsetInBuffer = 0;
        int linecount = 0;

        try (final FileChannel channel = FileChannel.open(csvFile, StandardOpenOption.READ)) {
            int read = 0;
            int bytesInLine = 0;

            final ByteBuffer buffer = ByteBuffer.allocate(4096 * 4);
            while ((read = channel.read(buffer)) >= 0) {
                offsetInBuffer = 0;

                final byte[] b = buffer.array();

                for (int i = 0; i < read; i++) {
                    if (b[i] == newline) {
                        final int length = i - offsetInBuffer;
                        System.arraycopy(b, offsetInBuffer, line, offsetInLine, length);
                        bytesInLine = offsetInLine + length;

                        linecount++;
                        handleLine(line, bytesInLine);
                        line = new byte[4096];

                        offsetInBuffer = i + 1;
                        offsetInLine = 0;
                        bytesInLine = 0;
                    }
                }
                if (offsetInBuffer < read) {
                    final int length = read - offsetInBuffer;
                    System.arraycopy(b, offsetInBuffer, line, offsetInLine, length);
                    bytesInLine = offsetInLine + length;
                    offsetInLine += length;
                    offsetInBuffer = 0;
                }
                buffer.rewind();
            }

            linecount++;
            handleLine(line, bytesInLine);
        }
        // System.out.println("lines: " + linecount);
    }

    private static void handleLine(final byte[] line, final int bytesInLine) {

        @SuppressWarnings("unused")
        final String x = new String(line, 0, bytesInLine, StandardCharsets.UTF_8);
        // System.out.println(">" + x + "<");
    }
}
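A side note, not part of the commit: main() above is a hand-rolled micro-benchmark. It times each run with System.nanoTime(), skips warm-up iterations, and summarizes the remaining samples with DoubleSummaryStatistics. Below is a minimal self-contained sketch of that pattern; the workload is a placeholder Runnable standing in for runtest(Path), and the warm-up count is assumed.

import java.util.ArrayList;
import java.util.DoubleSummaryStatistics;
import java.util.List;

public class NanoTimeBenchmarkSketch {
    public static void main(final String[] args) {
        final int warmup = 5;                                    // assumed warm-up count
        final int iterations = 105;
        final Runnable workload = () -> Math.sqrt(123_456.789);  // placeholder work, not runtest(Path)

        final List<Double> times = new ArrayList<>();
        for (int i = 0; i < iterations; i++) {
            final long start = System.nanoTime();
            workload.run();
            times.add((System.nanoTime() - start) / 1_000_000.0); // elapsed milliseconds
        }

        // Skip the warm-up samples before summarizing, as the test above does with 'skip'.
        final DoubleSummaryStatistics stats = times.stream()
                .skip(warmup)
                .mapToDouble(Double::doubleValue)
                .summaryStatistics();
        System.out.println("min/avg/max ms: " + stats.getMin() + "/" + stats.getAverage() + "/" + stats.getMax());
    }
}
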
@@ -8,65 +8,65 @@ import java.util.List;
import org.lucares.collections.LongList;

final class LongPair implements Comparable<LongPair> {
    private final long a, b;

    public LongPair(final long a, final long b) {
        super();
        this.a = a;
        this.b = b;
    }

    public static List<LongPair> fromLongList(final LongList longList) {
        final List<LongPair> result = new ArrayList<>();
        for (int i = 0; i < longList.size(); i += 2) {
            result.add(new LongPair(longList.get(i), longList.get(i + 1)));
        }
        Collections.sort(result);
        return result;
    }

    public long getA() {
        return a;
    }

    public long getB() {
        return b;
    }

    @Override
    public String toString() {
        return a + "," + b;
    }

    @Override
    public int compareTo(final LongPair o) {
        return Comparator.comparing(LongPair::getA).thenComparing(LongPair::getB).compare(this, o);
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + (int) (a ^ (a >>> 32));
        result = prime * result + (int) (b ^ (b >>> 32));
        return result;
    }

    @Override
    public boolean equals(final Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        final LongPair other = (LongPair) obj;
        if (a != other.a)
            return false;
        if (b != other.b)
            return false;
        return true;
    }
}
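A side note, not part of the commit: fromLongList pairs up a flat (a, b, a, b, ...) sequence and sorts the pairs by a, then b. The same idea over a plain long[], so the sketch stays self-contained (org.lucares.collections.LongList is a project class whose API is only assumed above):

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

public class PairSortSketch {
    public static void main(final String[] args) {
        // Flattened (a, b) pairs, e.g. the (timestamp, value) layout the tests below assert on.
        final long[] flat = { 30, 3, 10, 1, 20, 2 };

        final List<long[]> pairs = new ArrayList<>();
        for (int i = 0; i < flat.length; i += 2) {
            pairs.add(new long[] { flat[i], flat[i + 1] });
        }
        // Order by first component, then second - the same ordering LongPair.compareTo defines.
        pairs.sort(Comparator.<long[]>comparingLong(p -> p[0]).thenComparingLong(p -> p[1]));

        pairs.forEach(p -> System.out.println(p[0] + "," + p[1])); // 10,1 then 20,2 then 30,3
    }
}
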
@@ -28,154 +28,153 @@ import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;

public class PdbTestUtil {
    private static final Logger LOGGER = LoggerFactory.getLogger(PdbTestUtil.class);

    static final Map<String, Object> POISON = new HashMap<>();

    public static final void send(final String format, final Collection<Map<String, Object>> entries)
            throws IOException, InterruptedException {
        switch (format) {
        case "csv":
            sendAsCsv(entries);
            break;
        case "json":
            sendAsJson(entries);
            break;
        default:
            throw new IllegalStateException("unhandled format: " + format);
        }
    }

    @SafeVarargs
    public static final void sendAsCsv(final Map<String, Object>... entries) throws IOException, InterruptedException {
        sendAsCsv(Arrays.asList(entries));
    }

    public static final void sendAsCsv(final Collection<Map<String, Object>> entries)
            throws IOException, InterruptedException {

        final Set<String> keys = entries.stream().map(Map::keySet).flatMap(Set::stream).collect(Collectors.toSet());

        sendAsCsv(keys, entries);
    }

    public static final void sendAsCsv(Collection<String> keys, final Collection<Map<String, Object>> entries)
            throws IOException, InterruptedException {

        final StringBuilder csv = new StringBuilder();

        csv.append(String.join(",", keys));
        csv.append("\n");

        for (final Map<String, Object> entry : entries) {
            final List<String> line = new ArrayList<>();
            for (final String key : keys) {
                final String value = String.valueOf(entry.getOrDefault(key, ""));
                line.add(value);
            }
            csv.append(String.join(",", line));
            csv.append("\n");
        }
        System.out.println("sending: " + csv);
        send(csv.toString());
    }

    @SafeVarargs
    public static final void sendAsJson(final Map<String, Object>... entries) throws IOException, InterruptedException {
        sendAsJson(Arrays.asList(entries));
    }

    public static final void sendAsJson(final Collection<Map<String, Object>> entries)
            throws IOException, InterruptedException {
        final LinkedBlockingDeque<Map<String, Object>> queue = new LinkedBlockingDeque<>(entries);
        queue.put(POISON);
        sendAsJson(queue);
    }

    public static final void sendAsJson(final BlockingQueue<Map<String, Object>> aEntriesSupplier) throws IOException {

        final ObjectMapper mapper = new ObjectMapper();
        final SocketChannel channel = connect();

        Map<String, Object> entry;
        while ((entry = aEntriesSupplier.poll()) != POISON) {

            final StringBuilder streamData = new StringBuilder();
            streamData.append(mapper.writeValueAsString(entry));
            streamData.append("\n");

            final ByteBuffer src = ByteBuffer.wrap(streamData.toString().getBytes(StandardCharsets.UTF_8));
            channel.write(src);
        }

        try {
            // ugly workaround: the channel was closed too early and not all
            // data was received
            TimeUnit.MILLISECONDS.sleep(10);
        } catch (final InterruptedException e) {
            throw new IllegalStateException(e);
        }
        channel.close();
        LOGGER.trace("closed sender connection");
    }

    public static final void send(final String data) throws IOException {

        final SocketChannel channel = connect();

        final StringBuilder streamData = new StringBuilder();
        streamData.append(data);

        final ByteBuffer src = ByteBuffer.wrap(streamData.toString().getBytes(StandardCharsets.UTF_8));
        channel.write(src);

        try {
            // ugly workaround: the channel was closed too early and not all
            // data was received
            TimeUnit.MILLISECONDS.sleep(10);
        } catch (final InterruptedException e) {
            throw new IllegalStateException(e);
        }
        channel.close();
        LOGGER.trace("closed sender connection");
    }

    public static void send(final Path file) throws IOException {
        final SocketChannel outputChannel = connect();

        try (final FileChannel inputChannel = FileChannel.open(file, StandardOpenOption.READ)) {
            inputChannel.transferTo(0, Long.MAX_VALUE, outputChannel);
        }

        try {
            // ugly workaround: the channel was closed too early and not all
            // data was received
            TimeUnit.MILLISECONDS.sleep(10);
        } catch (final InterruptedException e) {
            throw new IllegalStateException(e);
        }
        outputChannel.close();
        LOGGER.trace("closed sender connection");
    }

    private static SocketChannel connect() throws IOException {

        SocketChannel result = null;

        while (true) {
            try {
                result = SocketChannel.open();
                result.configureBlocking(true);
                result.connect(new InetSocketAddress("127.0.0.1", TcpIngestor.PORT));
                break;
            } catch (final ConnectException e) {
                // server socket not yet ready, it should be ready any time soon
            }
        }

        return result;
    }

}
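A side note, not part of the commit: every send variant above boils down to opening a blocking SocketChannel, writing one UTF-8 buffer, and closing. A stripped-down sketch of that path; host and port are plain parameters here, whereas the utility hard-codes 127.0.0.1 and TcpIngestor.PORT. Looping on write() covers partial writes; it does not replace the receive-side sleep workaround the utility applies before closing.

import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.nio.channels.SocketChannel;
import java.nio.charset.StandardCharsets;

public class TcpSendSketch {
    // Writes one UTF-8 payload to a TCP endpoint, mirroring the send(String) path above.
    public static void send(final String host, final int port, final String payload) throws IOException {
        try (SocketChannel channel = SocketChannel.open(new InetSocketAddress(host, port))) {
            final ByteBuffer src = ByteBuffer.wrap(payload.getBytes(StandardCharsets.UTF_8));
            while (src.hasRemaining()) {
                channel.write(src); // write() may be partial, so drain the buffer in a loop
            }
        }
    }
}
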
@@ -37,285 +37,284 @@ import com.fasterxml.jackson.databind.ObjectMapper;
@Test
public class TcpIngestorTest {

    private static final Logger LOGGER = LoggerFactory.getLogger(TcpIngestorTest.class);

    private Path dataDirectory;

    @BeforeMethod
    public void beforeMethod() throws IOException {
        dataDirectory = Files.createTempDirectory("pdb");
    }

    @AfterMethod
    public void afterMethod() throws IOException {
        FileUtils.delete(dataDirectory);
    }

    public void testIngestDataViaTcpStream() throws Exception {

        final OffsetDateTime dateA = OffsetDateTime.now();
        final OffsetDateTime dateB = OffsetDateTime.now();
        final String host = "someHost";

        try (TcpIngestor ingestor = new TcpIngestor(dataDirectory)) {

            ingestor.start();

            final Map<String, Object> entryA = new HashMap<>();
            entryA.put("duration", 1);
            entryA.put("@timestamp", dateA.format(DateTimeFormatter.ISO_ZONED_DATE_TIME));
            entryA.put("host", host);
            entryA.put("tags", Collections.emptyList());

            final Map<String, Object> entryB = new HashMap<>();
            entryB.put("duration", 2);
            entryB.put("@timestamp", dateB.format(DateTimeFormatter.ISO_ZONED_DATE_TIME));
            entryB.put("host", host);
            entryB.put("tags", Collections.emptyList());

            PdbTestUtil.sendAsJson(entryA, entryB);
        } catch (final Exception e) {
            LOGGER.error("", e);
            throw e;
        }

        try (PerformanceDb db = new PerformanceDb(dataDirectory)) {
            final LongList result = db.get(new Query("host=" + host, DateTimeRange.ofDay(dateA))).singleGroup()
                    .flatMap();
            Assert.assertEquals(result.size(), 4);

            Assert.assertEquals(result.get(0), dateA.toInstant().toEpochMilli());
            Assert.assertEquals(result.get(1), 1);

            Assert.assertEquals(result.get(2), dateB.toInstant().truncatedTo(ChronoUnit.MILLIS).toEpochMilli());
            Assert.assertEquals(result.get(3), 2);
        }
    }

    public void testIngestDataViaTcpStream_CustomFormat() throws Exception {

        final long dateA = Instant.now().toEpochMilli();
        final long dateB = Instant.now().toEpochMilli() + 1;
        final long dateC = Instant.now().toEpochMilli() - 1;
        final DateTimeRange dateRange = DateTimeRange.relativeMinutes(1);
        final String host = "someHost";

        // 1. insert some data
        try (TcpIngestor ingestor = new TcpIngestor(dataDirectory)) {

            ingestor.start();

            final long deltaEpochMilliB = dateB - dateA;
            final long deltaEpochMilliC = dateC - dateB;

            final String data = "#$0:host=someHost,pod=somePod\n"//
                    + dateA + ",1,0\n"// previous date is 0, therefore the delta is dateA / using tags with id 0
                    + "$1:host=someHost,pod=otherPod\n" //
                    + deltaEpochMilliB + ",2,1\n" // dates are the delta to the previous date / using tags with id 1
                    + deltaEpochMilliC + ",3,0"; // dates are the delta to the previous date / using tags with id 0

            PdbTestUtil.send(data);
        } catch (final Exception e) {
            LOGGER.error("", e);
            throw e;
        }

        // 2. export the data
        final List<Path> exportFiles = PdbExport.export(dataDirectory, dataDirectory.resolve("export"));

        // 3. delete database
        FileUtils.delete(dataDirectory.resolve(DataStore.SUBDIR_STORAGE));

        // 4. create a new database
        try (TcpIngestor ingestor = new TcpIngestor(dataDirectory)) {
            ingestor.start();
            for (final Path exportFile : exportFiles) {
                PdbTestUtil.send(exportFile);
            }
        }

        // 5. check that the data is correctly inserted
        try (PerformanceDb db = new PerformanceDb(dataDirectory)) {
            final LongList result = db.get(new Query("host=" + host, dateRange)).singleGroup().flatMap();
            Assert.assertEquals(result.size(), 6);

            Assert.assertEquals(result.get(0), dateA);
            Assert.assertEquals(result.get(1), 1);

            Assert.assertEquals(result.get(2), dateC);
            Assert.assertEquals(result.get(3), 3);

            Assert.assertEquals(result.get(4), dateB);
            Assert.assertEquals(result.get(5), 2);
        }
    }

    @Test
    public void testIngestionThreadDoesNotDieOnErrors() throws Exception {
        final OffsetDateTime dateA = OffsetDateTime.ofInstant(Instant.ofEpochMilli(-1), ZoneOffset.UTC);
        final OffsetDateTime dateB = OffsetDateTime.now();
        final DateTimeRange dateRange = new DateTimeRange(dateA, dateB);
        final String host = "someHost";

        try (TcpIngestor tcpIngestor = new TcpIngestor(dataDirectory)) {
            tcpIngestor.start();

            // has a negative epoch time milli and negative value
            final Map<String, Object> entryA = new HashMap<>();
            entryA.put("duration", -1);
            entryA.put("@timestamp", dateA.format(DateTimeFormatter.ISO_ZONED_DATE_TIME));
            entryA.put("host", host);
            entryA.put("tags", Collections.emptyList());

            // skipped, because it is not valid json
            final String corrupEntry = "{\"corrupt...";

            // valid entry
            final Map<String, Object> entryB = new HashMap<>();
            entryB.put("duration", 2);
            entryB.put("@timestamp", dateB.format(DateTimeFormatter.ISO_ZONED_DATE_TIME));
            entryB.put("host", host);
            entryB.put("tags", Collections.emptyList());

            final ObjectMapper objectMapper = new ObjectMapper();
            final String data = String.join("\n", //
                    objectMapper.writeValueAsString(entryA), //
                    corrupEntry, //
                    objectMapper.writeValueAsString(entryB)//
            )//
                    + "\n";

            PdbTestUtil.send(data);
        }

        try (PerformanceDb db = new PerformanceDb(dataDirectory)) {
            final LongList result = db.get(new Query("host=" + host, dateRange)).singleGroup().flatMap();
            Assert.assertEquals(result.size(), 4);

            Assert.assertEquals(result.get(0), dateA.toInstant().truncatedTo(ChronoUnit.MILLIS).toEpochMilli());
            Assert.assertEquals(result.get(1), -1);

            Assert.assertEquals(result.get(2), dateB.toInstant().truncatedTo(ChronoUnit.MILLIS).toEpochMilli());
            Assert.assertEquals(result.get(3), 2);
        }
    }

    @DataProvider
    public Object[][] providerSendingFormats() {
        final List<Object[]> data = new ArrayList<>();

        data.add(new Object[] { "csv" });
        data.add(new Object[] { "json" });

        return data.toArray(Object[][]::new);
    }

    @Test(dataProvider = "providerSendingFormats")
    public void testRandomOrder(final String format) throws Exception {

        final ThreadLocalRandom rnd = ThreadLocalRandom.current();
        final String host = "someHost";
        final List<String> additionalTagValues = Arrays.asList("foo", "bar", "baz");
        final DateTimeRange dateRange = new DateTimeRange(Instant.ofEpochMilli(-100000L).atOffset(ZoneOffset.UTC),
                Instant.ofEpochMilli(10000000L).atOffset(ZoneOffset.UTC));

        final LongList expected = new LongList();

        try (TcpIngestor ingestor = new TcpIngestor(dataDirectory)) {

            ingestor.start();

            final LinkedBlockingDeque<Map<String, Object>> queue = new LinkedBlockingDeque<>();

            for (int i = 0; i < 103; i++) // use number of rows that is not a multiple of a page size
            {

                final long duration = rnd.nextLong(-100000L, 100000L);
                final long timestamp = rnd.nextLong(-100000L, 10000000L);

                final Map<String, Object> entry = new HashMap<>();
                entry.put("@timestamp", Instant.ofEpochMilli(timestamp).atOffset(ZoneOffset.UTC)
                        .format(DateTimeFormatter.ISO_ZONED_DATE_TIME));
                entry.put("duration", duration);
                entry.put("host", host);
                entry.put("additionalKey", additionalTagValues.get(rnd.nextInt(additionalTagValues.size())));

                queue.put(entry);
                expected.addAll(timestamp, duration);
            }

            PdbTestUtil.send(format, queue);
        } catch (final Exception e) {
            LOGGER.error("", e);
            throw e;
        }

        try (PerformanceDb db = new PerformanceDb(dataDirectory)) {
            final LongList result = db.get(new Query("host=" + host, dateRange)).singleGroup().flatMap();
            Assert.assertEquals(LongPair.fromLongList(result), LongPair.fromLongList(expected));
        }
    }

    public void testCsvIngestorIgnoresColumns() throws Exception {

        try (TcpIngestor ingestor = new TcpIngestor(dataDirectory)) {

            ingestor.start();

            final Map<String, Object> entry = new HashMap<>();
            entry.put("@timestamp",
                    Instant.ofEpochMilli(1).atOffset(ZoneOffset.UTC).format(DateTimeFormatter.ISO_ZONED_DATE_TIME));
            entry.put("duration", 1);
            entry.put("host", "someHost");
            entry.put(TcpIngestor.Handler.COLUM_IGNORE_PREFIX + "ignored", "ignoredValue");

            PdbTestUtil.sendAsCsv(entry);
        } catch (final Exception e) {
            LOGGER.error("", e);
            throw e;
        }

        try (PerformanceDb db = new PerformanceDb(dataDirectory)) {
            final List<String> availableFields = db.getFields(DateTimeRange.max());
            Assert.assertEquals(availableFields.toString(), List.of("host").toString(),
                    "the ignored field is not returned");
        }
    }

    public void testCsvIngestorHandlesDurationAtEnd() throws Exception {

        String host = "someHost";
        long value1 = 222;
        long value2 = 1;
        try (TcpIngestor ingestor = new TcpIngestor(dataDirectory)) {

            ingestor.start();

            final Map<String, Object> entry1 = new HashMap<>();
            entry1.put("@timestamp",
                    Instant.ofEpochMilli(1).atOffset(ZoneOffset.UTC).format(DateTimeFormatter.ISO_ZONED_DATE_TIME));
            entry1.put("host", host);
            entry1.put("duration", value1);

            final Map<String, Object> entry2 = new HashMap<>();
            entry2.put("@timestamp",
                    Instant.ofEpochMilli(2).atOffset(ZoneOffset.UTC).format(DateTimeFormatter.ISO_ZONED_DATE_TIME));
            entry2.put("host", host);
            entry2.put("duration", value2);

            PdbTestUtil.sendAsCsv(List.of("@timestamp", "host", "duration"), List.of(entry1, entry2));
        } catch (final Exception e) {
            LOGGER.error("", e);
            throw e;
        }

        try (PerformanceDb db = new PerformanceDb(dataDirectory)) {
            final LongList result = db.get(new Query("host=" + host, DateTimeRange.max())).singleGroup().flatMap();
            Assert.assertEquals(result.size(), 4);

            Assert.assertEquals(result.get(1), value1);
            Assert.assertEquals(result.get(3), value2);
        }
    }
}
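A side note, not part of the commit: the custom format in testIngestDataViaTcpStream_CustomFormat stores each row's timestamp as a delta to the previous row, with the first row relative to 0, so the reader reconstructs absolute timestamps with a running sum. The sketch below only illustrates that decoding step, inferred from the comments in the test data; the "#"/"$<id>:<tags>" framing is project-specific and not reproduced.

public class DeltaTimestampSketch {
    // Turns a sequence of timestamp deltas into absolute timestamps via a running sum.
    public static long[] decode(final long[] deltas) {
        final long[] absolute = new long[deltas.length];
        long previous = 0; // the first delta is relative to 0
        for (int i = 0; i < deltas.length; i++) {
            previous += deltas[i];
            absolute[i] = previous;
        }
        return absolute;
    }

    public static void main(final String[] args) {
        // e.g. dateA=1000, dateB=1001, dateC=999 are encoded as 1000, 1, -2
        final long[] decoded = decode(new long[] { 1000, 1, -2 });
        System.out.println(decoded[0] + ", " + decoded[1] + ", " + decoded[2]); // 1000, 1001, 999
    }
}
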
@@ -20,195 +20,195 @@ import org.testng.annotations.Test;
@Test
public class FastISODateParserTest {

    @DataProvider(name = "providerValidDate")
    public Object[][] providerValidDate() {
        return new Object[][] { //
                { "2018-11-18T14:42:49.123456789Z" }, //
                { "2018-11-18T14:42:49.123456789+12:34" }, //
                { "2018-11-18T14:42:49.12345678Z" }, //
                { "2018-11-18T14:42:49.12345678+12:34" }, //
                { "2018-11-18T14:42:49.1234567Z" }, //
                { "2018-11-18T14:42:49.1234567+12:34" }, //
                { "2018-11-18T14:42:49.123456Z" }, //
                { "2018-11-18T14:42:49.123456+12:34" }, //
                { "2018-11-18T14:42:49.33256Z" }, //
                { "2018-11-18T14:42:49.33256+12:34" }, //
                { "2018-11-18T14:42:49.3325Z" }, //
                { "2018-11-18T14:42:49.3325+12:34" }, //
                { "2018-11-18T14:42:49.332Z" }, //
                { "2018-11-18T14:42:49.332+00:00" }, //
                { "2018-11-18T14:42:49.332+12:34" }, //
                { "2018-11-18T14:42:49.332-01:23" }, //
                { "2018-11-18T14:55:49.44Z" }, //
                { "2018-11-18T14:55:49.55-01:23" }, //
                { "2018-11-18T14:55:49.4Z" }, //
                { "2018-11-18T14:55:49.5-01:23" }, //
                { "2018-11-18T14:55:49.Z" }, //
                { "2018-11-18T14:55:49.-01:23" }, //
                { "2018-11-18T14:55:49Z" }, //
                { "2018-11-18T14:55:49-01:23" },//
        };
    }

    @Test(dataProvider = "providerValidDate")
    public void testParseValidDate(final String date) {

        final OffsetDateTime actualDate = new FastISODateParser().parse(date);

        final OffsetDateTime expectedDate = OffsetDateTime.from(DateTimeFormatter.ISO_DATE_TIME.parse(date));
        Assert.assertEquals(actualDate, expectedDate);
    }

    @Test(dataProvider = "providerValidDate")
    public void testParseValidDateAsEpochMilli(final String date) {

        final long actualDate = new FastISODateParser().parseAsEpochMilli(date);

        final OffsetDateTime expectedDate = OffsetDateTime.from(DateTimeFormatter.ISO_DATE_TIME.parse(date));
        Assert.assertEquals(actualDate, expectedDate.toInstant().toEpochMilli());
    }

    @DataProvider(name = "providerParseInvalidDate")
    public Object[][] providerParseInvalidDate() {
        return new Object[][] { //
                { "a2018-11-18T14:42:49.332Z" }, //
                { "a018-11-18T14:42:49.332Z" }, //
                { "2a18-11-18T14:42:49.332Z" }, //
                { "20a8-11-18T14:42:49.332Z" }, //
                { "201a-11-18T14:42:49.332Z" }, //
                { "2018-a1-18T14:42:49.332Z" }, //
                { "2018-1a-18T14:42:49.332Z" }, //
                { "2018-11-a8T14:42:49.332Z" }, //
                { "2018-11-1aT14:42:49.332Z" }, //
                { "2018-11-18Ta4:42:49.332Z" }, //
                { "2018-11-18T1a:42:49.332Z" }, //
                { "2018-11-18T14:a2:49.332Z" }, //
                { "2018-11-18T14:4a:49.332Z" }, //
                { "2018-11-18T14:42:a9.332Z" }, //
                { "2018-11-18T14:42:4a.332Z" }, //
                { "2018-11-18T14:42:49.a32Z" }, //
                { "2018-11-18T14:42:49.3a2Z" }, //
                { "2018-11-18T14:42:49.33aZ" }, //
                { "2018-11-18T14:42:49.332a" }, //
                { "2018-11-18T14:42:49.332a00:00" }, //
                { "2018-11-18T14:42:49.332+a0:00" }, //
                { "2018-11-18T14:42:49.332+0a:00" }, //
                { "2018-11-18T14:42:49.332+00:a0" }, //
                { "2018-11-18T14:42:49.332+00:0a" }//
        };
    }

    @Test(expectedExceptions = IllegalArgumentException.class, dataProvider = "providerParseInvalidDate")
    public void testParseInvalidDate(final String invalidDate) {
        new FastISODateParser().parse(invalidDate);
    }

    @DataProvider(name = "providerDateToTimestamp")
    public Object[][] providerDateToTimestamp() {
        return new Object[][] { //
                { "2018-11-18T14:42:49.123Z" }, //
                // There are no leap seconds in java-time:
                // In reality, UTC has a leap second 2016-12-31T23:59:60Z, but java handles
                // this differently. This makes it a little bit easier for us, because we do not
                // have to handle this.
                { "2016-12-31T23:59:59.999Z" }, // before leap second
                { "2017-01-01T00:00:00.000Z" }, // after leap second

                // normal leap days exist
                { "2016-02-28T23:59:59.999Z" }, // before leap day
                { "2016-02-29T00:00:00.000Z" }, // leap day
                { "2016-02-29T23:59:59.999Z" }, // leap day
                { "2016-03-01T00:00:00.000Z" }, // after leap day

                // dates with non-UTC timezones
                { "2018-11-18T14:42:49.123+12:34" }, //
                { "2018-11-18T02:34:56.123+12:34" }, //

                // dates with non-UTC timezones and leap days
                { "2016-02-29T00:59:59.999+01:00" }, // before leap day
                { "2016-02-29T01:00:00.000+01:00" }, // leap day
                { "2016-03-01T00:59:59.999+01:00" }, // leap day
                { "2016-03-01T01:00:00.000+01:00" }, // after leap day
        };
    }

    @Test(dataProvider = "providerDateToTimestamp")
    public void testDateToTimestamp(final String date) {

        final long actualEpochMilli = new FastISODateParser().parseAsEpochMilli(date);

        final OffsetDateTime expectedDate = OffsetDateTime.from(DateTimeFormatter.ISO_DATE_TIME.parse(date));
        final long expectedEpochMilli = expectedDate.toInstant().toEpochMilli();
        Assert.assertEquals(actualEpochMilli, expectedEpochMilli);
    }

    @Test(dataProvider = "providerDateToTimestamp")
    public void testDateToTimestampWithBytes(final String date) {

        final byte[] dateAsBytes = date.getBytes(StandardCharsets.UTF_8);
        final long actualEpochMilli = new FastISODateParser().parseAsEpochMilli(dateAsBytes, 0);

        final OffsetDateTime expectedDate = OffsetDateTime.from(DateTimeFormatter.ISO_DATE_TIME.parse(date));
        final long expectedEpochMilli = expectedDate.toInstant().toEpochMilli();
        Assert.assertEquals(actualEpochMilli, expectedEpochMilli);
    }

    @Test(enabled = false)
    public void test() {

        final OffsetDateTime expectedDate = OffsetDateTime
                .from(DateTimeFormatter.ISO_DATE_TIME.parse("2016-12-31T23:00:00.000Z"));

        final long epochMilli = expectedDate.toInstant().toEpochMilli();

        for (int i = 0; i < 1000; i++) {

            final long timestamp = epochMilli + i * 10000;
            final OffsetDateTime date = Instant.ofEpochMilli(timestamp).atOffset(ZoneOffset.UTC);

            System.out.println(timestamp + " " + date.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME));
        }
    }

    public static void main(final String[] args) throws IOException, InterruptedException {
        final Path path = Path.of("/home/andi/ws/performanceDb/data/production/dates.csv");

        final List<byte[]> dates = new ArrayList<>();

        try (final BufferedReader reader = new BufferedReader(new FileReader(path.toFile(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                dates.add(line.getBytes());
            }
        }

        for (int i = 0; i < 20; i++) {

            System.gc();
            TimeUnit.MILLISECONDS.sleep(100);
            System.gc();
            TimeUnit.MILLISECONDS.sleep(100);
            System.gc();
            TimeUnit.MILLISECONDS.sleep(100);
            System.gc();

            TimeUnit.SECONDS.sleep(1);

            final long start = System.nanoTime();
            final FastISODateParser fastISODateParser = new FastISODateParser();

            for (final byte[] date : dates) {
                fastISODateParser.parseAsEpochMilli(date, 0);
                // final long timestamp =
                // fastISODateParser.parse(date).toInstant().toEpochMilli();
                // final long timestamp = OffsetDateTime.parse(date, DateTimeFormatter.ISO_OFFSET_DATE_TIME)
                // .toInstant().toEpochMilli();
                // sum += timestamp;
            }

            final double millis = (System.nanoTime() - start) / 1_000_000.0;
            final long datesPerSecond = (long) (dates.size() / (millis / 1000.0));
            System.out.println("duration: " + millis + "ms ; speed: " + datesPerSecond + " dates/s");
        }
    }
}
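A side note, not part of the commit: FastISODateParser's implementation is not shown in this diff. The sketch below only illustrates the general approach such parsers tend to take, reading the fixed-width "yyyy-MM-ddTHH:mm:ss" prefix by digit arithmetic instead of DateTimeFormatter; fractional seconds and UTC offsets are deliberately ignored here, and the helper names are hypothetical.

import java.time.LocalDate;

public class FixedPositionIsoSketch {
    // Converts the fixed-width prefix "yyyy-MM-ddTHH:mm:ss" to epoch milliseconds (UTC only).
    static long parseIsoPrefixToEpochMilli(final String s) {
        final int year = digits(s, 0, 4);
        final int month = digits(s, 5, 2);
        final int day = digits(s, 8, 2);
        final int hour = digits(s, 11, 2);
        final int minute = digits(s, 14, 2);
        final int second = digits(s, 17, 2);
        final long epochDay = LocalDate.of(year, month, day).toEpochDay();
        return (epochDay * 86_400L + hour * 3_600L + minute * 60L + second) * 1_000L;
    }

    // Reads 'count' ASCII digits starting at 'from' and rejects anything that is not a digit.
    static int digits(final String s, final int from, final int count) {
        int value = 0;
        for (int i = from; i < from + count; i++) {
            final char c = s.charAt(i);
            if (c < '0' || c > '9') {
                throw new IllegalArgumentException("not a digit at index " + i + " in: " + s);
            }
            value = value * 10 + (c - '0');
        }
        return value;
    }

    public static void main(final String[] args) {
        // 2018-11-18T14:42:49Z is 1542552169000 ms since the epoch (UTC).
        System.out.println(parseIsoPrefixToEpochMilli("2018-11-18T14:42:49.332Z"));
    }
}
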
@@ -11,50 +11,50 @@ import org.testng.annotations.Test;
@Test
public class DataSeriesStatsTest {

    @DataProvider
    public Object[][] providerAverage() {
        final List<Object[]> result = new ArrayList<>();

        {
            final List<DataSeriesStats> stats = Arrays.asList(//
                    new DataSeriesStats(10, 0, 0, 5.0)//
            );
            final double expected = 5.0;
            result.add(new Object[] { stats, expected });
        }
        {
            final List<DataSeriesStats> stats = Arrays.asList(//
                    new DataSeriesStats(0, 0, 0, 5.0)//
            );
            final double expected = 0.0; // no values
            result.add(new Object[] { stats, expected });
        }

        {
            final List<DataSeriesStats> stats = Arrays.asList(//
                    new DataSeriesStats(10, 0, 0, 5.0), //
                    new DataSeriesStats(40, 0, 0, 1.0)//
            );
            final double expected = 1.8; // 90 / 50
            result.add(new Object[] { stats, expected });
        }
        {
            final List<DataSeriesStats> stats = Arrays.asList(//
                    new DataSeriesStats(5, 0, 0, 7.0), //
                    new DataSeriesStats(0, 0, 0, 5.0), // no values
                    new DataSeriesStats(20, 0, 0, 2.0)//
            );
            final double expected = 3.0; // (35+40) / 25
            result.add(new Object[] { stats, expected });
        }

        return result.toArray(new Object[0][]);
    }

    @Test(dataProvider = "providerAverage")
    public void testAverage(final Collection<DataSeriesStats> stats, final double expected) {

        final double actual = DataSeriesStats.average(stats);
        Assert.assertEquals(actual, expected, 0.01);
    }
}
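A side note, not part of the commit: the providerAverage cases imply a count-weighted average (e.g. counts 10 and 40 with averages 5.0 and 1.0 give 90 / 50 = 1.8, and empty series contribute nothing). The sketch below reproduces that computation as inferred from the test data; DataSeriesStats.average's actual source is not part of this diff.

public class WeightedAverageSketch {
    // Count-weighted mean: sum(count * average) / sum(count), 0.0 when there are no values.
    static double average(final long[] counts, final double[] averages) {
        long totalCount = 0;
        double weightedSum = 0.0;
        for (int i = 0; i < counts.length; i++) {
            totalCount += counts[i];
            weightedSum += counts[i] * averages[i];
        }
        return totalCount == 0 ? 0.0 : weightedSum / totalCount;
    }

    public static void main(final String[] args) {
        System.out.println(average(new long[] { 10, 40 }, new double[] { 5.0, 1.0 }));        // 1.8
        System.out.println(average(new long[] { 5, 0, 20 }, new double[] { 7.0, 5.0, 2.0 })); // 3.0
    }
}
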