diff --git a/build.gradle b/build.gradle
index d10b2c4..907340b 100644
--- a/build.gradle
+++ b/build.gradle
@@ -31,7 +31,7 @@ ext {
lib_commons_collections4 = 'org.apache.commons:commons-collections4:4.4'
lib_commons_lang3 = 'org.apache.commons:commons-lang3:3.10'
lib_jackson_databind = 'com.fasterxml.jackson.core:jackson-databind:2.11.2'
lib_log4j2_core = "org.apache.logging.log4j:log4j-core:${version_log4j2}"
lib_log4j2_slf4j_impl = "org.apache.logging.log4j:log4j-slf4j-impl:${version_log4j2}"
lib_log4j2_slf4j_impl = "org.apache.logging.log4j:log4j-slf4j-impl:${version_log4j2}"
@@ -124,5 +124,5 @@ subprojects {
}
wrapper {
gradleVersion = '6.6'
}
diff --git a/pdb-js/src/app/app.component.html b/pdb-js/src/app/app.component.html
index 9f674fa..f95b614 100644
--- a/pdb-js/src/app/app.component.html
+++ b/pdb-js/src/app/app.component.html
@@ -1,21 +1,8 @@
-
-
-
-
\ No newline at end of file
+
diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/CsvReaderSettings.java b/pdb-ui/src/main/java/org/lucares/pdbui/CsvReaderSettings.java
index 2c1f727..1657978 100644
--- a/pdb-ui/src/main/java/org/lucares/pdbui/CsvReaderSettings.java
+++ b/pdb-ui/src/main/java/org/lucares/pdbui/CsvReaderSettings.java
@@ -1,5 +1,6 @@
package org.lucares.pdbui;
+import java.nio.charset.StandardCharsets;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Iterator;
@@ -38,51 +39,51 @@ public final class CsvReaderSettings {
}
public static final class ColumnDefinitions {
- Map columnDefinitions = new HashMap<>();
+ Map columns = new HashMap<>();
- public Map getColumnDefinitions() {
- return columnDefinitions;
+ public Map getColumns() {
+ return columns;
}
- public void setColumnDefinitions(final Map columnDefinitions) {
- this.columnDefinitions = columnDefinitions;
+ public void setColumns(final Map columnDefinitions) {
+ this.columns = columnDefinitions;
}
public void ignoreColumn(final String csvColumnHeader) {
- columnDefinitions.putIfAbsent(csvColumnHeader, new ColumnDefinition());
- columnDefinitions.get(csvColumnHeader).setIgnore(true);
+ columns.putIfAbsent(csvColumnHeader, new ColumnDefinition());
+ columns.get(csvColumnHeader).setIgnore(true);
}
public void rename(final String csvColumnHeader, final String renameTo) {
- columnDefinitions.putIfAbsent(csvColumnHeader, new ColumnDefinition());
- columnDefinitions.get(csvColumnHeader).setRenameTo(renameTo);
+ columns.putIfAbsent(csvColumnHeader, new ColumnDefinition());
+ columns.get(csvColumnHeader).setRenameTo(renameTo);
}
public void postProcess(final String csvColumnHeader, final EnumSet postProcessors) {
- columnDefinitions.putIfAbsent(csvColumnHeader, new ColumnDefinition());
- columnDefinitions.get(csvColumnHeader).setPostProcessors(postProcessors);
+ columns.putIfAbsent(csvColumnHeader, new ColumnDefinition());
+ columns.get(csvColumnHeader).setPostProcessors(postProcessors);
}
public boolean isIgnoredColumn(final String csvColumnHeader) {
- return columnDefinitions.getOrDefault(csvColumnHeader, new ColumnDefinition()).isIgnore();
+ return columns.getOrDefault(csvColumnHeader, new ColumnDefinition()).isIgnore();
}
public String getRenameTo(final String csvColumnHeader) {
- return columnDefinitions.getOrDefault(csvColumnHeader, new ColumnDefinition()).getRenameTo();
+ return columns.getOrDefault(csvColumnHeader, new ColumnDefinition()).getRenameTo();
}
public EnumSet getPostProcessors(final String csvColumnHeader) {
- return columnDefinitions.getOrDefault(csvColumnHeader, new ColumnDefinition()).getPostProcessors();
+ return columns.getOrDefault(csvColumnHeader, new ColumnDefinition()).getPostProcessors();
}
@Override
public String toString() {
final StringBuilder result = new StringBuilder();
- for (final String col : columnDefinitions.keySet()) {
+ for (final String col : columns.keySet()) {
result.append(col);
result.append(":");
- result.append(columnDefinitions.get(col));
+ result.append(columns.get(col));
result.append("\n");
}
@@ -145,7 +146,7 @@ public final class CsvReaderSettings {
}
}
- private byte separator;
+ private String separator;
private ColumnDefinitions columnDefinitions = new ColumnDefinitions();
@@ -155,13 +156,13 @@ public final class CsvReaderSettings {
private String valueColumn;
- private byte comment = '#';
+ private String comment = "#";
public CsvReaderSettings() {
- this("@timestamp", "duration", (byte) ',', new ColumnDefinitions());
+ this("@timestamp", "duration", ",", new ColumnDefinitions());
}
- private CsvReaderSettings(final String timeColumn, final String valueColumn, final byte separator,
+ private CsvReaderSettings(final String timeColumn, final String valueColumn, final String separator,
final ColumnDefinitions columnDefinitions) {
this.timeColumn = timeColumn;
@@ -170,13 +171,13 @@ public final class CsvReaderSettings {
this.columnDefinitions = columnDefinitions;
}
- public static CsvReaderSettings create(final String timeColumn, final String valueColumn, final char separator,
+ public static CsvReaderSettings create(final String timeColumn, final String valueColumn, final String separator,
final ColumnDefinitions columnDefinitions) {
- Preconditions.checkTrue(separator == (byte) separator,
- "Only separators that fulfill separator == (byte)separator are supported. "
- + "This restriction is because the parsing algorithm skips the overhead of "
- + "translating bytes to characters.");
+ Preconditions.checkTrue(separator.getBytes(StandardCharsets.UTF_8).length == 1,
+ "Only separators that are encoded as a single byte in UTF-8 are supported. "
+ + "This restriction is because the parsing algorithm skips the overhead of "
+ + "translating bytes to characters.");
- return new CsvReaderSettings(timeColumn, valueColumn, (byte) separator, columnDefinitions);
+ return new CsvReaderSettings(timeColumn, valueColumn, separator, columnDefinitions);
}
public String getTimeColumn() {
@@ -195,22 +196,36 @@ public final class CsvReaderSettings {
this.valueColumn = valueColumn;
}
- public byte getSeparator() {
+ public String getSeparator() {
return separator;
}
- public void setSeparator(final byte separator) {
+ public void setSeparator(final String separator) {
this.separator = separator;
}
- public byte getComment() {
+ public byte separatorByte() {
+ final byte[] bytes = separator.getBytes(StandardCharsets.UTF_8);
+ Preconditions.checkEqual(bytes.length, 1,
+ "separator must be a character that is represented as a single byte in UTF-8");
+ return bytes[0];
+ }
+
+ public String getComment() {
return comment;
}
- public void setComment(final byte comment) {
+ public void setComment(final String comment) {
this.comment = comment;
}
+ public byte commentByte() {
+ final byte[] bytes = comment.getBytes(StandardCharsets.UTF_8);
+ Preconditions.checkEqual(bytes.length, 1,
+ "comment must be a character that is represented as a single byte in UTF-8");
+ return bytes[0];
+ }
+
public void putAdditionalTag(final String field, final String value) {
additionalTags.put(field, value);
}
diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/CsvToEntryTransformer.java b/pdb-ui/src/main/java/org/lucares/pdbui/CsvToEntryTransformer.java
index fde4987..2521781 100644
--- a/pdb-ui/src/main/java/org/lucares/pdbui/CsvToEntryTransformer.java
+++ b/pdb-ui/src/main/java/org/lucares/pdbui/CsvToEntryTransformer.java
@@ -46,8 +46,8 @@ class CsvToEntryTransformer {
Entries entries = new Entries(chunksize);
final byte newline = '\n';
- final byte separator = settings.getSeparator();
- final byte comment = settings.getComment();
+ final byte separator = settings.separatorByte();
+ final byte comment = settings.commentByte();
final byte[] line = new byte[64 * 1024]; // max line length
int offsetInLine = 0;
int offsetInBuffer = 0;
diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/CsvUploadHandler.java b/pdb-ui/src/main/java/org/lucares/pdbui/CsvUploadHandler.java
index b5bfb6a..2d5e464 100644
--- a/pdb-ui/src/main/java/org/lucares/pdbui/CsvUploadHandler.java
+++ b/pdb-ui/src/main/java/org/lucares/pdbui/CsvUploadHandler.java
@@ -54,7 +54,7 @@ public class CsvUploadHandler implements PropertyKeys, DisposableBean {
for (final MultipartFile file : files) {
final Path tmpFile = tmpDir.resolve(UUID.randomUUID().toString());
tmpFiles.add(tmpFile);
- LOGGER.info("writing uploaded file to {}", tmpFile);
+ LOGGER.debug("writing uploaded file to {}", tmpFile);
file.transferTo(tmpFile);
}
} catch (RuntimeException | IOException e) {
@@ -71,7 +71,7 @@ public class CsvUploadHandler implements PropertyKeys, DisposableBean {
csvToEntryTransformer.readCSV(in);
}
- LOGGER.info("delete uploaded file {}", tmpFile);
+ LOGGER.debug("delete uploaded file {}", tmpFile);
Files.delete(tmpFile);
} catch (final Exception e) {
diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/IngestionHandler.java b/pdb-ui/src/main/java/org/lucares/pdbui/IngestionHandler.java
index c8c32d5..385f50d 100644
--- a/pdb-ui/src/main/java/org/lucares/pdbui/IngestionHandler.java
+++ b/pdb-ui/src/main/java/org/lucares/pdbui/IngestionHandler.java
@@ -68,7 +68,7 @@ public final class IngestionHandler implements Callable {
} else {
in.reset();
final CsvToEntryTransformer csvTransformer = new CsvToEntryTransformer(queue,
- CsvReaderSettings.create("@timestamp", "duration", ',', new ColumnDefinitions()));
+ CsvReaderSettings.create("@timestamp", "duration", ",", new ColumnDefinitions()));
csvTransformer.readCSV(in);
}
}
diff --git a/pdb-ui/src/main/java/org/lucares/pdbui/date/FastISODateParser.java b/pdb-ui/src/main/java/org/lucares/pdbui/date/FastISODateParser.java
index 2a79565..67b8e77 100644
--- a/pdb-ui/src/main/java/org/lucares/pdbui/date/FastISODateParser.java
+++ b/pdb-ui/src/main/java/org/lucares/pdbui/date/FastISODateParser.java
@@ -192,7 +192,11 @@ public class FastISODateParser {
final long nanos = nanosAndCharsRead[0];
final int offsetTimezone = beginIndex + 19 + nanosAndCharsRead[1];
- final long zoneOffsetMillis = date[offsetTimezone] == 'Z' ? 0 : parseZoneToMillis(date, offsetTimezone);
+ final byte firstTimeZoneChar = date[offsetTimezone];
+ final boolean isNumericTimeZone = firstTimeZoneChar >= '0' && firstTimeZoneChar <= '9'
+ || (firstTimeZoneChar == '-' || firstTimeZoneChar == '+');
+ final long zoneOffsetMillis = firstTimeZoneChar == 'Z' || !isNumericTimeZone ? 0
+ : parseZoneToMillis(date, offsetTimezone);
final int epochMilliMonthOffsetKey = (int) (year * 12 + month - 1);
final long epochMilliMonthOffset;
@@ -261,7 +265,7 @@ public class FastISODateParser {
while (i < date.length) {
final byte c = date[i];
i++;
- if (c == '.') {
+ if (c == '.' || c == ',') {
continue;
}
if (c < '0' || c > '9') {
diff --git a/pdb-ui/src/main/resources/application-testing.properties b/pdb-ui/src/main/resources/application-testing.properties
index a7307ce..545740d 100644
--- a/pdb-ui/src/main/resources/application-testing.properties
+++ b/pdb-ui/src/main/resources/application-testing.properties
@@ -1,6 +1,8 @@
-#db.base=D:/ws/pdb/dataNew
-db.base=C:/Temp/pdb/testing
+db.base=D:/ws/pdb/databases/test
server.port=17333
gnuplot.home=D:/ws/pdb/gnuplot-5.2
cache.images.duration.seconds=86400
+defaults.groupBy=pod,method,metric
+defaults.splitBy=method
+defaults.query.examples=pod=vapfinra01 and method=ViewService.findFieldView,ViewService.findFieldViewGroup;pod=vappilby01 and method=ReviewInContextController.index;pod=vapnyse001 and method=ReviewInContextController.index,ReviewController.index
mode.production=false
\ No newline at end of file
diff --git a/pdb-ui/src/test/java/org/lucares/pdbui/CsvToEntryTransformerTest.java b/pdb-ui/src/test/java/org/lucares/pdbui/CsvToEntryTransformerTest.java
index b77926e..3b970a7 100644
--- a/pdb-ui/src/test/java/org/lucares/pdbui/CsvToEntryTransformerTest.java
+++ b/pdb-ui/src/test/java/org/lucares/pdbui/CsvToEntryTransformerTest.java
@@ -50,7 +50,7 @@ public class CsvToEntryTransformerTest {
+ dateB.format(DateTimeFormatter.ISO_ZONED_DATE_TIME) + ",2,tagValue\n";
final ArrayBlockingQueue queue = db.getQueue();
- final CsvReaderSettings settings = CsvReaderSettings.create("@timestamp", "duration", ',',
+ final CsvReaderSettings settings = CsvReaderSettings.create("@timestamp", "duration", ",",
new ColumnDefinitions());
final CsvToEntryTransformer csvToEntryTransformer = new CsvToEntryTransformer(queue, settings);
csvToEntryTransformer.readCSV(new ByteArrayInputStream(csv.getBytes(StandardCharsets.UTF_8)));
@@ -92,7 +92,7 @@ public class CsvToEntryTransformerTest {
final ArrayBlockingQueue queue = db.getQueue();
final ColumnDefinitions columnDefinitions = new ColumnDefinitions();
columnDefinitions.ignoreColumn("ignoredColumn");
- final CsvReaderSettings settings = CsvReaderSettings.create("@timestamp", "duration", ',',
+ final CsvReaderSettings settings = CsvReaderSettings.create("@timestamp", "duration", ",",
columnDefinitions);
final CsvToEntryTransformer csvToEntryTransformer = new CsvToEntryTransformer(queue, settings);
csvToEntryTransformer.readCSV(new ByteArrayInputStream(csv.getBytes(StandardCharsets.UTF_8)));
diff --git a/pdb-ui/src/test/java/org/lucares/pdbui/PdbControllerTest.java b/pdb-ui/src/test/java/org/lucares/pdbui/PdbControllerTest.java
index c3083eb..01b4aff 100644
--- a/pdb-ui/src/test/java/org/lucares/pdbui/PdbControllerTest.java
+++ b/pdb-ui/src/test/java/org/lucares/pdbui/PdbControllerTest.java
@@ -66,7 +66,7 @@ public class PdbControllerTest {
final ColumnDefinitions columnDefinitions = new ColumnDefinitions();
columnDefinitions.ignoreColumn(ignoredColumn);
columnDefinitions.postProcess("tag", EnumSet.of(PostProcessors.LOWER_CASE));
- final CsvReaderSettings settings = CsvReaderSettings.create(timeColumn, valueColumn, ',', columnDefinitions);
+ final CsvReaderSettings settings = CsvReaderSettings.create(timeColumn, valueColumn, ",", columnDefinitions);
settings.putAdditionalTag(additionalColumn, additionalValue);
uploadCsv(settings, csv);
{
diff --git a/pdb-ui/src/test/java/org/lucares/pdbui/date/FastISODateParserTest.java b/pdb-ui/src/test/java/org/lucares/pdbui/date/FastISODateParserTest.java
index a6161d4..2804e58 100644
--- a/pdb-ui/src/test/java/org/lucares/pdbui/date/FastISODateParserTest.java
+++ b/pdb-ui/src/test/java/org/lucares/pdbui/date/FastISODateParserTest.java
@@ -12,10 +12,10 @@ import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.stream.Stream;
+import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
-import org.junit.jupiter.api.Assertions;
public class FastISODateParserTest {
@@ -44,7 +44,8 @@ public class FastISODateParserTest {
Arguments.of("2018-11-18T14:55:49.Z"), //
Arguments.of("2018-11-18T14:55:49.-01:23"), //
Arguments.of("2018-11-18T14:55:49Z"), //
- Arguments.of("2018-11-18T14:55:49-01:23") //
+ Arguments.of("2018-11-18T14:55:49-01:23"), //
+ Arguments.of("2018-11-18 14:55:49,123") //
);
}
diff --git a/performanceDb/src/main/java/org/lucares/performance/db/PerformanceDb.java b/performanceDb/src/main/java/org/lucares/performance/db/PerformanceDb.java
index 3b9e0c5..00e8932 100644
--- a/performanceDb/src/main/java/org/lucares/performance/db/PerformanceDb.java
+++ b/performanceDb/src/main/java/org/lucares/performance/db/PerformanceDb.java
@@ -137,8 +137,10 @@ public class PerformanceDb implements AutoCloseable {
}
if (entries.isForceFlush()) {
- LOGGER.info("flush triggered via entries.isForceFlush()");
+ LOGGER.debug("flush triggered via entries.isForceFlush()");
+ final long start = System.nanoTime();
dataStore.flush();
+ LOGGER.debug("flush duration: {}ms", (System.nanoTime() - start) / 1_000_000.0);
entries.notifyFlushed();
}
}
@@ -147,8 +149,9 @@ public class PerformanceDb implements AutoCloseable {
throw new WriteException(e);
} catch (final InterruptedException e) {
Thread.currentThread().interrupt();
- LOGGER.info("Thread was interrupted. Aborting exectution.");
+ LOGGER.info("Thread was interrupted. Aborting execution.");
} finally {
+ LOGGER.info("flush after inserting all data");
dataStore.flush();
}
}