Serialize the ingestion of logfiles
performance improvement of roughly 40%
This commit is contained in:
@@ -42,6 +42,12 @@ public class CsvUploadHandler implements PropertyKeys, DisposableBean {
|
||||
try {
|
||||
final ArrayBlockingQueue<Entries> queue = performanceDb.getQueue();
|
||||
for (final MultipartFile file : files) {
|
||||
|
||||
// insert one file at a time. This improves performance, because the likelihood
|
||||
// of values having the same tags is greatly increased. In my tests this
|
||||
// improved the
|
||||
// ingestion performance from 1.1m to 1.55m values per second on average
|
||||
synchronized (this) {
|
||||
final CsvToEntryTransformer csvToEntryTransformer = new CsvToEntryTransformer(queue, settings);
|
||||
try (InputStream in = file.getInputStream()) {
|
||||
csvToEntryTransformer.readCSV(in);
|
||||
@@ -49,6 +55,7 @@ public class CsvUploadHandler implements PropertyKeys, DisposableBean {
|
||||
LOGGER.error("csv ingestion failed", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (final RuntimeException e) {
|
||||
FileUtils.deleteSilently(tmpFiles);
|
||||
throw e;
|
||||
|
||||
Reference in New Issue
Block a user