Serialize the ingestion of log files

performance improvement of roughly 40%
This commit is contained in:
2020-11-27 19:49:22 +01:00
parent 08111e0d69
commit c6d7f97628

View File

@@ -42,11 +42,18 @@ public class CsvUploadHandler implements PropertyKeys, DisposableBean {
try { try {
final ArrayBlockingQueue<Entries> queue = performanceDb.getQueue(); final ArrayBlockingQueue<Entries> queue = performanceDb.getQueue();
for (final MultipartFile file : files) { for (final MultipartFile file : files) {
final CsvToEntryTransformer csvToEntryTransformer = new CsvToEntryTransformer(queue, settings);
try (InputStream in = file.getInputStream()) { // insert one file at a time. This improves performance, because the likelihood
csvToEntryTransformer.readCSV(in); // of values having the same tags is greatly increased. In my tests this
} catch (final Exception e) { // improved the
LOGGER.error("csv ingestion failed", e); // ingestion performance from 1.1m to 1.55m values per second on average
synchronized (this) {
final CsvToEntryTransformer csvToEntryTransformer = new CsvToEntryTransformer(queue, settings);
try (InputStream in = file.getInputStream()) {
csvToEntryTransformer.readCSV(in);
} catch (final Exception e) {
LOGGER.error("csv ingestion failed", e);
}
} }
} }
} catch (final RuntimeException e) { } catch (final RuntimeException e) {