serialize the ingestion of logfiles
performance improvement of roughly 40%
This commit is contained in:
@@ -42,6 +42,12 @@ public class CsvUploadHandler implements PropertyKeys, DisposableBean {
|
|||||||
try {
|
try {
|
||||||
final ArrayBlockingQueue<Entries> queue = performanceDb.getQueue();
|
final ArrayBlockingQueue<Entries> queue = performanceDb.getQueue();
|
||||||
for (final MultipartFile file : files) {
|
for (final MultipartFile file : files) {
|
||||||
|
|
||||||
|
// insert one file at a time. This improves performance, because the likelihood
|
||||||
|
// of values having the same tags is greatly increased. In my tests this
|
||||||
|
// improved the
|
||||||
|
// ingestion performance from 1.1m to 1.55m values per second on average
|
||||||
|
synchronized (this) {
|
||||||
final CsvToEntryTransformer csvToEntryTransformer = new CsvToEntryTransformer(queue, settings);
|
final CsvToEntryTransformer csvToEntryTransformer = new CsvToEntryTransformer(queue, settings);
|
||||||
try (InputStream in = file.getInputStream()) {
|
try (InputStream in = file.getInputStream()) {
|
||||||
csvToEntryTransformer.readCSV(in);
|
csvToEntryTransformer.readCSV(in);
|
||||||
@@ -49,6 +55,7 @@ public class CsvUploadHandler implements PropertyKeys, DisposableBean {
|
|||||||
LOGGER.error("csv ingestion failed", e);
|
LOGGER.error("csv ingestion failed", e);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
} catch (final RuntimeException e) {
|
} catch (final RuntimeException e) {
|
||||||
FileUtils.deleteSilently(tmpFiles);
|
FileUtils.deleteSilently(tmpFiles);
|
||||||
throw e;
|
throw e;
|
||||||
|
|||||||
Reference in New Issue
Block a user