send CSV file via REST

2019-12-08 18:39:34 +01:00
parent f1ef13c1de
commit 85679ca0c8
12 changed files with 334 additions and 48 deletions

View File

@@ -0,0 +1,87 @@
package org.lucares.pdbui;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.lucares.pdb.api.Entries;
import org.lucares.performance.db.PerformanceDb;
import org.lucares.utils.file.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.springframework.web.multipart.MultipartFile;
@Component
public class CsvUploadHandler implements PropertyKeys, DisposableBean {

    private static final Logger LOGGER = LoggerFactory.getLogger(CsvUploadHandler.class);

    private final Path tmpDir;
    private final ExecutorService threadPool = Executors.newFixedThreadPool(2);
    private final PerformanceDb performanceDb;

    public CsvUploadHandler(@Value("${" + TMP_DIR + "}") final String tmpDir, final PerformanceDb performanceDb)
            throws IOException {
        // Keep uploads in a dedicated sub-directory of the configured tmp dir.
        this.tmpDir = Paths.get(tmpDir).resolve("uploads");
        Files.createDirectories(this.tmpDir);
        this.performanceDb = performanceDb;
    }

    public void ingest(final List<MultipartFile> files, final CsvReaderSettings settings)
            throws IllegalStateException, IOException {
        // Copy every upload to a temporary file first: the multipart streams are
        // only valid while the HTTP request is open.
        final List<Path> tmpFiles = new ArrayList<>();
        try {
            for (final MultipartFile file : files) {
                final Path tmpFile = tmpDir.resolve(UUID.randomUUID().toString());
                tmpFiles.add(tmpFile);
                LOGGER.info("writing uploaded file to {}", tmpFile);
                file.transferTo(tmpFile);
            }
        } catch (RuntimeException | IOException e) {
            FileUtils.deleteSilently(tmpFiles);
            throw e;
        }
        // Parse and ingest asynchronously so the upload request can return immediately.
        threadPool.submit(() -> {
            final ArrayBlockingQueue<Entries> queue = performanceDb.getQueue();
            for (final Path tmpFile : tmpFiles) {
                try {
                    final CsvToEntryTransformer csvToEntryTransformer = new CsvToEntryTransformer(queue, settings);
                    try (FileInputStream in = new FileInputStream(tmpFile.toFile())) {
                        csvToEntryTransformer.readCSV(in);
                    }
                    LOGGER.info("delete uploaded file {}", tmpFile);
                    Files.delete(tmpFile);
                } catch (final Exception e) {
                    LOGGER.error("csv ingestion failed", e);
                }
            }
            // Signal the queue consumer that this stream of entries is complete.
            queue.add(Entries.POISON);
        });
    }

    @Override
    public void destroy() throws Exception {
        // Let queued ingestion tasks finish before the application shuts down.
        threadPool.shutdown();
        LOGGER.info("awaiting termination ...");
        threadPool.awaitTermination(10, TimeUnit.MINUTES);
    }
}
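
The handler assumes that PerformanceDb.getQueue() hands out a shared blocking queue whose consumer drains batches of entries until it sees the Entries.POISON sentinel; the TcpIngestor changes below rely on the same contract. A minimal, self-contained sketch of that poison-pill pattern, using made-up stand-ins for the project's classes:

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

// Simplified stand-ins: the real Entries type and queue wiring live in
// org.lucares.pdb.api and org.lucares.performance.db.
final class PoisonPillSketch {

    private static final Object POISON = new Object(); // sentinel, like Entries.POISON

    public static void main(final String[] args) throws InterruptedException {
        final BlockingQueue<Object> queue = new ArrayBlockingQueue<>(10);

        final Thread consumer = new Thread(() -> {
            try {
                while (true) {
                    final Object batch = queue.take();
                    if (batch == POISON) { // the sentinel is compared by identity
                        return; // consumer terminates once the producer is done
                    }
                    System.out.println("ingesting " + batch);
                }
            } catch (final InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }, "db-ingestion");
        consumer.start();

        queue.put("batch-1");
        queue.put("batch-2");
        queue.put(POISON); // like CsvUploadHandler.ingest after the last file
        consumer.join();
    }
}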

View File

@@ -42,17 +42,21 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Controller;
import org.springframework.util.StreamUtils;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.servlet.mvc.method.annotation.StreamingResponseBody;
import com.fasterxml.jackson.core.JsonParseException;
@@ -83,9 +87,12 @@ public class PdbController implements HardcodedValues, PropertyKeys {
@Value("${" + DEFAULTS_SPLIT_BY + ":}")
private String defaultsSplitBy;
public PdbController(final PerformanceDb db, final Plotter plotter) {
private final CsvUploadHandler csvUploadHandler;
public PdbController(final PerformanceDb db, final Plotter plotter, final CsvUploadHandler csvUploadHandler) {
this.db = db;
this.plotter = plotter;
this.csvUploadHandler = csvUploadHandler;
}
@RequestMapping(path = "/plots", //
@@ -315,4 +322,15 @@ public class PdbController implements HardcodedValues, PropertyKeys {
return result;
}
    @PostMapping(path = "/data")
    @ResponseBody
    @ResponseStatus(code = HttpStatus.CREATED)
    public String handleCsvFileUpload(@RequestParam("file") final MultipartFile[] files)
            throws IllegalStateException, IOException {
        // Ingestion happens asynchronously; 201 Created only confirms the upload.
        csvUploadHandler.ingest(List.of(files), new CsvReaderSettings(','));
        return ""; // return value might become a job id that can be used to cancel or observe status
    }
}
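
The new /data endpoint accepts one or more multipart files under the request parameter name file and answers 201 Created as soon as the uploads are persisted; parsing happens in the background. A hedged sketch of an integration test for it, assuming a standard Spring Boot test setup (the test class name, file name, and CSV payload are made up):

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.multipart;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

import java.nio.charset.StandardCharsets;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.mock.web.MockMultipartFile;
import org.springframework.test.web.servlet.MockMvc;

// Hypothetical test, not part of this commit.
@SpringBootTest
@AutoConfigureMockMvc
class CsvUploadTest {

    @Autowired
    private MockMvc mockMvc;

    @Test
    void uploadCsvReturnsCreated() throws Exception {
        final MockMultipartFile csv = new MockMultipartFile(
                "file",        // must match @RequestParam("file")
                "metrics.csv", // made-up file name
                "text/csv",
                "a,b,c\n1,2,3\n".getBytes(StandardCharsets.UTF_8));

        mockMvc.perform(multipart("/data").file(csv))
                .andExpect(status().isCreated());
    }
}

From the command line the equivalent request would be roughly curl -F "file=@metrics.csv" http://localhost:8080/data, with host and port depending on the deployment.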

View File

@@ -14,7 +14,6 @@ import java.util.concurrent.atomic.AtomicBoolean;
import javax.annotation.PreDestroy;
import org.lucares.pdb.api.Entries;
import org.lucares.performance.db.BlockingQueueIterator;
import org.lucares.performance.db.PerformanceDb;
import org.lucares.recommind.logs.Config;
import org.slf4j.Logger;
@@ -32,7 +31,7 @@ public class TcpIngestor implements Ingestor, AutoCloseable, DisposableBean {
private final AtomicBoolean acceptNewConnections = new AtomicBoolean(true);
private final ExecutorService serverThreadPool = Executors.newFixedThreadPool(2);
private final ExecutorService serverThreadPool = Executors.newFixedThreadPool(1);
private final ExecutorService workerThreadPool = Executors.newCachedThreadPool();
@@ -57,27 +56,10 @@ public class TcpIngestor implements Ingestor, AutoCloseable, DisposableBean {
@Override
public void start() throws Exception {
final ArrayBlockingQueue<Entries> queue = new ArrayBlockingQueue<>(10);
serverThreadPool.submit(() -> {
Thread.currentThread().setName("db-ingestion");
boolean finished = false;
while (!finished) {
try {
db.putEntries(new BlockingQueueIterator<>(queue, Entries.POISON));
finished = true;
} catch (final Exception e) {
LOGGER.warn("Write to database failed. Will retry with the next element.", e);
}
}
return null;
});
serverThreadPool.submit(() -> listen(queue));
serverThreadPool.submit(() -> listen());
}
private Void listen(final ArrayBlockingQueue<Entries> queue) throws IOException {
private Void listen() throws IOException {
Thread.currentThread().setName("socket-listener");
try (ServerSocket serverSocket = new ServerSocket(PORT);) {
LOGGER.info("listening on port " + PORT);
@@ -89,6 +71,7 @@ public class TcpIngestor implements Ingestor, AutoCloseable, DisposableBean {
final Socket clientSocket = serverSocket.accept();
LOGGER.debug("accepted connection: " + clientSocket.getRemoteSocketAddress());
final ArrayBlockingQueue<Entries> queue = db.getQueue();
workerThreadPool.submit(new IngestionHandler(clientSocket, queue));
LOGGER.debug("handler submitted");
} catch (final SocketTimeoutException e) {
@@ -109,12 +92,13 @@ public class TcpIngestor implements Ingestor, AutoCloseable, DisposableBean {
} catch (final InterruptedException e) {
Thread.interrupted();
}
LOGGER.debug("adding poison");
queue.put(Entries.POISON);
} catch (final InterruptedException e) {
LOGGER.info("Listener thread interrupted. Likely while adding the poison. "
+ "That would mean that the db-ingestion thread will not terminate. ");
Thread.interrupted();
// LOGGER.debug("adding poison");
// final ArrayBlockingQueue<Entries> queue = db.getQueue();
// queue.put(Entries.POISON);
// } catch (final InterruptedException e) {
// LOGGER.info("Listener thread interrupted. Likely while adding the poison. "
// + "That would mean that the db-ingestion thread will not terminate. ");
// Thread.interrupted();
} catch (final Exception e) {
LOGGER.error("", e);
throw e;
@@ -138,7 +122,6 @@ public class TcpIngestor implements Ingestor, AutoCloseable, DisposableBean {
} catch (final InterruptedException e) {
Thread.interrupted();
}
LOGGER.debug("closing database");
db.close();
LOGGER.info("destroyed");
}
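
Both CsvUploadHandler.destroy() above and this destroy() rely on the standard ExecutorService teardown sequence: shutdown() to stop accepting new tasks, then awaitTermination() to let in-flight work drain. A generic sketch of the pattern, with an illustrative shutdownNow() fallback that the code in this commit does not use:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

// Generic sketch; the names and the 10-second timeout are illustrative only.
final class ShutdownSketch {
    public static void main(final String[] args) throws InterruptedException {
        final ExecutorService pool = Executors.newFixedThreadPool(2);
        pool.submit(() -> System.out.println("draining remaining work"));

        pool.shutdown(); // no new tasks; already-queued tasks still run
        if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
            pool.shutdownNow(); // give up waiting and interrupt the workers
        }
    }
}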