apply new code formatter and save action

This commit is contained in:
2019-11-24 10:20:43 +01:00
parent 5ea82c6a4c
commit 06b379494f
184 changed files with 13455 additions and 13489 deletions

View File

@@ -6,17 +6,17 @@ import org.springframework.web.bind.annotation.ResponseStatus;
@ResponseStatus(value = HttpStatus.BAD_REQUEST)
public class BadRequest extends RuntimeException {
private static final long serialVersionUID = 694206253376122420L;
private static final long serialVersionUID = 694206253376122420L;
public BadRequest(final String message, final Throwable cause) {
super(message, cause);
}
public BadRequest(final String message, final Throwable cause) {
super(message, cause);
}
public BadRequest(final String message) {
super(message);
}
public BadRequest(final String message) {
super(message);
}
public BadRequest(final Throwable cause) {
super(cause);
}
public BadRequest(final Throwable cause) {
super(cause);
}
}

View File

@@ -21,74 +21,74 @@ import org.springframework.stereotype.Component;
@Component
public class CleanupThread implements DisposableBean, PropertyKeys {
private static final Logger LOGGER = LoggerFactory.getLogger(CleanupThread.class);
private static final Logger LOGGER = LoggerFactory.getLogger(CleanupThread.class);
private static final class RemoveTempFiles implements Runnable {
private static final class RemoveTempFiles implements Runnable {
private final Path outputPath;
private final int cacheDurationInSeconds;
private final Path outputPath;
private final int cacheDurationInSeconds;
public RemoveTempFiles(final Path outputPath, final int cacheDurationInSeconds) {
this.outputPath = outputPath;
this.cacheDurationInSeconds = cacheDurationInSeconds;
}
public RemoveTempFiles(final Path outputPath, final int cacheDurationInSeconds) {
this.outputPath = outputPath;
this.cacheDurationInSeconds = cacheDurationInSeconds;
}
@Override
public void run() {
@Override
public void run() {
try {
Files.walk(outputPath)//
.filter(Files::isRegularFile)//
.filter(this::isStale)//
.forEach(RemoveTempFiles::delete);
} catch (final IOException | RuntimeException e) {
LOGGER.warn("failed to walk " + outputPath + ". Cannot delete stale files", e);
}
}
try {
Files.walk(outputPath)//
.filter(Files::isRegularFile)//
.filter(this::isStale)//
.forEach(RemoveTempFiles::delete);
} catch (final IOException | RuntimeException e) {
LOGGER.warn("failed to walk " + outputPath + ". Cannot delete stale files", e);
}
}
private static void delete(final Path path) {
try {
LOGGER.debug("deleting stale file: " + path);
Files.delete(path);
} catch (final IOException e) {
LOGGER.warn("failed to delete stale file " + path, e);
}
}
private static void delete(final Path path) {
try {
LOGGER.debug("deleting stale file: " + path);
Files.delete(path);
} catch (final IOException e) {
LOGGER.warn("failed to delete stale file " + path, e);
}
}
private boolean isStale(final Path path) {
final Instant maxAge = Instant.now().minusSeconds(cacheDurationInSeconds);
try {
final FileTime lastModifiedTime = Files.getLastModifiedTime(path);
final Instant lastModifiedInstant = lastModifiedTime.toInstant();
return lastModifiedInstant.compareTo(maxAge) < 0;
} catch (final IOException e) {
LOGGER.warn("failed to get last modified time of " + path + ". Considering this file as stale.", e);
return true;
}
}
private boolean isStale(final Path path) {
final Instant maxAge = Instant.now().minusSeconds(cacheDurationInSeconds);
try {
final FileTime lastModifiedTime = Files.getLastModifiedTime(path);
final Instant lastModifiedInstant = lastModifiedTime.toInstant();
return lastModifiedInstant.compareTo(maxAge) < 0;
} catch (final IOException e) {
LOGGER.warn("failed to get last modified time of " + path + ". Considering this file as stale.", e);
return true;
}
}
}
}
private final ScheduledExecutorService scheduledThreadPool;
private static final int CACHE_DURATION_IN_SECONDS = 24 * 3600;
private final ScheduledExecutorService scheduledThreadPool;
private static final int CACHE_DURATION_IN_SECONDS = 24 * 3600;
@Autowired
public CleanupThread(@Value("${" + PATH_GENERATED_IMAGES + "}") final String outputDir) {
scheduledThreadPool = Executors.newScheduledThreadPool(1, new CustomizableThreadFactory("cleanup-"));
@Autowired
public CleanupThread(@Value("${" + PATH_GENERATED_IMAGES + "}") final String outputDir) {
scheduledThreadPool = Executors.newScheduledThreadPool(1, new CustomizableThreadFactory("cleanup-"));
final Path outputPath = Paths.get(outputDir);
scheduledThreadPool.scheduleWithFixedDelay(new RemoveTempFiles(outputPath, CACHE_DURATION_IN_SECONDS), 1, 5,
TimeUnit.MINUTES);
}
final Path outputPath = Paths.get(outputDir);
scheduledThreadPool.scheduleWithFixedDelay(new RemoveTempFiles(outputPath, CACHE_DURATION_IN_SECONDS), 1, 5,
TimeUnit.MINUTES);
}
@Override
public void destroy() {
scheduledThreadPool.shutdown();
try {
scheduledThreadPool.awaitTermination(10, TimeUnit.SECONDS);
} catch (final InterruptedException e) {
Thread.currentThread().interrupt();
}
}
@Override
public void destroy() {
scheduledThreadPool.shutdown();
try {
scheduledThreadPool.awaitTermination(10, TimeUnit.SECONDS);
} catch (final InterruptedException e) {
Thread.currentThread().interrupt();
}
}
}

View File

@@ -38,90 +38,90 @@ import org.slf4j.LoggerFactory;
public class CustomExportFormatToEntryTransformer {
private static final int ENTRY_BUFFER_SIZE = 100;
private static final int ENTRY_BUFFER_SIZE = 100;
private static final Logger LOGGER = LoggerFactory.getLogger(CustomExportFormatToEntryTransformer.class);
private static final Logger LOGGER = LoggerFactory.getLogger(CustomExportFormatToEntryTransformer.class);
private final Pattern splitByComma = Pattern.compile(",");
private final Pattern splitByComma = Pattern.compile(",");
private final Map<Long, Tags> tagsDictionary = new HashMap<>();
private final Map<Long, Tags> tagsDictionary = new HashMap<>();
private long lastEpochMilli;
private long lastEpochMilli;
public void read(final BufferedReader in, final ArrayBlockingQueue<Entries> queue) throws IOException {
public void read(final BufferedReader in, final ArrayBlockingQueue<Entries> queue) throws IOException {
Entries bufferedEntries = new Entries(ENTRY_BUFFER_SIZE);
Entries bufferedEntries = new Entries(ENTRY_BUFFER_SIZE);
try {
String line;
while ((line = in.readLine()) != null) {
try {
if (line.startsWith(PdbExport.MARKER_DICT_ENTRY)) {
readDictionaryEntry(line);
} else {
final Entry entry = readEntry(line);
if (entry != null) {
try {
String line;
while ((line = in.readLine()) != null) {
try {
if (line.startsWith(PdbExport.MARKER_DICT_ENTRY)) {
readDictionaryEntry(line);
} else {
final Entry entry = readEntry(line);
if (entry != null) {
bufferedEntries.add(entry);
bufferedEntries.add(entry);
if (bufferedEntries.size() == ENTRY_BUFFER_SIZE) {
queue.put(bufferedEntries);
bufferedEntries = new Entries(ENTRY_BUFFER_SIZE);
}
}
}
} catch (final Exception e) {
LOGGER.error("ignoring line '{}'", line, e);
}
queue.put(bufferedEntries);
bufferedEntries = new Entries(ENTRY_BUFFER_SIZE);
}
} catch (final InterruptedException e) {
Thread.currentThread().interrupt();
LOGGER.info("aborting because of interruption");
}
}
if (bufferedEntries.size() == ENTRY_BUFFER_SIZE) {
queue.put(bufferedEntries);
bufferedEntries = new Entries(ENTRY_BUFFER_SIZE);
}
}
}
} catch (final Exception e) {
LOGGER.error("ignoring line '{}'", line, e);
}
queue.put(bufferedEntries);
bufferedEntries = new Entries(ENTRY_BUFFER_SIZE);
}
} catch (final InterruptedException e) {
Thread.currentThread().interrupt();
LOGGER.info("aborting because of interruption");
}
}
private Entry readEntry(final String line) {
private Entry readEntry(final String line) {
final String[] timeValueTags = splitByComma.split(line);
final String[] timeValueTags = splitByComma.split(line);
final long timeDelta = Long.parseLong(timeValueTags[0]);
final long value = Long.parseLong(timeValueTags[1]);
final long tagsId = Long.parseLong(timeValueTags[2]);
final long timeDelta = Long.parseLong(timeValueTags[0]);
final long value = Long.parseLong(timeValueTags[1]);
final long tagsId = Long.parseLong(timeValueTags[2]);
lastEpochMilli = lastEpochMilli + timeDelta;
lastEpochMilli = lastEpochMilli + timeDelta;
final Tags tags = tagsDictionary.get(tagsId);
if (tags == null) {
LOGGER.info("no tags available for tagsId {}. Ignoring line '{}'", tagsId, line);
return null;
}
final Tags tags = tagsDictionary.get(tagsId);
if (tags == null) {
LOGGER.info("no tags available for tagsId {}. Ignoring line '{}'", tagsId, line);
return null;
}
return new Entry(lastEpochMilli, value, tags);
}
return new Entry(lastEpochMilli, value, tags);
}
private void readDictionaryEntry(final String line) {
final String[] tagsIdToSerializedTags = line.split(Pattern.quote(PdbExport.SEPARATOR_TAG_ID));
private void readDictionaryEntry(final String line) {
final String[] tagsIdToSerializedTags = line.split(Pattern.quote(PdbExport.SEPARATOR_TAG_ID));
final Long tagId = Long.parseLong(tagsIdToSerializedTags[0], 1, tagsIdToSerializedTags[0].length(), 10);
final Tags tags = tagsFromCsv(tagsIdToSerializedTags[1]);
tagsDictionary.put(tagId, tags);
}
final Long tagId = Long.parseLong(tagsIdToSerializedTags[0], 1, tagsIdToSerializedTags[0].length(), 10);
final Tags tags = tagsFromCsv(tagsIdToSerializedTags[1]);
tagsDictionary.put(tagId, tags);
}
public static Tags tagsFromCsv(final String line) {
public static Tags tagsFromCsv(final String line) {
final TagsBuilder tagsBuilder = new TagsBuilder();
final String[] tagsAsString = line.split(Pattern.quote(","));
final TagsBuilder tagsBuilder = new TagsBuilder();
final String[] tagsAsString = line.split(Pattern.quote(","));
for (final String tagAsString : tagsAsString) {
final String[] keyValue = tagAsString.split(Pattern.quote("="));
for (final String tagAsString : tagsAsString) {
final String[] keyValue = tagAsString.split(Pattern.quote("="));
final int key = Tags.STRING_COMPRESSOR.put(keyValue[0]);
final int value = Tags.STRING_COMPRESSOR.put(keyValue[1]);
tagsBuilder.add(key, value);
}
final int key = Tags.STRING_COMPRESSOR.put(keyValue[0]);
final int value = Tags.STRING_COMPRESSOR.put(keyValue[1]);
tagsBuilder.add(key, value);
}
return tagsBuilder.build();
}
return tagsBuilder.build();
}
}

View File

@@ -2,8 +2,8 @@ package org.lucares.pdbui;
public interface HardcodedValues {
/**
* The path for generated images relative to the context root.
*/
String WEB_IMAGE_OUTPUT_PATH = "img-generated";
/**
* The path for generated images relative to the context root.
*/
String WEB_IMAGE_OUTPUT_PATH = "img-generated";
}

View File

@@ -7,15 +7,15 @@ import org.springframework.stereotype.Component;
@Component
public class Ingestion {
private final Ingestor tcpIngestor;
private final Ingestor tcpIngestor;
public Ingestion(final Ingestor tcpIngestor) {
this.tcpIngestor = tcpIngestor;
}
public Ingestion(final Ingestor tcpIngestor) {
this.tcpIngestor = tcpIngestor;
}
@PostConstruct
public void start() throws Exception {
tcpIngestor.start();
}
@PostConstruct
public void start() throws Exception {
tcpIngestor.start();
}
}

View File

@@ -2,6 +2,6 @@ package org.lucares.pdbui;
public interface Ingestor {
void start() throws Exception;
void start() throws Exception;
}

View File

@@ -6,13 +6,13 @@ import org.springframework.web.bind.annotation.ResponseStatus;
@ResponseStatus(value = HttpStatus.INTERNAL_SERVER_ERROR, reason = "Internal Server Error")
public class InternalServerError extends RuntimeException {
private static final long serialVersionUID = 548651821080252932L;
private static final long serialVersionUID = 548651821080252932L;
public InternalServerError(final String message, final Throwable cause) {
super(message, cause);
}
public InternalServerError(final String message, final Throwable cause) {
super(message, cause);
}
public InternalServerError(final Throwable cause) {
super(cause);
}
public InternalServerError(final Throwable cause) {
super(cause);
}
}

View File

@@ -16,82 +16,82 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
public class JsonToEntryTransformer implements LineToEntryTransformer {
private static final Logger LOGGER = LoggerFactory.getLogger(JsonToEntryTransformer.class);
private static final Logger LOGGER = LoggerFactory.getLogger(JsonToEntryTransformer.class);
private final TypeReference<Map<String, Object>> typeReferenceForMap = new TypeReference<Map<String, Object>>() {
};
private final TypeReference<Map<String, Object>> typeReferenceForMap = new TypeReference<Map<String, Object>>() {
};
private final ObjectMapper objectMapper = new ObjectMapper();
private final ObjectReader objectReader = objectMapper.readerFor(typeReferenceForMap);
private final FastISODateParser fastISODateParser = new FastISODateParser();
private final ObjectMapper objectMapper = new ObjectMapper();
private final ObjectReader objectReader = objectMapper.readerFor(typeReferenceForMap);
private final FastISODateParser fastISODateParser = new FastISODateParser();
@Override
public Optional<Entry> toEntry(final String line) throws IOException {
@Override
public Optional<Entry> toEntry(final String line) throws IOException {
final Map<String, Object> object = objectReader.readValue(line);
final Map<String, Object> object = objectReader.readValue(line);
final Optional<Entry> entry = createEntry(object);
final Optional<Entry> entry = createEntry(object);
return entry;
}
return entry;
}
public Optional<Entry> createEntry(final Map<String, Object> map) {
try {
public Optional<Entry> createEntry(final Map<String, Object> map) {
try {
if (map.containsKey("duration") && map.containsKey("@timestamp")) {
final long epochMilli = getDate(map);
final long duration = (int) map.get("duration");
if (map.containsKey("duration") && map.containsKey("@timestamp")) {
final long epochMilli = getDate(map);
final long duration = (int) map.get("duration");
final Tags tags = createTags(map);
final Tags tags = createTags(map);
final Entry entry = new Entry(epochMilli, duration, tags);
return Optional.of(entry);
} else {
LOGGER.info("Skipping invalid entry: " + map);
return Optional.empty();
}
} catch (final Exception e) {
LOGGER.error("Failed to create entry from map: " + map, e);
return Optional.empty();
}
}
final Entry entry = new Entry(epochMilli, duration, tags);
return Optional.of(entry);
} else {
LOGGER.info("Skipping invalid entry: " + map);
return Optional.empty();
}
} catch (final Exception e) {
LOGGER.error("Failed to create entry from map: " + map, e);
return Optional.empty();
}
}
private Tags createTags(final Map<String, Object> map) {
final TagsBuilder tags = TagsBuilder.create();
for (final java.util.Map.Entry<String, Object> e : map.entrySet()) {
private Tags createTags(final Map<String, Object> map) {
final TagsBuilder tags = TagsBuilder.create();
for (final java.util.Map.Entry<String, Object> e : map.entrySet()) {
final String key = e.getKey();
final Object value = e.getValue();
final String key = e.getKey();
final Object value = e.getValue();
switch (key) {
case "@timestamp":
case "duration":
// these fields are not tags
break;
case "tags":
// ignore: we only support key/value tags
break;
default:
final int keyAsInt = Tags.STRING_COMPRESSOR.put(key);
final int valueAsInt;
if (value instanceof String) {
valueAsInt = Tags.STRING_COMPRESSOR.put((String) value);
} else if (value != null) {
valueAsInt = Tags.STRING_COMPRESSOR.put(String.valueOf(value));
} else {
continue;
}
tags.add(keyAsInt, valueAsInt);
break;
}
}
return tags.build();
}
switch (key) {
case "@timestamp":
case "duration":
// these fields are not tags
break;
case "tags":
// ignore: we only support key/value tags
break;
default:
final int keyAsInt = Tags.STRING_COMPRESSOR.put(key);
final int valueAsInt;
if (value instanceof String) {
valueAsInt = Tags.STRING_COMPRESSOR.put((String) value);
} else if (value != null) {
valueAsInt = Tags.STRING_COMPRESSOR.put(String.valueOf(value));
} else {
continue;
}
tags.add(keyAsInt, valueAsInt);
break;
}
}
return tags.build();
}
private long getDate(final Map<String, Object> map) {
final String timestamp = (String) map.get("@timestamp");
private long getDate(final Map<String, Object> map) {
final String timestamp = (String) map.get("@timestamp");
return fastISODateParser.parseAsEpochMilli(timestamp);
}
return fastISODateParser.parseAsEpochMilli(timestamp);
}
}

View File

@@ -6,5 +6,5 @@ import java.util.Optional;
import org.lucares.pdb.api.Entry;
public interface LineToEntryTransformer {
public Optional<Entry> toEntry(String line) throws IOException;
public Optional<Entry> toEntry(String line) throws IOException;
}

View File

@@ -18,14 +18,14 @@ import org.springframework.scheduling.annotation.EnableAsync;
@ComponentScan("org.lucares.pdbui")
public class MySpringConfiguration {
private static final Logger LOGGER = LoggerFactory.getLogger(MySpringConfiguration.class);
private static final Logger LOGGER = LoggerFactory.getLogger(MySpringConfiguration.class);
@Bean
PerformanceDb performanceDb(@Value("${db.base}") final String dbBaseDir) throws IOException {
final Path dataDirectory = Paths.get(dbBaseDir);
@Bean
PerformanceDb performanceDb(@Value("${db.base}") final String dbBaseDir) throws IOException {
final Path dataDirectory = Paths.get(dbBaseDir);
LOGGER.info("using database in {}", dataDirectory.toAbsolutePath());
LOGGER.info("using database in {}", dataDirectory.toAbsolutePath());
return new PerformanceDb(dataDirectory);
}
return new PerformanceDb(dataDirectory);
}
}

View File

@@ -6,13 +6,13 @@ import org.springframework.web.bind.annotation.ResponseStatus;
@ResponseStatus(value = HttpStatus.NOT_FOUND)
public class NotFoundException extends RuntimeException {
private static final long serialVersionUID = 694206253376122420L;
private static final long serialVersionUID = 694206253376122420L;
public NotFoundException(final String message, final Throwable cause) {
super(message, cause);
}
public NotFoundException(final String message, final Throwable cause) {
super(message, cause);
}
public NotFoundException(final Throwable cause) {
super(cause);
}
public NotFoundException(final Throwable cause) {
super(cause);
}
}

View File

@@ -65,268 +65,268 @@ import com.fasterxml.jackson.databind.ObjectMapper;
@Controller
@EnableAutoConfiguration
@CrossOrigin(origins = {"http://localhost:4200", "http://127.0.0.1:4200"})
@CrossOrigin(origins = { "http://localhost:4200", "http://127.0.0.1:4200" })
public class PdbController implements HardcodedValues, PropertyKeys {
private static final Logger LOGGER = LoggerFactory.getLogger(PdbController.class);
private static final Logger LOGGER = LoggerFactory.getLogger(PdbController.class);
private final Plotter plotter;
private final PerformanceDb db;
private final Plotter plotter;
private final PerformanceDb db;
private final ReentrantLock plotterLock = new ReentrantLock();
private final ReentrantLock plotterLock = new ReentrantLock();
@Value("${" + PRODUCTION_MODE + ":true}")
private boolean modeProduction;
@Value("${"+DEFAULTS_QUERY_EXAMPLES+":}")
private String queryExamples;
@Value("${"+DEFAULTS_GROUP_BY+":}")
private String defaultsGroupBy;
@Value("${"+DEFAULTS_SPLIT_BY+":}")
private String defaultsSplitBy;
@Value("${" + PRODUCTION_MODE + ":true}")
private boolean modeProduction;
public PdbController(final PerformanceDb db, final Plotter plotter) {
this.db = db;
this.plotter = plotter;
}
@Value("${" + DEFAULTS_QUERY_EXAMPLES + ":}")
private String queryExamples;
@GetMapping("/")
public ModelAndView index() {
final String view = "main";
final Map<String, Object> model = new HashMap<>();
// model.put("oldestValue",
// LocalDateTime.now().minusDays(7).format(DATE_FORMAT_BEGIN));
// model.put("latestValue", LocalDateTime.now().format(DATE_FORMAT_END));
model.put("isProduction", modeProduction);
return new ModelAndView(view, model);
}
@Value("${" + DEFAULTS_GROUP_BY + ":}")
private String defaultsGroupBy;
@RequestMapping(path = "/plots", //
method = RequestMethod.GET, //
consumes = MediaType.APPLICATION_JSON_VALUE, //
produces = MediaType.APPLICATION_JSON_VALUE //
)
@ResponseBody
ResponseEntity<PlotResponse> createPlotGet(@RequestParam(name = "request") final String request)
throws InternalPlottingException, InterruptedException, JsonParseException, JsonMappingException,
IOException {
@Value("${" + DEFAULTS_SPLIT_BY + ":}")
private String defaultsSplitBy;
final ObjectMapper objectMapper = new ObjectMapper();
final PlotRequest plotRequest = objectMapper.readValue(request, PlotRequest.class);
return createPlot(plotRequest);
}
@RequestMapping(path = "/plots", //
method = RequestMethod.POST, //
consumes = MediaType.APPLICATION_JSON_VALUE, //
produces = MediaType.APPLICATION_JSON_VALUE //
)
@ResponseBody
ResponseEntity<PlotResponse> createPlot(@RequestBody final PlotRequest request)
throws InternalPlottingException, InterruptedException {
final PlotSettings plotSettings = PlotSettingsTransformer.toSettings(request);
if (StringUtils.isBlank(plotSettings.getQuery())) {
throw new BadRequest("The query must not be empty!");
}
// TODO the UI should cancel requests that are in flight before sending a plot
// request
if (plotterLock.tryLock(5, TimeUnit.SECONDS)) {
try {
final PlotResult result = plotter.plot(plotSettings);
final String imageUrl = WEB_IMAGE_OUTPUT_PATH + "/" + result.getImageName();
LOGGER.trace("image url: {}", imageUrl);
final String thumbnailUrl = result.getThumbnailPath() != null
? WEB_IMAGE_OUTPUT_PATH + "/" + result.getThumbnailName()
: "img/no-thumbnail.png";
final PlotResponseStats stats = PlotResponseStats.fromDataSeries(result.getDataSeries());
final PlotResponse plotResponse = new PlotResponse(stats, imageUrl, thumbnailUrl);
return ResponseEntity.ok().body(plotResponse);
} catch (final NoDataPointsException e) {
throw new NotFoundException("No data was found. Try another query, or change the date range.",e);
} finally {
plotterLock.unlock();
}
} else {
throw new ServiceUnavailableException("Too many parallel requests!");
}
}
@RequestMapping(path = "/plots", //
method = RequestMethod.GET, //
produces = MediaType.APPLICATION_OCTET_STREAM_VALUE //
)
StreamingResponseBody createPlotImage(@RequestParam(name = "query", defaultValue = "") final String query,
@RequestParam(name = "groupBy[]", defaultValue = "") final List<String> aGroupBy,
@RequestParam(name = "limitBy.number", defaultValue = "10") final int limit,
@RequestParam(name = "limitBy.selected", defaultValue = "NO_LIMIT") final Limit limitBy,
@RequestParam(name = "dateRange") final String dateRange,
@RequestParam(name = "axisScale", defaultValue = "LINEAR") final AxisScale axisScale,
@RequestParam(name = "aggregates") final EnumSet<Aggregate>aggregate,
@RequestParam(name = "keyOutside", defaultValue = "false") final boolean keyOutside,
@RequestParam(name = "width", defaultValue = "1920") final int hidth,
@RequestParam(name = "height", defaultValue = "1080") final int height) {
return (final OutputStream outputStream) -> {
if (StringUtils.isBlank(query)) {
throw new BadRequest("The query must not be empty!");
}
if (StringUtils.isBlank(dateRange)) {
throw new BadRequest("The parameter 'dateRange' must be set.");
}
final PlotSettings plotSettings = new PlotSettings();
plotSettings.setQuery(query);
plotSettings.setGroupBy(aGroupBy);
plotSettings.setHeight(height);
plotSettings.setWidth(hidth);
plotSettings.setLimit(limit);
plotSettings.setLimitBy(limitBy);
plotSettings.setDateRange(dateRange);
plotSettings.setYAxisScale(axisScale);
plotSettings.setAggregates(PlotSettingsTransformer.toAggregateInternal(plotSettings.getYRangeUnit(), plotSettings.getYAxisScale(), aggregate));
plotSettings.setKeyOutside(keyOutside);
plotSettings.setGenerateThumbnail(false);
if (plotterLock.tryLock()) {
try {
final PlotResult result = plotter.plot(plotSettings);
try (FileInputStream in = new FileInputStream(result.getImagePath().toFile())) {
StreamUtils.copy(in, outputStream);
}
} catch (final NoDataPointsException e) {
throw new NotFoundException(e);
} catch (final InternalPlottingException e) {
throw new InternalServerError(e);
} finally {
plotterLock.unlock();
}
} else {
throw new ServiceUnavailableException("Too many parallel requests!");
}
};
}
@RequestMapping(path = "/autocomplete", //
method = RequestMethod.GET, //
produces = MediaType.APPLICATION_JSON_VALUE //
)
@ResponseBody
AutocompleteResponse autocomplete(@RequestParam(name = "query") final String query,
@RequestParam(name = "caretIndex") final int caretIndex,
@RequestParam(name="resultMode", defaultValue = "CUT_AT_DOT") ResultMode resultMode) {
// TODO get date range from UI
final DateTimeRange dateRange = DateTimeRange.max();
final int zeroBasedCaretIndex = caretIndex - 1;
final QueryWithCaretMarker q = new QueryWithCaretMarker(query, dateRange, zeroBasedCaretIndex,
resultMode);
final AutocompleteResponse result = new AutocompleteResponse();
final List<Proposal> proposals = db.autocomplete(q);
if (query.trim().length() == 0) {
proposals.addAll(exampleProposals());
}
final List<AutocompleteProposal> autocompleteProposals = toAutocompleteProposals(proposals);
Collections.sort(autocompleteProposals, new AutocompleteProposalByValue());
result.setProposals(autocompleteProposals);
return result;
}
private List<Proposal> exampleProposals() {
List<Proposal> result = new ArrayList<Proposal>();
if (queryExamples.length() > 0) {
final String[] exampleQueries = queryExamples.split(Pattern.quote(";"));
for (String example : exampleQueries) {
Proposal p = new Proposal(" Example: "+example, example, true, example+" ", example.length()+1);
result.add(p);
}
public PdbController(final PerformanceDb db, final Plotter plotter) {
this.db = db;
this.plotter = plotter;
}
return result;
}
@RequestMapping(path = "/fields", //
method = RequestMethod.GET, //
//consumes = MediaType.APPLICATION_JSON_UTF8_VALUE, //
produces = MediaType.APPLICATION_JSON_VALUE //
)
@ResponseBody
List<String> fields() {
final DateTimeRange dateTimeRange = DateTimeRange.max();
final List<String> fields = db.getFields(dateTimeRange);
@GetMapping("/")
public ModelAndView index() {
final String view = "main";
final Map<String, Object> model = new HashMap<>();
// model.put("oldestValue",
// LocalDateTime.now().minusDays(7).format(DATE_FORMAT_BEGIN));
// model.put("latestValue", LocalDateTime.now().format(DATE_FORMAT_END));
model.put("isProduction", modeProduction);
return new ModelAndView(view, model);
}
fields.sort(Collator.getInstance(Locale.ENGLISH));
@RequestMapping(path = "/plots", //
method = RequestMethod.GET, //
consumes = MediaType.APPLICATION_JSON_VALUE, //
produces = MediaType.APPLICATION_JSON_VALUE //
)
@ResponseBody
ResponseEntity<PlotResponse> createPlotGet(@RequestParam(name = "request") final String request)
throws InternalPlottingException, InterruptedException, JsonParseException, JsonMappingException,
IOException {
return fields;
}
final ObjectMapper objectMapper = new ObjectMapper();
final PlotRequest plotRequest = objectMapper.readValue(request, PlotRequest.class);
@RequestMapping(path = "/fields/{fieldName}/values", //
method = RequestMethod.GET, //
consumes = MediaType.APPLICATION_JSON_VALUE, //
produces = MediaType.APPLICATION_JSON_VALUE //
)
@ResponseBody
SortedSet<String> fields(@PathVariable(name = "fieldName") final String fieldName,
@RequestParam(name = "query") final String query) {
return createPlot(plotRequest);
}
// TODO get date range from UI
final String q = query.isBlank()//
? String.format("%s = ", fieldName)//
: String.format("(%s) and %s=", query, fieldName);
final int zeroBasedCaretIndex = q.length();
final DateTimeRange dateRange = DateTimeRange.max();
final QueryWithCaretMarker autocompleteQuery = new QueryWithCaretMarker(q, dateRange, zeroBasedCaretIndex,
ResultMode.FULL_VALUES);
@RequestMapping(path = "/plots", //
method = RequestMethod.POST, //
consumes = MediaType.APPLICATION_JSON_VALUE, //
produces = MediaType.APPLICATION_JSON_VALUE //
)
@ResponseBody
ResponseEntity<PlotResponse> createPlot(@RequestBody final PlotRequest request)
throws InternalPlottingException, InterruptedException {
final List<Proposal> result = db.autocomplete(autocompleteQuery);
final PlotSettings plotSettings = PlotSettingsTransformer.toSettings(request);
if (StringUtils.isBlank(plotSettings.getQuery())) {
throw new BadRequest("The query must not be empty!");
}
final SortedSet<String> fields = CollectionUtils.map(result, new TreeSet<>(), Proposal::getProposedTag);
// TODO the UI should cancel requests that are in flight before sending a plot
// request
if (plotterLock.tryLock(5, TimeUnit.SECONDS)) {
try {
final PlotResult result = plotter.plot(plotSettings);
return fields;
}
@RequestMapping(path = "/filters/defaults", //
method = RequestMethod.GET, //
produces = MediaType.APPLICATION_JSON_VALUE //
)
@ResponseBody
public FilterDefaults getFilterDefaults() {
final Set<String> groupBy = defaultsGroupBy.isBlank() ? Set.of() : Set.of(defaultsGroupBy.split("\\s*,\\s*"));
final List<String> fields = fields();
return new FilterDefaults(fields, groupBy, defaultsSplitBy);
}
final String imageUrl = WEB_IMAGE_OUTPUT_PATH + "/" + result.getImageName();
LOGGER.trace("image url: {}", imageUrl);
private List<AutocompleteProposal> toAutocompleteProposals(final List<Proposal> proposals) {
final String thumbnailUrl = result.getThumbnailPath() != null
? WEB_IMAGE_OUTPUT_PATH + "/" + result.getThumbnailName()
: "img/no-thumbnail.png";
final List<AutocompleteProposal> result = new ArrayList<>();
final PlotResponseStats stats = PlotResponseStats.fromDataSeries(result.getDataSeries());
final PlotResponse plotResponse = new PlotResponse(stats, imageUrl, thumbnailUrl);
for (final Proposal proposal : proposals) {
final AutocompleteProposal e = new AutocompleteProposal();
e.setValue(proposal.getProposedTag());
e.setNewQuery(proposal.getNewQuery());
e.setNewCaretPosition(proposal.getNewCaretPosition());
return ResponseEntity.ok().body(plotResponse);
} catch (final NoDataPointsException e) {
throw new NotFoundException("No data was found. Try another query, or change the date range.", e);
} finally {
plotterLock.unlock();
}
result.add(e);
}
} else {
throw new ServiceUnavailableException("Too many parallel requests!");
}
}
return result;
}
@RequestMapping(path = "/plots", //
method = RequestMethod.GET, //
produces = MediaType.APPLICATION_OCTET_STREAM_VALUE //
)
StreamingResponseBody createPlotImage(@RequestParam(name = "query", defaultValue = "") final String query,
@RequestParam(name = "groupBy[]", defaultValue = "") final List<String> aGroupBy,
@RequestParam(name = "limitBy.number", defaultValue = "10") final int limit,
@RequestParam(name = "limitBy.selected", defaultValue = "NO_LIMIT") final Limit limitBy,
@RequestParam(name = "dateRange") final String dateRange,
@RequestParam(name = "axisScale", defaultValue = "LINEAR") final AxisScale axisScale,
@RequestParam(name = "aggregates") final EnumSet<Aggregate> aggregate,
@RequestParam(name = "keyOutside", defaultValue = "false") final boolean keyOutside,
@RequestParam(name = "width", defaultValue = "1920") final int hidth,
@RequestParam(name = "height", defaultValue = "1080") final int height) {
return (final OutputStream outputStream) -> {
if (StringUtils.isBlank(query)) {
throw new BadRequest("The query must not be empty!");
}
if (StringUtils.isBlank(dateRange)) {
throw new BadRequest("The parameter 'dateRange' must be set.");
}
final PlotSettings plotSettings = new PlotSettings();
plotSettings.setQuery(query);
plotSettings.setGroupBy(aGroupBy);
plotSettings.setHeight(height);
plotSettings.setWidth(hidth);
plotSettings.setLimit(limit);
plotSettings.setLimitBy(limitBy);
plotSettings.setDateRange(dateRange);
plotSettings.setYAxisScale(axisScale);
plotSettings.setAggregates(PlotSettingsTransformer.toAggregateInternal(plotSettings.getYRangeUnit(),
plotSettings.getYAxisScale(), aggregate));
plotSettings.setKeyOutside(keyOutside);
plotSettings.setGenerateThumbnail(false);
if (plotterLock.tryLock()) {
try {
final PlotResult result = plotter.plot(plotSettings);
try (FileInputStream in = new FileInputStream(result.getImagePath().toFile())) {
StreamUtils.copy(in, outputStream);
}
} catch (final NoDataPointsException e) {
throw new NotFoundException(e);
} catch (final InternalPlottingException e) {
throw new InternalServerError(e);
} finally {
plotterLock.unlock();
}
} else {
throw new ServiceUnavailableException("Too many parallel requests!");
}
};
}
@RequestMapping(path = "/autocomplete", //
        method = RequestMethod.GET, //
        produces = MediaType.APPLICATION_JSON_VALUE //
)
@ResponseBody
/**
 * Computes autocomplete proposals for the query at the given caret position.
 *
 * For an (effectively) empty query the configured example queries are offered
 * in addition to the database proposals.
 *
 * @param caretIndex 1-based caret position as sent by the UI; converted to
 *                   0-based before querying the database
 * @param resultMode how proposals are cut (defaults to cutting at the next dot)
 * @return the sorted proposals wrapped in an {@link AutocompleteResponse}
 */
AutocompleteResponse autocomplete(@RequestParam(name = "query") final String query,
        @RequestParam(name = "caretIndex") final int caretIndex,
        @RequestParam(name = "resultMode", defaultValue = "CUT_AT_DOT") final ResultMode resultMode) {
    // TODO get date range from UI
    final DateTimeRange dateRange = DateTimeRange.max();
    final int zeroBasedCaretIndex = caretIndex - 1;
    final QueryWithCaretMarker q = new QueryWithCaretMarker(query, dateRange, zeroBasedCaretIndex, resultMode);
    final AutocompleteResponse result = new AutocompleteResponse();
    final List<Proposal> proposals = db.autocomplete(q);
    if (query.trim().isEmpty()) {
        proposals.addAll(exampleProposals());
    }
    final List<AutocompleteProposal> autocompleteProposals = toAutocompleteProposals(proposals);
    autocompleteProposals.sort(new AutocompleteProposalByValue());
    result.setProposals(autocompleteProposals);
    return result;
}
/**
 * Builds proposals from the configured example queries.
 *
 * The {@code queryExamples} property is a semicolon-separated list; an empty
 * property yields an empty result.
 *
 * @return one proposal per configured example query, possibly empty
 */
private List<Proposal> exampleProposals() {
    final List<Proposal> result = new ArrayList<>();
    if (!queryExamples.isEmpty()) {
        final String[] exampleQueries = queryExamples.split(Pattern.quote(";"));
        for (final String example : exampleQueries) {
            // Accepting the proposal inserts the example plus a trailing space and
            // places the caret right after it.
            final Proposal p = new Proposal(" Example: " + example, example, true, example + " ",
                    example.length() + 1);
            result.add(p);
        }
    }
    return result;
}
@RequestMapping(path = "/fields", //
        method = RequestMethod.GET, //
        // consumes = MediaType.APPLICATION_JSON_UTF8_VALUE, //
        produces = MediaType.APPLICATION_JSON_VALUE //
)
@ResponseBody
/**
 * Lists all known field names over the maximal time range, sorted
 * alphabetically (English collation) for display in the UI.
 */
List<String> fields() {
    final List<String> result = db.getFields(DateTimeRange.max());
    result.sort(Collator.getInstance(Locale.ENGLISH));
    return result;
}
@RequestMapping(path = "/fields/{fieldName}/values", //
        method = RequestMethod.GET, //
        consumes = MediaType.APPLICATION_JSON_VALUE, //
        produces = MediaType.APPLICATION_JSON_VALUE //
)
@ResponseBody
/**
 * Returns the distinct values of one field, optionally restricted by a query.
 *
 * Implemented on top of the autocomplete engine: a synthetic query ending in
 * "{fieldName}=" is built and the caret is placed at its end, so the engine
 * proposes full values for exactly that field.
 */
SortedSet<String> fields(@PathVariable(name = "fieldName") final String fieldName,
        @RequestParam(name = "query") final String query) {
    // TODO get date range from UI
    final String syntheticQuery;
    if (query.isBlank()) {
        syntheticQuery = String.format("%s = ", fieldName);
    } else {
        syntheticQuery = String.format("(%s) and %s=", query, fieldName);
    }
    final QueryWithCaretMarker autocompleteQuery = new QueryWithCaretMarker(syntheticQuery, DateTimeRange.max(),
            syntheticQuery.length(), ResultMode.FULL_VALUES);
    final List<Proposal> proposals = db.autocomplete(autocompleteQuery);
    return CollectionUtils.map(proposals, new TreeSet<>(), Proposal::getProposedTag);
}
@RequestMapping(path = "/filters/defaults", //
        method = RequestMethod.GET, //
        produces = MediaType.APPLICATION_JSON_VALUE //
)
@ResponseBody
/**
 * Supplies the UI with its initial filter settings: all known fields plus the
 * configured default groupBy/splitBy values.
 */
public FilterDefaults getFilterDefaults() {
    // An empty/blank groupBy property means "no default grouping".
    final Set<String> groupBy;
    if (defaultsGroupBy.isBlank()) {
        groupBy = Set.of();
    } else {
        groupBy = Set.of(defaultsGroupBy.split("\\s*,\\s*"));
    }
    return new FilterDefaults(fields(), groupBy, defaultsSplitBy);
}
/**
 * Maps internal {@link Proposal} objects to the JSON-serializable
 * {@link AutocompleteProposal} DTOs sent to the UI.
 *
 * @param proposals the internal proposals; not modified
 * @return a new list with one DTO per proposal, in the same order
 */
private List<AutocompleteProposal> toAutocompleteProposals(final List<Proposal> proposals) {
    final List<AutocompleteProposal> dtos = new ArrayList<>(proposals.size());
    for (final Proposal proposal : proposals) {
        final AutocompleteProposal dto = new AutocompleteProposal();
        dto.setValue(proposal.getProposedTag());
        dto.setNewQuery(proposal.getNewQuery());
        dto.setNewCaretPosition(proposal.getNewCaretPosition());
        dtos.add(dto);
    }
    return dtos;
}
}

View File

@@ -6,24 +6,22 @@ import org.springframework.boot.SpringApplication;
public class PdbWebapp {
public static void main(final String[] args) throws Exception {
SpringApplication.run(MySpringConfiguration.class, args);
Thread t = new Thread(()-> {
while(true){
try{
TimeUnit.MINUTES.sleep(10);
}catch(InterruptedException e)
{
// ignore
}
System.gc();
}
});
t.setDaemon(true);
t.setName("periodic-gc");
t.start();
}
public static void main(final String[] args) throws Exception {
SpringApplication.run(MySpringConfiguration.class, args);
Thread t = new Thread(() -> {
while (true) {
try {
TimeUnit.MINUTES.sleep(10);
} catch (InterruptedException e) {
// ignore
}
System.gc();
}
});
t.setDaemon(true);
t.setName("periodic-gc");
t.start();
}
}

View File

@@ -12,76 +12,79 @@ import org.lucares.pdbui.domain.PlotRequest;
import org.lucares.pdbui.domain.TimeRangeUnit;
class PlotSettingsTransformer {
static PlotSettings toSettings(final PlotRequest request) {
static PlotSettings toSettings(final PlotRequest request) {
final PlotSettings result = new PlotSettings();
final PlotSettings result = new PlotSettings();
result.setQuery(request.getQuery());
result.setGroupBy(request.getGroupBy());
result.setHeight(request.getHeight());
result.setWidth(request.getWidth());
result.setLimit(request.getLimit());
result.setLimitBy(request.getLimitBy());
result.setDateRange(request.getDateRange());
result.setYAxisScale(request.getAxisScale());
result.setKeyOutside(request.isKeyOutside());
result.setThumbnailMaxWidth(request.getThumbnailMaxWidth());
result.setThumbnailMaxHeight(request.getThumbnailMaxHeight());
result.setGenerateThumbnail(request.isGenerateThumbnail());
result.setYRangeMin(request.getyRangeMin());
result.setYRangeMax(request.getyRangeMax());
result.setYRangeUnit(toTimeRangeUnitInternal(request.getyRangeUnit()));
result.setAggregates(toAggregateInternal(result.getYRangeUnit(), result.getYAxisScale(), request.getAggregates()));
result.setQuery(request.getQuery());
result.setGroupBy(request.getGroupBy());
result.setHeight(request.getHeight());
result.setWidth(request.getWidth());
result.setLimit(request.getLimit());
result.setLimitBy(request.getLimitBy());
result.setDateRange(request.getDateRange());
result.setYAxisScale(request.getAxisScale());
result.setKeyOutside(request.isKeyOutside());
result.setThumbnailMaxWidth(request.getThumbnailMaxWidth());
result.setThumbnailMaxHeight(request.getThumbnailMaxHeight());
result.setGenerateThumbnail(request.isGenerateThumbnail());
result.setYRangeMin(request.getyRangeMin());
result.setYRangeMax(request.getyRangeMax());
result.setYRangeUnit(toTimeRangeUnitInternal(request.getyRangeUnit()));
result.setAggregates(
toAggregateInternal(result.getYRangeUnit(), result.getYAxisScale(), request.getAggregates()));
return result;
}
private static TimeRangeUnitInternal toTimeRangeUnitInternal(final TimeRangeUnit yRangeUnit) {
switch (yRangeUnit) {
case AUTOMATIC:
return TimeRangeUnitInternal.AUTOMATIC;
case MILLISECONDS:
return TimeRangeUnitInternal.MILLISECONDS;
case SECONDS:
return TimeRangeUnitInternal.SECONDS;
case MINUTES:
return TimeRangeUnitInternal.MINUTES;
case HOURS:
return TimeRangeUnitInternal.HOURS;
case DAYS:
return TimeRangeUnitInternal.DAYS;
return result;
}
throw new IllegalStateException("unhandled enum value: " + yRangeUnit);
}
static AggregateHandlerCollection toAggregateInternal(TimeRangeUnitInternal yRangeUnit, AxisScale yAxisScale,
final Iterable<Aggregate> aggregates) {
final AggregateHandlerCollection aggregateHandlerCollection = new AggregateHandlerCollection();
for (Aggregate aggregate : aggregates) {
switch (aggregate) {
case CUM_DISTRIBUTION:
aggregateHandlerCollection.add(new CumulativeDistributionHandler());
break;
case PARALLEL:
aggregateHandlerCollection.add(new ParallelRequestsAggregate());
break;
case SCATTER:
if (yRangeUnit == TimeRangeUnitInternal.AUTOMATIC && yAxisScale == AxisScale.LINEAR) {
// TODO need a second ScatterAggregateHandler for YRangeUnit() == TimeRangeUnitInternal.AUTOMATIC
throw new UnsupportedOperationException("linear axis with automatic y range does not work, use logarthmic y-axis, or define a y-axis range");
}else {
aggregateHandlerCollection.add(new ScatterAggregateHandler());
private static TimeRangeUnitInternal toTimeRangeUnitInternal(final TimeRangeUnit yRangeUnit) {
switch (yRangeUnit) {
case AUTOMATIC:
return TimeRangeUnitInternal.AUTOMATIC;
case MILLISECONDS:
return TimeRangeUnitInternal.MILLISECONDS;
case SECONDS:
return TimeRangeUnitInternal.SECONDS;
case MINUTES:
return TimeRangeUnitInternal.MINUTES;
case HOURS:
return TimeRangeUnitInternal.HOURS;
case DAYS:
return TimeRangeUnitInternal.DAYS;
}
break;
default:
throw new IllegalStateException("unhandled enum: " + aggregate);
}
throw new IllegalStateException("unhandled enum value: " + yRangeUnit);
}
aggregateHandlerCollection.updateAxisForHandlers();
return aggregateHandlerCollection;
}
static AggregateHandlerCollection toAggregateInternal(TimeRangeUnitInternal yRangeUnit, AxisScale yAxisScale,
final Iterable<Aggregate> aggregates) {
final AggregateHandlerCollection aggregateHandlerCollection = new AggregateHandlerCollection();
for (Aggregate aggregate : aggregates) {
switch (aggregate) {
case CUM_DISTRIBUTION:
aggregateHandlerCollection.add(new CumulativeDistributionHandler());
break;
case PARALLEL:
aggregateHandlerCollection.add(new ParallelRequestsAggregate());
break;
case SCATTER:
if (yRangeUnit == TimeRangeUnitInternal.AUTOMATIC && yAxisScale == AxisScale.LINEAR) {
// TODO need a second ScatterAggregateHandler for YRangeUnit() ==
// TimeRangeUnitInternal.AUTOMATIC
throw new UnsupportedOperationException(
"linear axis with automatic y range does not work, use logarthmic y-axis, or define a y-axis range");
} else {
aggregateHandlerCollection.add(new ScatterAggregateHandler());
}
break;
default:
throw new IllegalStateException("unhandled enum: " + aggregate);
}
}
aggregateHandlerCollection.updateAxisForHandlers();
return aggregateHandlerCollection;
}
}

View File

@@ -14,29 +14,29 @@ import org.springframework.stereotype.Component;
@Component
public class PlotterBeanFactory extends AbstractFactoryBean<Plotter> implements PropertyKeys {
private final PerformanceDb db;
private final Path tmpDir;
private final Path outputDir;
private final PerformanceDb db;
private final Path tmpDir;
private final Path outputDir;
@Autowired
public PlotterBeanFactory(final PerformanceDb db, @Value("${" + TMP_DIR + "}") final String tmpDir,
@Value("${" + PATH_GENERATED_IMAGES + "}") final String outputDir) {
this.db = db;
this.tmpDir = Paths.get(tmpDir);
this.outputDir = Paths.get(outputDir);
}
@Autowired
public PlotterBeanFactory(final PerformanceDb db, @Value("${" + TMP_DIR + "}") final String tmpDir,
@Value("${" + PATH_GENERATED_IMAGES + "}") final String outputDir) {
this.db = db;
this.tmpDir = Paths.get(tmpDir);
this.outputDir = Paths.get(outputDir);
}
@Override
public Class<?> getObjectType() {
return Plotter.class;
}
@Override
public Class<?> getObjectType() {
return Plotter.class;
}
@Override
protected Plotter createInstance() throws Exception {
Files.createDirectories(tmpDir);
Files.createDirectories(outputDir);
@Override
protected Plotter createInstance() throws Exception {
Files.createDirectories(tmpDir);
Files.createDirectories(outputDir);
return new Plotter(db, tmpDir, outputDir);
}
return new Plotter(db, tmpDir, outputDir);
}
}

View File

@@ -2,25 +2,25 @@ package org.lucares.pdbui;
public interface PropertyKeys {
/**
* The path for generated images
*/
String PATH_GENERATED_IMAGES = "path.output";
/**
* The path for generated images
*/
String PATH_GENERATED_IMAGES = "path.output";
/**
* Path for temporary files
*/
String TMP_DIR = "path.tmp";
/**
* Path for temporary files
*/
String TMP_DIR = "path.tmp";
/**
* Indicates whether or not this instance is running in production. This
* property is used to switch Vue.js into production or development mode.
*/
String PRODUCTION_MODE = "mode.production";
String DEFAULTS_QUERY_EXAMPLES = "defaults.query.examples";
String DEFAULTS_GROUP_BY = "defaults.groupBy";
String DEFAULTS_SPLIT_BY = "defaults.splitBy";
/**
* Indicates whether or not this instance is running in production. This
* property is used to switch Vue.js into production or development mode.
*/
String PRODUCTION_MODE = "mode.production";
String DEFAULTS_QUERY_EXAMPLES = "defaults.query.examples";
String DEFAULTS_GROUP_BY = "defaults.groupBy";
String DEFAULTS_SPLIT_BY = "defaults.splitBy";
}

View File

@@ -6,9 +6,9 @@ import org.springframework.web.bind.annotation.ResponseStatus;
@ResponseStatus(value = HttpStatus.SERVICE_UNAVAILABLE, reason = "Service Unavailable")
public class ServiceUnavailableException extends RuntimeException {
private static final long serialVersionUID = -4512668277873760500L;
private static final long serialVersionUID = -4512668277873760500L;
public ServiceUnavailableException(String message) {
super(message);
}
public ServiceUnavailableException(String message) {
super(message);
}
}

View File

@@ -44,391 +44,391 @@ import com.fasterxml.jackson.core.JsonParseException;
@Component
public class TcpIngestor implements Ingestor, AutoCloseable, DisposableBean {
private static final Logger LOGGER = LoggerFactory.getLogger(TcpIngestor.class);
private static final Logger LOGGER = LoggerFactory.getLogger(TcpIngestor.class);
public static final int PORT = 17347;
public static final int PORT = 17347;
private final AtomicBoolean acceptNewConnections = new AtomicBoolean(true);
private final AtomicBoolean acceptNewConnections = new AtomicBoolean(true);
private final ExecutorService serverThreadPool = Executors.newFixedThreadPool(2);
private final ExecutorService workerThreadPool = Executors.newCachedThreadPool();
private final PerformanceDb db;
public final static class Handler implements Callable<Void> {
private final ExecutorService serverThreadPool = Executors.newFixedThreadPool(2);
private final ExecutorService workerThreadPool = Executors.newCachedThreadPool();
private final PerformanceDb db;
public final static class Handler implements Callable<Void> {
/**
* Column header names starting with "-" will be ignored.
*/
static final String COLUM_IGNORE_PREFIX = "-";
private static final int IGNORE_COLUMN = 0;
final Socket clientSocket;
private final ArrayBlockingQueue<Entries> queue;
public Handler(final Socket clientSocket, final ArrayBlockingQueue<Entries> queue) {
this.clientSocket = clientSocket;
this.queue = queue;
}
@Override
public Void call() throws Exception {
final SocketAddress clientAddress = clientSocket.getRemoteSocketAddress();
Thread.currentThread().setName("worker-" + clientAddress);
LOGGER.debug("opening streams to client");
try (PrintWriter out = new PrintWriter(clientSocket.getOutputStream(), true);
InputStream in = new BufferedInputStream(clientSocket.getInputStream());) {
LOGGER.debug("reading from stream");
redirectInputStream(in);
LOGGER.debug("connection closed: " + clientAddress);
} catch (final Throwable e) {
LOGGER.warn("Stream handling failed", e);
throw e;
}
return null;
}
private void redirectInputStream(final InputStream in) throws IOException, InterruptedException {
in.mark(1);
final byte firstByte = (byte) in.read();
if (firstByte == '{') {
readJSON(in);
} else if (firstByte == PdbExport.MAGIC_BYTE) {
readCustomExportFormat(in);
} else if (isGZIP(firstByte)) {
in.reset();
final GZIPInputStream gzip = new GZIPInputStream(in);
redirectInputStream(gzip);
} else {
readCSV(in, firstByte);
}
}
private boolean isGZIP(final byte firstByte) {
// GZIP starts with 0x1f, 0x8b, see https://www.ietf.org/rfc/rfc1952.txt section
// 2.3.1
// I am cheap and only check the first byte
return firstByte == 0x1f;
}
private void readCustomExportFormat(final InputStream in) throws IOException {
final CustomExportFormatToEntryTransformer transformer = new CustomExportFormatToEntryTransformer();
final BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
transformer.read(reader, queue);
}
private void readCSV(final InputStream in, final byte firstByte) throws IOException, InterruptedException {
final int chunksize = 1000;
Entries entries = new Entries(chunksize);
final byte newline = '\n';
final byte[] line = new byte[4096]; // max line length
line[0] = firstByte;
int offsetInLine = 1; // because the first byte is already set
int offsetInBuffer = 0;
final IntList separatorPositions = new IntList();
int read = 0;
int bytesInLine = 0;
int[] columns = null;
final byte[] buffer = new byte[4096 * 16];
final int keyTimestamp = Tags.STRING_COMPRESSOR.put("@timestamp");
final int keyDuration = Tags.STRING_COMPRESSOR.put("duration");
final FastISODateParser dateParser = new FastISODateParser();
while ((read = in.read(buffer)) >= 0) {
offsetInBuffer = 0;
for (int i = 0; i < read; i++) {
if (buffer[i] == newline) {
final int length = i - offsetInBuffer;
System.arraycopy(buffer, offsetInBuffer, line, offsetInLine, length);
bytesInLine = offsetInLine + length;
separatorPositions.add(offsetInLine + i - offsetInBuffer);
if (columns != null) {
final Entry entry = handleCsvLine(columns, line, bytesInLine, separatorPositions,
keyTimestamp, keyDuration, dateParser);
if (entry != null) {
entries.add(entry);
}
if (entries.size() >= chunksize) {
queue.put(entries);
entries = new Entries(chunksize);
}
} else {
columns = handleCsvHeaderLine(line, bytesInLine, separatorPositions);
}
offsetInBuffer = i + 1;
offsetInLine = 0;
bytesInLine = 0;
separatorPositions.clear();
} else if (buffer[i] == ',') {
separatorPositions.add(offsetInLine + i - offsetInBuffer);
}
}
if (offsetInBuffer < read) {
final int length = read - offsetInBuffer;
System.arraycopy(buffer, offsetInBuffer, line, offsetInLine, length);
bytesInLine = offsetInLine + length;
offsetInLine += length;
offsetInBuffer = 0;
}
}
final Entry entry = handleCsvLine(columns, line, bytesInLine, separatorPositions, keyTimestamp, keyDuration,
dateParser);
if (entry != null) {
entries.add(entry);
}
queue.put(entries);
}
private int[] handleCsvHeaderLine(final byte[] line, final int bytesInLine, final IntList separatorPositions) {
final int[] columns = new int[separatorPositions.size()];
int lastSeparatorPosition = -1;
final int size = separatorPositions.size();
for (int i = 0; i < size; i++) {
final int separatorPosition = separatorPositions.get(i);
final int compressedString = Tags.STRING_COMPRESSOR.put(line, lastSeparatorPosition + 1,
separatorPosition);
final String columnName = Tags.STRING_COMPRESSOR.get(compressedString);
columns[i] = ignoreColum(columnName) ? IGNORE_COLUMN : compressedString;
lastSeparatorPosition = separatorPosition;
}
return columns;
}
private boolean ignoreColum(final String columnName) {
return columnName.startsWith(COLUM_IGNORE_PREFIX);
}
private static Entry handleCsvLine(final int[] columns, final byte[] line, final int bytesInLine,
final IntList separatorPositions, final int keyTimestamp, final int keyDuration,
final FastISODateParser dateParser) {
try {
if (separatorPositions.size() != columns.length) {
return null;
}
final TagsBuilder tagsBuilder = new TagsBuilder();
int lastSeparatorPosition = -1;
final int size = separatorPositions.size();
long epochMilli = -1;
long duration = -1;
for (int i = 0; i < size; i++) {
final int separatorPosition = separatorPositions.get(i);
final int key = columns[i];
if (key == IGNORE_COLUMN) {
// this column's value will not be ingested
} else if (key == keyTimestamp) {
epochMilli = dateParser.parseAsEpochMilli(line, lastSeparatorPosition + 1);
} else if (key == keyDuration) {
duration = parseLong(line, lastSeparatorPosition + 1, separatorPosition);
} else if (lastSeparatorPosition + 1 < separatorPosition) { // value is not empty
final int value = Tags.STRING_COMPRESSOR.put(line, lastSeparatorPosition + 1,
separatorPosition);
tagsBuilder.add(key, value);
}
lastSeparatorPosition = separatorPosition;
}
final Tags tags = tagsBuilder.build();
return new Entry(epochMilli, duration, tags);
} catch (final RuntimeException e) {
LOGGER.debug("ignoring invalid line '" + new String(line, 0, bytesInLine, StandardCharsets.UTF_8) + "'",
e);
}
return null;
}
private static long parseLong(final byte[] bytes, final int start, int endExclusive) {
long result = 0;
int i = start;
int c = bytes[i];
int sign = 1;
if (c == '-') {
sign = -1;
i++;
}
while (i < endExclusive && (c = bytes[i]) >= 48 && c <= 57) {
result = result * 10 + (c - 48);
i++;
}
return sign * result;
}
private void readJSON(final InputStream in) throws IOException, InterruptedException {
final int chunksize = 100;
Entries entries = new Entries(chunksize);
final BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
String line = "{" + reader.readLine();
final JsonToEntryTransformer transformer = new JsonToEntryTransformer();
final Optional<Entry> firstEntry = transformer.toEntry(line);
if (firstEntry.isPresent()) {
LOGGER.debug("adding entry to queue: {}", firstEntry);
entries.add(firstEntry.get());
}
while ((line = reader.readLine()) != null) {
try {
final Optional<Entry> entry = transformer.toEntry(line);
if (entry.isPresent()) {
LOGGER.debug("adding entry to queue: {}", entry);
entries.add(entry.get());
}
} catch (final JsonParseException e) {
LOGGER.info("json parse error in line '" + line + "'", e);
}
if (entries.size() == chunksize) {
queue.put(entries);
entries = new Entries(chunksize);
}
}
queue.put(entries);
}
}
public TcpIngestor(final Path dataDirectory) throws IOException {
LOGGER.info("opening performance db: " + dataDirectory);
db = new PerformanceDb(dataDirectory);
LOGGER.debug("performance db open");
}
@Autowired
public TcpIngestor(final PerformanceDb db) {
this.db = db;
}
public PerformanceDb getDb() {
return db;
}
@Async
@Override
public void start() throws Exception {
final ArrayBlockingQueue<Entries> queue = new ArrayBlockingQueue<>(10);
serverThreadPool.submit(() -> {
Thread.currentThread().setName("db-ingestion");
boolean finished = false;
while (!finished) {
try {
db.putEntries(new BlockingQueueIterator<>(queue, Entries.POISON));
finished = true;
} catch (final Exception e) {
LOGGER.warn("Write to database failed. Will retry with the next element.", e);
}
}
return null;
});
serverThreadPool.submit(() -> listen(queue));
}
private Void listen(final ArrayBlockingQueue<Entries> queue) throws IOException {
Thread.currentThread().setName("socket-listener");
try (ServerSocket serverSocket = new ServerSocket(PORT);) {
LOGGER.info("listening on port " + PORT);
serverSocket.setSoTimeout((int) TimeUnit.MILLISECONDS.toMillis(2));
while (acceptNewConnections.get()) {
try {
final Socket clientSocket = serverSocket.accept();
LOGGER.debug("accepted connection: " + clientSocket.getRemoteSocketAddress());
workerThreadPool.submit(new Handler(clientSocket, queue));
LOGGER.debug("handler submitted");
} catch (final SocketTimeoutException e) {
// expected every 100ms
// needed to be able to stop the server
} catch (final Exception e) {
LOGGER.warn("Exception caught while waiting for a new connection. "
+ "We'll ignore this error and keep going.", e);
}
}
LOGGER.info("not accepting new connections. ");
LOGGER.info("stopping worker pool");
workerThreadPool.shutdown();
try {
workerThreadPool.awaitTermination(10, TimeUnit.MINUTES);
LOGGER.debug("workers stopped");
} catch (final InterruptedException e) {
Thread.interrupted();
}
LOGGER.debug("adding poison");
queue.put(Entries.POISON);
} catch (final InterruptedException e) {
LOGGER.info("Listener thread interrupted. Likely while adding the poison. "
+ "That would mean that the db-ingestion thread will not terminate. ");
Thread.interrupted();
} catch (final Exception e) {
LOGGER.error("", e);
throw e;
}
return null;
}
@Override
@PreDestroy
public void destroy() {
close();
}
@Override
public void close() {
LOGGER.debug("stopping accept thread");
acceptNewConnections.set(false);
serverThreadPool.shutdown();
try {
serverThreadPool.awaitTermination(10, TimeUnit.MINUTES);
} catch (final InterruptedException e) {
Thread.interrupted();
}
LOGGER.debug("closing database");
db.close();
LOGGER.info("destroyed");
}
public static void main(final String[] args) throws Exception {
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
LOGGER.info("shutdown hook");
}
});
try (final TcpIngestor ingestor = new TcpIngestor(Config.DATA_DIR)) {
ingestor.start();
TimeUnit.MILLISECONDS.sleep(Long.MAX_VALUE);
}
}
/**
* Column header names starting with "-" will be ignored.
*/
static final String COLUM_IGNORE_PREFIX = "-";
private static final int IGNORE_COLUMN = 0;
final Socket clientSocket;
private final ArrayBlockingQueue<Entries> queue;
public Handler(final Socket clientSocket, final ArrayBlockingQueue<Entries> queue) {
this.clientSocket = clientSocket;
this.queue = queue;
}
@Override
public Void call() throws Exception {
final SocketAddress clientAddress = clientSocket.getRemoteSocketAddress();
Thread.currentThread().setName("worker-" + clientAddress);
LOGGER.debug("opening streams to client");
try (PrintWriter out = new PrintWriter(clientSocket.getOutputStream(), true);
InputStream in = new BufferedInputStream(clientSocket.getInputStream());) {
LOGGER.debug("reading from stream");
redirectInputStream(in);
LOGGER.debug("connection closed: " + clientAddress);
} catch (final Throwable e) {
LOGGER.warn("Stream handling failed", e);
throw e;
}
return null;
}
private void redirectInputStream(final InputStream in) throws IOException, InterruptedException {
in.mark(1);
final byte firstByte = (byte) in.read();
if (firstByte == '{') {
readJSON(in);
} else if (firstByte == PdbExport.MAGIC_BYTE) {
readCustomExportFormat(in);
} else if (isGZIP(firstByte)) {
in.reset();
final GZIPInputStream gzip = new GZIPInputStream(in);
redirectInputStream(gzip);
} else {
readCSV(in, firstByte);
}
}
private boolean isGZIP(final byte firstByte) {
// GZIP starts with 0x1f, 0x8b, see https://www.ietf.org/rfc/rfc1952.txt section
// 2.3.1
// I am cheap and only check the first byte
return firstByte == 0x1f;
}
private void readCustomExportFormat(final InputStream in) throws IOException {
final CustomExportFormatToEntryTransformer transformer = new CustomExportFormatToEntryTransformer();
final BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
transformer.read(reader, queue);
}
private void readCSV(final InputStream in, final byte firstByte) throws IOException, InterruptedException {
final int chunksize = 1000;
Entries entries = new Entries(chunksize);
final byte newline = '\n';
final byte[] line = new byte[4096]; // max line length
line[0] = firstByte;
int offsetInLine = 1; // because the first byte is already set
int offsetInBuffer = 0;
final IntList separatorPositions = new IntList();
int read = 0;
int bytesInLine = 0;
int[] columns = null;
final byte[] buffer = new byte[4096 * 16];
final int keyTimestamp = Tags.STRING_COMPRESSOR.put("@timestamp");
final int keyDuration = Tags.STRING_COMPRESSOR.put("duration");
final FastISODateParser dateParser = new FastISODateParser();
while ((read = in.read(buffer)) >= 0) {
offsetInBuffer = 0;
for (int i = 0; i < read; i++) {
if (buffer[i] == newline) {
final int length = i - offsetInBuffer;
System.arraycopy(buffer, offsetInBuffer, line, offsetInLine, length);
bytesInLine = offsetInLine + length;
separatorPositions.add(offsetInLine + i - offsetInBuffer);
if (columns != null) {
final Entry entry = handleCsvLine(columns, line, bytesInLine, separatorPositions,
keyTimestamp, keyDuration, dateParser);
if (entry != null) {
entries.add(entry);
}
if (entries.size() >= chunksize) {
queue.put(entries);
entries = new Entries(chunksize);
}
} else {
columns = handleCsvHeaderLine(line, bytesInLine, separatorPositions);
}
offsetInBuffer = i + 1;
offsetInLine = 0;
bytesInLine = 0;
separatorPositions.clear();
} else if (buffer[i] == ',') {
separatorPositions.add(offsetInLine + i - offsetInBuffer);
}
}
if (offsetInBuffer < read) {
final int length = read - offsetInBuffer;
System.arraycopy(buffer, offsetInBuffer, line, offsetInLine, length);
bytesInLine = offsetInLine + length;
offsetInLine += length;
offsetInBuffer = 0;
}
}
final Entry entry = handleCsvLine(columns, line, bytesInLine, separatorPositions, keyTimestamp, keyDuration,
dateParser);
if (entry != null) {
entries.add(entry);
}
queue.put(entries);
}
private int[] handleCsvHeaderLine(final byte[] line, final int bytesInLine, final IntList separatorPositions) {
final int[] columns = new int[separatorPositions.size()];
int lastSeparatorPosition = -1;
final int size = separatorPositions.size();
for (int i = 0; i < size; i++) {
final int separatorPosition = separatorPositions.get(i);
final int compressedString = Tags.STRING_COMPRESSOR.put(line, lastSeparatorPosition + 1,
separatorPosition);
final String columnName = Tags.STRING_COMPRESSOR.get(compressedString);
columns[i] = ignoreColum(columnName) ? IGNORE_COLUMN : compressedString;
lastSeparatorPosition = separatorPosition;
}
return columns;
}
private boolean ignoreColum(final String columnName) {
return columnName.startsWith(COLUM_IGNORE_PREFIX);
}
private static Entry handleCsvLine(final int[] columns, final byte[] line, final int bytesInLine,
final IntList separatorPositions, final int keyTimestamp, final int keyDuration,
final FastISODateParser dateParser) {
try {
if (separatorPositions.size() != columns.length) {
return null;
}
final TagsBuilder tagsBuilder = new TagsBuilder();
int lastSeparatorPosition = -1;
final int size = separatorPositions.size();
long epochMilli = -1;
long duration = -1;
for (int i = 0; i < size; i++) {
final int separatorPosition = separatorPositions.get(i);
final int key = columns[i];
if (key == IGNORE_COLUMN) {
// this column's value will not be ingested
} else if (key == keyTimestamp) {
epochMilli = dateParser.parseAsEpochMilli(line, lastSeparatorPosition + 1);
} else if (key == keyDuration) {
duration = parseLong(line, lastSeparatorPosition + 1, separatorPosition);
} else if (lastSeparatorPosition + 1 < separatorPosition) { // value is not empty
final int value = Tags.STRING_COMPRESSOR.put(line, lastSeparatorPosition + 1,
separatorPosition);
tagsBuilder.add(key, value);
}
lastSeparatorPosition = separatorPosition;
}
final Tags tags = tagsBuilder.build();
return new Entry(epochMilli, duration, tags);
} catch (final RuntimeException e) {
LOGGER.debug("ignoring invalid line '" + new String(line, 0, bytesInLine, StandardCharsets.UTF_8) + "'",
e);
}
return null;
}
private static long parseLong(final byte[] bytes, final int start, int endExclusive) {
long result = 0;
int i = start;
int c = bytes[i];
int sign = 1;
if (c == '-') {
sign = -1;
i++;
}
while (i < endExclusive && (c = bytes[i]) >= 48 && c <= 57) {
result = result * 10 + (c - 48);
i++;
}
return sign * result;
}
private void readJSON(final InputStream in) throws IOException, InterruptedException {
final int chunksize = 100;
Entries entries = new Entries(chunksize);
final BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
String line = "{" + reader.readLine();
final JsonToEntryTransformer transformer = new JsonToEntryTransformer();
final Optional<Entry> firstEntry = transformer.toEntry(line);
if (firstEntry.isPresent()) {
LOGGER.debug("adding entry to queue: {}", firstEntry);
entries.add(firstEntry.get());
}
while ((line = reader.readLine()) != null) {
try {
final Optional<Entry> entry = transformer.toEntry(line);
if (entry.isPresent()) {
LOGGER.debug("adding entry to queue: {}", entry);
entries.add(entry.get());
}
} catch (final JsonParseException e) {
LOGGER.info("json parse error in line '" + line + "'", e);
}
if (entries.size() == chunksize) {
queue.put(entries);
entries = new Entries(chunksize);
}
}
queue.put(entries);
}
}
public TcpIngestor(final Path dataDirectory) throws IOException {
LOGGER.info("opening performance db: " + dataDirectory);
db = new PerformanceDb(dataDirectory);
LOGGER.debug("performance db open");
}
@Autowired
public TcpIngestor(final PerformanceDb db) {
this.db = db;
}
public PerformanceDb getDb() {
return db;
}
@Async
@Override
public void start() throws Exception {
final ArrayBlockingQueue<Entries> queue = new ArrayBlockingQueue<>(10);
serverThreadPool.submit(() -> {
Thread.currentThread().setName("db-ingestion");
boolean finished = false;
while (!finished) {
try {
db.putEntries(new BlockingQueueIterator<>(queue, Entries.POISON));
finished = true;
} catch (final Exception e) {
LOGGER.warn("Write to database failed. Will retry with the next element.", e);
}
}
return null;
});
serverThreadPool.submit(() -> listen(queue));
}
/**
 * Accept loop: accepts client connections until {@code acceptNewConnections}
 * is cleared, then drains the worker pool and poisons the queue so the
 * db-ingestion task terminates.
 *
 * @param queue the chunk queue shared with the db-ingestion task
 * @return always {@code null} (Callable-shaped for the executor)
 * @throws IOException if the server socket cannot be opened
 */
private Void listen(final ArrayBlockingQueue<Entries> queue) throws IOException {
    Thread.currentThread().setName("socket-listener");
    try (ServerSocket serverSocket = new ServerSocket(PORT)) {
        LOGGER.info("listening on port " + PORT);
        // Wake up from accept() periodically so a stop request is noticed.
        // FIX: the previous value, (int) TimeUnit.MILLISECONDS.toMillis(2),
        // is a no-op conversion yielding 2 ms -- a busy accept loop that
        // contradicted the "expected every 100ms" comment below.
        serverSocket.setSoTimeout(100);
        while (acceptNewConnections.get()) {
            try {
                final Socket clientSocket = serverSocket.accept();
                LOGGER.debug("accepted connection: " + clientSocket.getRemoteSocketAddress());
                workerThreadPool.submit(new Handler(clientSocket, queue));
                LOGGER.debug("handler submitted");
            } catch (final SocketTimeoutException e) {
                // expected every 100ms
                // needed to be able to stop the server
            } catch (final Exception e) {
                LOGGER.warn("Exception caught while waiting for a new connection. "
                        + "We'll ignore this error and keep going.", e);
            }
        }
        LOGGER.info("not accepting new connections. ");
        LOGGER.info("stopping worker pool");
        workerThreadPool.shutdown();
        try {
            workerThreadPool.awaitTermination(10, TimeUnit.MINUTES);
            LOGGER.debug("workers stopped");
        } catch (final InterruptedException e) {
            // Deliberately CLEAR the interrupt flag here: the poison pill
            // below is enqueued with an interruptible put() and must still
            // succeed, otherwise the db-ingestion thread never terminates.
            Thread.interrupted();
        }
        LOGGER.debug("adding poison");
        queue.put(Entries.POISON);
    } catch (final InterruptedException e) {
        LOGGER.info("Listener thread interrupted. Likely while adding the poison. "
                + "That would mean that the db-ingestion thread will not terminate. ");
        // Restore the interrupt status for the executor running this task
        // (the previous code cleared it, silently losing the interruption).
        Thread.currentThread().interrupt();
    } catch (final Exception e) {
        LOGGER.error("", e);
        throw e;
    }
    return null;
}
/**
 * Container shutdown callback; delegates the actual teardown to
 * {@link #close()}.
 */
@Override
@PreDestroy
public void destroy() {
    this.close();
}
@Override
public void close() {
    // Shuts the ingestor down: stop accepting connections, wait for the
    // server pool (listener + db-ingestion tasks) to finish, then close
    // the database.
    LOGGER.debug("stopping accept thread");
    acceptNewConnections.set(false);
    serverThreadPool.shutdown();
    try {
        serverThreadPool.awaitTermination(10, TimeUnit.MINUTES);
    } catch (final InterruptedException e) {
        // Clears the interrupt flag so db.close() below is not disturbed.
        // NOTE(review): consider restoring it afterwards with
        // Thread.currentThread().interrupt() so callers still observe the
        // interruption -- confirm db.close() tolerates an interrupted thread.
        Thread.interrupted();
    }
    LOGGER.debug("closing database");
    db.close();
    LOGGER.info("destroyed");
}
/**
 * Stand-alone entry point: opens the database at {@code Config.DATA_DIR},
 * starts the ingestor and then blocks (practically) forever.
 */
public static void main(final String[] args) throws Exception {
    // Lambda-based hook instead of an anonymous Thread subclass.
    Runtime.getRuntime().addShutdownHook(new Thread(() -> LOGGER.info("shutdown hook")));
    try (final TcpIngestor ingestor = new TcpIngestor(Config.DATA_DIR)) {
        ingestor.start();
        // Keep the JVM alive until it is terminated externally.
        TimeUnit.MILLISECONDS.sleep(Long.MAX_VALUE);
    }
}
}

View File

@@ -10,18 +10,18 @@ import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
@Configuration
public class WebConfiguration implements WebMvcConfigurer, HardcodedValues, PropertyKeys {
private final String outputDir;
private final String outputDir;
public WebConfiguration(@Value("${" + PATH_GENERATED_IMAGES + "}") final String outputDir) {
this.outputDir = outputDir;
}
public WebConfiguration(@Value("${" + PATH_GENERATED_IMAGES + "}") final String outputDir) {
this.outputDir = outputDir;
}
@Override
public void addResourceHandlers(final ResourceHandlerRegistry registry) {
@Override
public void addResourceHandlers(final ResourceHandlerRegistry registry) {
final String pathPattern = "/" + WEB_IMAGE_OUTPUT_PATH + "/**";
final String resourceLocation = "file:" + Paths.get(outputDir).toAbsolutePath() + "/";
final String pathPattern = "/" + WEB_IMAGE_OUTPUT_PATH + "/**";
final String resourceLocation = "file:" + Paths.get(outputDir).toAbsolutePath() + "/";
registry.addResourceHandler(pathPattern).addResourceLocations(resourceLocation);
}
registry.addResourceHandler(pathPattern).addResourceLocations(resourceLocation);
}
}

View File

@@ -14,271 +14,271 @@ import java.util.concurrent.ConcurrentHashMap;
*/
public class FastISODateParser {
private final static ConcurrentHashMap<Integer, Long> EPOCH_MILLI_MONTH_OFFSETS = new ConcurrentHashMap<>();
private final static ConcurrentHashMap<Integer, Long> EPOCH_MILLI_MONTH_OFFSETS = new ConcurrentHashMap<>();
private int cached_epochMilliMonthOffsetKey = 0;
private long cached_epochMilliMonthOffset = 0;
private int cached_epochMilliMonthOffsetKey = 0;
private long cached_epochMilliMonthOffset = 0;
/**
* Parsing ISO-8601 like dates, e.g. 2011-12-03T10:15:30.123Z or
* 2011-12-03T10:15:30+01:00.
*
* @param date in ISO-8601 format
* @return {@link OffsetDateTime}
*/
public OffsetDateTime parse(final String date) {
try {
final int year = Integer.parseInt(date, 0, 4, 10);
final int month = Integer.parseInt(date, 5, 7, 10);
final int dayOfMonth = Integer.parseInt(date, 8, 10, 10);
final int hour = Integer.parseInt(date, 11, 13, 10);
final int minute = Integer.parseInt(date, 14, 16, 10);
final int second = Integer.parseInt(date, 17, 19, 10);
/**
* Parsing ISO-8601 like dates, e.g. 2011-12-03T10:15:30.123Z or
* 2011-12-03T10:15:30+01:00.
*
* @param date in ISO-8601 format
* @return {@link OffsetDateTime}
*/
public OffsetDateTime parse(final String date) {
try {
final int year = Integer.parseInt(date, 0, 4, 10);
final int month = Integer.parseInt(date, 5, 7, 10);
final int dayOfMonth = Integer.parseInt(date, 8, 10, 10);
final int hour = Integer.parseInt(date, 11, 13, 10);
final int minute = Integer.parseInt(date, 14, 16, 10);
final int second = Integer.parseInt(date, 17, 19, 10);
final int[] nanosAndCharsRead = parseMilliseconds(date, 19);
final int nanos = nanosAndCharsRead[0];
final int offsetTimezone = 19 + nanosAndCharsRead[1];
final int[] nanosAndCharsRead = parseMilliseconds(date, 19);
final int nanos = nanosAndCharsRead[0];
final int offsetTimezone = 19 + nanosAndCharsRead[1];
final ZoneOffset offset = date.charAt(offsetTimezone) == 'Z' ? ZoneOffset.UTC
: parseZone(date.subSequence(offsetTimezone, date.length()));
return OffsetDateTime.of(year, month, dayOfMonth, hour, minute, second, nanos, offset);
} catch (final RuntimeException e) {
throw new IllegalArgumentException("'" + date + "' is not an ISO-8601 that can be parsed with "
+ FastISODateParser.class.getCanonicalName(), e);
}
}
final ZoneOffset offset = date.charAt(offsetTimezone) == 'Z' ? ZoneOffset.UTC
: parseZone(date.subSequence(offsetTimezone, date.length()));
return OffsetDateTime.of(year, month, dayOfMonth, hour, minute, second, nanos, offset);
} catch (final RuntimeException e) {
throw new IllegalArgumentException("'" + date + "' is not an ISO-8601 that can be parsed with "
+ FastISODateParser.class.getCanonicalName(), e);
}
}
public long parseAsEpochMilli(final String date) {
try {
final long year = parseLong(date, 0, 4);
final long month = parseLong(date, 5, 7);
final long dayOfMonth = parseLong(date, 8, 10);
final long hour = parseLong(date, 11, 13);
final long minute = parseLong(date, 14, 16);
final long second = parseLong(date, 17, 19);
public long parseAsEpochMilli(final String date) {
try {
final long year = parseLong(date, 0, 4);
final long month = parseLong(date, 5, 7);
final long dayOfMonth = parseLong(date, 8, 10);
final long hour = parseLong(date, 11, 13);
final long minute = parseLong(date, 14, 16);
final long second = parseLong(date, 17, 19);
final int[] nanosAndCharsRead = parseMilliseconds(date, 19);
final long nanos = nanosAndCharsRead[0];
final int offsetTimezone = 19 + nanosAndCharsRead[1];
final int[] nanosAndCharsRead = parseMilliseconds(date, 19);
final long nanos = nanosAndCharsRead[0];
final int offsetTimezone = 19 + nanosAndCharsRead[1];
final long zoneOffsetMillis = date.charAt(offsetTimezone) == 'Z' ? 0
: parseZoneToMillis(date.subSequence(offsetTimezone, date.length()));
final long zoneOffsetMillis = date.charAt(offsetTimezone) == 'Z' ? 0
: parseZoneToMillis(date.subSequence(offsetTimezone, date.length()));
final int epochMilliMonthOffsetKey = (int) (year * 12 + month - 1);
final long epochMilliMonthOffset;
final int epochMilliMonthOffsetKey = (int) (year * 12 + month - 1);
final long epochMilliMonthOffset;
if (cached_epochMilliMonthOffsetKey == epochMilliMonthOffsetKey) {
epochMilliMonthOffset = cached_epochMilliMonthOffset;
} else {
epochMilliMonthOffset = EPOCH_MILLI_MONTH_OFFSETS.computeIfAbsent(epochMilliMonthOffsetKey,
FastISODateParser::computeEpochMilliMonthOffset);
cached_epochMilliMonthOffsetKey = epochMilliMonthOffsetKey;
cached_epochMilliMonthOffset = epochMilliMonthOffset;
}
if (cached_epochMilliMonthOffsetKey == epochMilliMonthOffsetKey) {
epochMilliMonthOffset = cached_epochMilliMonthOffset;
} else {
epochMilliMonthOffset = EPOCH_MILLI_MONTH_OFFSETS.computeIfAbsent(epochMilliMonthOffsetKey,
FastISODateParser::computeEpochMilliMonthOffset);
cached_epochMilliMonthOffsetKey = epochMilliMonthOffsetKey;
cached_epochMilliMonthOffset = epochMilliMonthOffset;
}
final long epochMilli = epochMilliMonthOffset //
+ (dayOfMonth - 1) * 86_400_000 //
+ hour * 3_600_000 //
+ minute * 60_000 //
+ second * 1_000 //
+ nanos / 1_000_000//
- zoneOffsetMillis;
return epochMilli;
final long epochMilli = epochMilliMonthOffset //
+ (dayOfMonth - 1) * 86_400_000 //
+ hour * 3_600_000 //
+ minute * 60_000 //
+ second * 1_000 //
+ nanos / 1_000_000//
- zoneOffsetMillis;
return epochMilli;
} catch (final RuntimeException e) {
throw new IllegalArgumentException("'" + date + "' is not an ISO-8601 that can be parsed with "
+ FastISODateParser.class.getCanonicalName(), e);
}
}
} catch (final RuntimeException e) {
throw new IllegalArgumentException("'" + date + "' is not an ISO-8601 that can be parsed with "
+ FastISODateParser.class.getCanonicalName(), e);
}
}
private static Long computeEpochMilliMonthOffset(final int key) {
private static Long computeEpochMilliMonthOffset(final int key) {
final int year = key / 12;
final int month = key % 12 + 1;
final int year = key / 12;
final int month = key % 12 + 1;
final OffsetDateTime date = OffsetDateTime.of(year, month, 1, 0, 0, 0, 0, ZoneOffset.UTC);
final OffsetDateTime date = OffsetDateTime.of(year, month, 1, 0, 0, 0, 0, ZoneOffset.UTC);
return date.toInstant().toEpochMilli();
}
return date.toInstant().toEpochMilli();
}
private long parseLong(final String string, final int start, final int end) {
long result = 0;
for (int i = start; i < end; i++) {
// final int c = string.charAt(i);
final int c = string.codePointAt(i);
if (c < '0' || c > '9') {
throw new NumberFormatException(c + " is not a number at offset " + i);
}
result = result * 10 + (c - '0');
}
return result;
}
private long parseLong(final String string, final int start, final int end) {
long result = 0;
for (int i = start; i < end; i++) {
// final int c = string.charAt(i);
final int c = string.codePointAt(i);
if (c < '0' || c > '9') {
throw new NumberFormatException(c + " is not a number at offset " + i);
}
result = result * 10 + (c - '0');
}
return result;
}
private int[] parseMilliseconds(final String date, final int start) {
int result = 0;
int i = start;
while (i < date.length()) {
final char c = date.charAt(i);
i++;
if (c == '.') {
continue;
}
if (c < '0' || c > '9') {
break;
}
result = result * 10 + (c - '0');
}
final int readChars = i - start - 1;
private int[] parseMilliseconds(final String date, final int start) {
int result = 0;
int i = start;
while (i < date.length()) {
final char c = date.charAt(i);
i++;
if (c == '.') {
continue;
}
if (c < '0' || c > '9') {
break;
}
result = result * 10 + (c - '0');
}
final int readChars = i - start - 1;
while (i <= start + 10) {
result *= 10;
i++;
}
while (i <= start + 10) {
result *= 10;
i++;
}
return new int[] { result, readChars };
}
return new int[] { result, readChars };
}
private ZoneOffset parseZone(final CharSequence zoneString) {
private ZoneOffset parseZone(final CharSequence zoneString) {
final int hours = Integer.parseInt(zoneString, 0, 3, 10);
int minutes = Integer.parseInt(zoneString, 4, 6, 10);
final int hours = Integer.parseInt(zoneString, 0, 3, 10);
int minutes = Integer.parseInt(zoneString, 4, 6, 10);
// if hours is negative,then minutes must be too
minutes = (hours < 0 ? -1 : 1) * minutes;
return ZoneOffset.ofHoursMinutes(hours, minutes);
}
// if hours is negative,then minutes must be too
minutes = (hours < 0 ? -1 : 1) * minutes;
return ZoneOffset.ofHoursMinutes(hours, minutes);
}
private long parseZoneToMillis(final CharSequence zoneString) {
private long parseZoneToMillis(final CharSequence zoneString) {
final int hours = Integer.parseInt(zoneString, 0, 3, 10);
int minutes = Integer.parseInt(zoneString, 4, 6, 10);
final int hours = Integer.parseInt(zoneString, 0, 3, 10);
int minutes = Integer.parseInt(zoneString, 4, 6, 10);
// if hours is negative,then minutes must be too
minutes = (hours < 0 ? -1 : 1) * minutes;
return hours * 3_600_000 + minutes * 60_000;
}
// if hours is negative,then minutes must be too
minutes = (hours < 0 ? -1 : 1) * minutes;
return hours * 3_600_000 + minutes * 60_000;
}
public long parseAsEpochMilli(final byte[] date) {
return parseAsEpochMilli(date, 0);
}
public long parseAsEpochMilli(final byte[] date) {
return parseAsEpochMilli(date, 0);
}
public long parseAsEpochMilli(final byte[] date, final int beginIndex) {
try {
final int yearBegin = beginIndex + 0;
final int yearEnd = yearBegin + 4;
final int monthBegin = yearEnd + 1;
final int dayBegin = monthBegin + 3;
final int hourBegin = dayBegin + 3;
final int minuteBegin = hourBegin + 3;
final int secondBegin = minuteBegin + 3;
final int secondEnd = secondBegin + 2;
public long parseAsEpochMilli(final byte[] date, final int beginIndex) {
try {
final int yearBegin = beginIndex + 0;
final int yearEnd = yearBegin + 4;
final int monthBegin = yearEnd + 1;
final int dayBegin = monthBegin + 3;
final int hourBegin = dayBegin + 3;
final int minuteBegin = hourBegin + 3;
final int secondBegin = minuteBegin + 3;
final int secondEnd = secondBegin + 2;
final long year = parseLong(date, yearBegin, yearEnd);
final long month = parse2ByteLong(date, monthBegin);
final long dayOfMonth = parse2ByteLong(date, dayBegin);
final long hour = parse2ByteLong(date, hourBegin);
final long minute = parse2ByteLong(date, minuteBegin);
final long second = parse2ByteLong(date, secondBegin);
final long year = parseLong(date, yearBegin, yearEnd);
final long month = parse2ByteLong(date, monthBegin);
final long dayOfMonth = parse2ByteLong(date, dayBegin);
final long hour = parse2ByteLong(date, hourBegin);
final long minute = parse2ByteLong(date, minuteBegin);
final long second = parse2ByteLong(date, secondBegin);
final int[] nanosAndCharsRead = parseMilliseconds(date, secondEnd);
final long nanos = nanosAndCharsRead[0];
final int offsetTimezone = beginIndex + 19 + nanosAndCharsRead[1];
final int[] nanosAndCharsRead = parseMilliseconds(date, secondEnd);
final long nanos = nanosAndCharsRead[0];
final int offsetTimezone = beginIndex + 19 + nanosAndCharsRead[1];
final long zoneOffsetMillis = date[offsetTimezone] == 'Z' ? 0 : parseZoneToMillis(date, offsetTimezone);
final long zoneOffsetMillis = date[offsetTimezone] == 'Z' ? 0 : parseZoneToMillis(date, offsetTimezone);
final int epochMilliMonthOffsetKey = (int) (year * 12 + month - 1);
final long epochMilliMonthOffset;
final int epochMilliMonthOffsetKey = (int) (year * 12 + month - 1);
final long epochMilliMonthOffset;
if (cached_epochMilliMonthOffsetKey == epochMilliMonthOffsetKey) {
epochMilliMonthOffset = cached_epochMilliMonthOffset;
} else {
epochMilliMonthOffset = EPOCH_MILLI_MONTH_OFFSETS.computeIfAbsent(epochMilliMonthOffsetKey,
FastISODateParser::computeEpochMilliMonthOffset);
cached_epochMilliMonthOffsetKey = epochMilliMonthOffsetKey;
cached_epochMilliMonthOffset = epochMilliMonthOffset;
}
if (cached_epochMilliMonthOffsetKey == epochMilliMonthOffsetKey) {
epochMilliMonthOffset = cached_epochMilliMonthOffset;
} else {
epochMilliMonthOffset = EPOCH_MILLI_MONTH_OFFSETS.computeIfAbsent(epochMilliMonthOffsetKey,
FastISODateParser::computeEpochMilliMonthOffset);
cached_epochMilliMonthOffsetKey = epochMilliMonthOffsetKey;
cached_epochMilliMonthOffset = epochMilliMonthOffset;
}
final long epochMilli = epochMilliMonthOffset //
+ (dayOfMonth - 1) * 86_400_000 //
+ hour * 3_600_000 //
+ minute * 60_000 //
+ second * 1_000 //
+ nanos / 1_000_000//
- zoneOffsetMillis;
return epochMilli;
final long epochMilli = epochMilliMonthOffset //
+ (dayOfMonth - 1) * 86_400_000 //
+ hour * 3_600_000 //
+ minute * 60_000 //
+ second * 1_000 //
+ nanos / 1_000_000//
- zoneOffsetMillis;
return epochMilli;
} catch (final RuntimeException e) {
throw new IllegalArgumentException("'"
+ new String(date, beginIndex, date.length - beginIndex, StandardCharsets.UTF_8)
+ "' is not an ISO-8601 that can be parsed with " + FastISODateParser.class.getCanonicalName(), e);
}
}
} catch (final RuntimeException e) {
throw new IllegalArgumentException("'"
+ new String(date, beginIndex, date.length - beginIndex, StandardCharsets.UTF_8)
+ "' is not an ISO-8601 that can be parsed with " + FastISODateParser.class.getCanonicalName(), e);
}
}
private long parseLong(final byte[] bytes, final int start, final int end) {
long result = 0;
for (int i = start; i < end; i++) {
final int c = bytes[i];
if (c < '0' || c > '9') // (byte)48 = '0' and (byte)57 = '9'
{
throw new NumberFormatException(c + " is not a number at offset " + i);
}
result = result * 10 + (c - '0');
}
return result;
}
private long parseLong(final byte[] bytes, final int start, final int end) {
long result = 0;
for (int i = start; i < end; i++) {
final int c = bytes[i];
if (c < '0' || c > '9') // (byte)48 = '0' and (byte)57 = '9'
{
throw new NumberFormatException(c + " is not a number at offset " + i);
}
result = result * 10 + (c - '0');
}
return result;
}
private long parse2ByteLong(final byte[] bytes, final int start) {
private long parse2ByteLong(final byte[] bytes, final int start) {
final int c0 = bytes[start];
if (c0 < 48 || c0 > 57) // (byte)48 = '0' and (byte)57 = '9'
{
throw new NumberFormatException(c0 + " is not a number at offset " + start);
// throw new NumberFormatException();
}
long result = c0 - 48;
final int c0 = bytes[start];
if (c0 < 48 || c0 > 57) // (byte)48 = '0' and (byte)57 = '9'
{
throw new NumberFormatException(c0 + " is not a number at offset " + start);
// throw new NumberFormatException();
}
long result = c0 - 48;
final int c1 = bytes[start + 1];
if (c1 < 48 || c1 > 57) {
throw new NumberFormatException(c1 + " is not a number at offset " + (start + 1));
// throw new NumberFormatException();
}
result = result * 10 + (c1 - 48);
final int c1 = bytes[start + 1];
if (c1 < 48 || c1 > 57) {
throw new NumberFormatException(c1 + " is not a number at offset " + (start + 1));
// throw new NumberFormatException();
}
result = result * 10 + (c1 - 48);
return result;
}
return result;
}
private int[] parseMilliseconds(final byte[] date, final int start) {
int result = 0;
int i = start;
while (i < date.length) {
final byte c = date[i];
i++;
if (c == '.') {
continue;
}
if (c < '0' || c > '9') {
break;
}
result = result * 10 + (c - '0');
}
final int readChars = i - start - 1;
private int[] parseMilliseconds(final byte[] date, final int start) {
int result = 0;
int i = start;
while (i < date.length) {
final byte c = date[i];
i++;
if (c == '.') {
continue;
}
if (c < '0' || c > '9') {
break;
}
result = result * 10 + (c - '0');
}
final int readChars = i - start - 1;
while (i <= start + 10) {
result *= 10;
i++;
}
while (i <= start + 10) {
result *= 10;
i++;
}
return new int[] { result, readChars };
}
return new int[] { result, readChars };
}
private long parseZoneToMillis(final byte[] zoneBytes, final int beginIndex) {
private long parseZoneToMillis(final byte[] zoneBytes, final int beginIndex) {
final String zoneString = new String(zoneBytes, beginIndex, zoneBytes.length - beginIndex);
final int hours = Integer.parseInt(zoneString, 0, 3, 10);
int minutes = Integer.parseInt(zoneString, 4, 6, 10);
// if hours is negative,then minutes must be too
minutes = (hours < 0 ? -1 : 1) * minutes;
return hours * 3_600_000 + minutes * 60_000;
}
final String zoneString = new String(zoneBytes, beginIndex, zoneBytes.length - beginIndex);
final int hours = Integer.parseInt(zoneString, 0, 3, 10);
int minutes = Integer.parseInt(zoneString, 4, 6, 10);
// if hours is negative,then minutes must be too
minutes = (hours < 0 ? -1 : 1) * minutes;
return hours * 3_600_000 + minutes * 60_000;
}
}

View File

@@ -1,36 +1,36 @@
package org.lucares.pdbui.domain;
public class AutocompleteProposal {
private String value;
private String newQuery;
private int newCaretPosition;
private String value;
private String newQuery;
private int newCaretPosition;
public String getValue() {
return value;
}
public String getValue() {
return value;
}
public void setValue(final String value) {
this.value = value;
}
public void setValue(final String value) {
this.value = value;
}
@Override
public String toString() {
return value;
}
@Override
public String toString() {
return value;
}
public int getNewCaretPosition() {
return newCaretPosition;
}
public int getNewCaretPosition() {
return newCaretPosition;
}
public void setNewCaretPosition(final int newCaretPosition) {
this.newCaretPosition = newCaretPosition;
}
public void setNewCaretPosition(final int newCaretPosition) {
this.newCaretPosition = newCaretPosition;
}
public String getNewQuery() {
return newQuery;
}
public String getNewQuery() {
return newQuery;
}
public void setNewQuery(final String newQuery) {
this.newQuery = newQuery;
}
public void setNewQuery(final String newQuery) {
this.newQuery = newQuery;
}
}

View File

@@ -4,9 +4,9 @@ import java.util.Comparator;
public class AutocompleteProposalByValue implements Comparator<AutocompleteProposal> {
@Override
public int compare(final AutocompleteProposal o1, final AutocompleteProposal o2) {
return o1.getValue().compareToIgnoreCase(o2.getValue());
}
@Override
public int compare(final AutocompleteProposal o1, final AutocompleteProposal o2) {
return o1.getValue().compareToIgnoreCase(o2.getValue());
}
}

View File

@@ -3,19 +3,19 @@ package org.lucares.pdbui.domain;
import java.util.List;
public class AutocompleteResponse {
private List<AutocompleteProposal> proposals;
private List<AutocompleteProposal> proposals;
public List<AutocompleteProposal> getProposals() {
return proposals;
}
public List<AutocompleteProposal> getProposals() {
return proposals;
}
public void setProposals(final List<AutocompleteProposal> proposals) {
this.proposals = proposals;
}
public void setProposals(final List<AutocompleteProposal> proposals) {
this.proposals = proposals;
}
@Override
public String toString() {
return String.valueOf(proposals);
}
@Override
public String toString() {
return String.valueOf(proposals);
}
}

View File

@@ -3,62 +3,62 @@ package org.lucares.pdbui.domain;
import java.util.Collection;
public class DataSeriesStats {
private final int values;
private final long maxValue;
private final double average;
private final int plottedValues;
private final int values;
private final long maxValue;
private final double average;
private final int plottedValues;
public DataSeriesStats(final int values, final int plottedValues, final long maxValue, final double average) {
this.values = values;
this.plottedValues = plottedValues;
this.maxValue = maxValue;
this.average = average;
}
public DataSeriesStats(final int values, final int plottedValues, final long maxValue, final double average) {
this.values = values;
this.plottedValues = plottedValues;
this.maxValue = maxValue;
this.average = average;
}
/**
* The number of values in the date range, without applying the y-range.
*
* @return total number of values
*/
public int getValues() {
return values;
}
/**
* The number of values in the date range, without applying the y-range.
*
* @return total number of values
*/
public int getValues() {
return values;
}
/**
* The number of values in the date range <em>and</em> the y-range.
*
* @return number of plotted values
*/
public int getPlottedValues() {
return plottedValues;
}
/**
* The number of values in the date range <em>and</em> the y-range.
*
* @return number of plotted values
*/
public int getPlottedValues() {
return plottedValues;
}
public long getMaxValue() {
return maxValue;
}
public long getMaxValue() {
return maxValue;
}
public double getAverage() {
return average;
}
public double getAverage() {
return average;
}
@Override
public String toString() {
return "[values=" + values + ", maxValue=" + maxValue + ", average=" + average + "]";
}
@Override
public String toString() {
return "[values=" + values + ", maxValue=" + maxValue + ", average=" + average + "]";
}
public static double average(final Collection<DataSeriesStats> stats) {
long n = 0;
double average = 0;
public static double average(final Collection<DataSeriesStats> stats) {
long n = 0;
double average = 0;
for (final DataSeriesStats stat : stats) {
final int newValues = stat.getValues();
final double newAverage = stat.getAverage();
if (newValues > 0) {
average = (average * n + newAverage * newValues) / (n + newValues);
n += newValues;
}
}
for (final DataSeriesStats stat : stats) {
final int newValues = stat.getValues();
final double newAverage = stat.getAverage();
if (newValues > 0) {
average = (average * n + newAverage * newValues) / (n + newValues);
n += newValues;
}
}
return average;
}
return average;
}
}

View File

@@ -9,68 +9,68 @@ import org.lucares.pdb.api.DateTimeRange;
public class DateRange {
private static final DateTimeFormatter DATE_FORMAT = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
private static final DateTimeFormatter DATE_FORMAT = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
private String startDate;
private String endDate;
private String startDate;
private String endDate;
DateRange() {
super();
}
DateRange() {
super();
}
/**
*
* @param startDate date in format 'yyyy-MM-dd HH:mm:ss'
* @param endDate date in format 'yyyy-MM-dd HH:mm:ss'
*/
public DateRange(final String startDate, final String endDate) {
this.startDate = startDate;
this.endDate = endDate;
}
/**
*
* @param startDate date in format 'yyyy-MM-dd HH:mm:ss'
* @param endDate date in format 'yyyy-MM-dd HH:mm:ss'
*/
public DateRange(final String startDate, final String endDate) {
this.startDate = startDate;
this.endDate = endDate;
}
/**
*
* @return date in format 'yyyy-MM-dd HH:mm:ss'
*/
public String getStartDate() {
return startDate;
}
/**
*
* @return date in format 'yyyy-MM-dd HH:mm:ss'
*/
public String getStartDate() {
return startDate;
}
/**
*
* @param startDate date in format 'yyyy-MM-dd HH:mm:ss'
*/
public void setStartDate(final String startDate) {
this.startDate = startDate;
}
/**
*
* @param startDate date in format 'yyyy-MM-dd HH:mm:ss'
*/
public void setStartDate(final String startDate) {
this.startDate = startDate;
}
/**
*
* @return date in format 'yyyy-MM-dd HH:mm:ss'
*/
public String getEndDate() {
return endDate;
}
/**
*
* @return date in format 'yyyy-MM-dd HH:mm:ss'
*/
public String getEndDate() {
return endDate;
}
/**
*
* @param endDate date in format 'yyyy-MM-dd HH:mm:ss'
*/
public void setEndDate(final String endDate) {
this.endDate = endDate;
}
/**
*
* @param endDate date in format 'yyyy-MM-dd HH:mm:ss'
*/
public void setEndDate(final String endDate) {
this.endDate = endDate;
}
@Override
public String toString() {
return startDate + " - " + endDate;
}
@Override
public String toString() {
return startDate + " - " + endDate;
}
public DateTimeRange toDateTimeRange() {
public DateTimeRange toDateTimeRange() {
final OffsetDateTime start = LocalDateTime.parse(startDate, DATE_FORMAT).atOffset(ZoneOffset.UTC);
final OffsetDateTime end = LocalDateTime.parse(endDate, DATE_FORMAT).atOffset(ZoneOffset.UTC);
final OffsetDateTime start = LocalDateTime.parse(startDate, DATE_FORMAT).atOffset(ZoneOffset.UTC);
final OffsetDateTime end = LocalDateTime.parse(endDate, DATE_FORMAT).atOffset(ZoneOffset.UTC);
return new DateTimeRange(start, end);
}
return new DateTimeRange(start, end);
}
}

View File

@@ -6,39 +6,39 @@ import java.util.List;
import java.util.Set;
public class FilterDefaults {
private Set<String> groupBy;
private List<String> fields;
private Set<String> groupBy;
private String splitBy;
private List<String> fields;
public FilterDefaults(final List<String> fields, final Set<String> groupBy, final String splitBy) {
this.fields = new ArrayList<String>(fields);
this.groupBy = new HashSet<String>(groupBy);
this.splitBy = splitBy;
}
private String splitBy;
public Set<String> getGroupBy() {
return new HashSet<String>(groupBy);
}
public FilterDefaults(final List<String> fields, final Set<String> groupBy, final String splitBy) {
this.fields = new ArrayList<String>(fields);
this.groupBy = new HashSet<String>(groupBy);
this.splitBy = splitBy;
}
public void setGroupBy(Set<String> groupBy) {
this.groupBy = new HashSet<String>(groupBy);
}
public Set<String> getGroupBy() {
return new HashSet<String>(groupBy);
}
public List<String> getFields() {
return new ArrayList<String>(fields);
}
public void setGroupBy(Set<String> groupBy) {
this.groupBy = new HashSet<String>(groupBy);
}
public void setFields(List<String> fields) {
this.fields = new ArrayList<String>(fields);
}
public List<String> getFields() {
return new ArrayList<String>(fields);
}
public String getSplitBy() {
return splitBy;
}
public void setFields(List<String> fields) {
this.fields = new ArrayList<String>(fields);
}
public void setSplitBy(String splitBy) {
this.splitBy = splitBy;
}
public String getSplitBy() {
return splitBy;
}
public void setSplitBy(String splitBy) {
this.splitBy = splitBy;
}
}

View File

@@ -8,166 +8,166 @@ import org.lucares.pdb.plot.api.AxisScale;
import org.lucares.pdb.plot.api.Limit;
public class PlotRequest {
private String query;
private String query;
private int height = 1000;
private int height = 1000;
private int width = 1000;
private int width = 1000;
private int thumbnailMaxWidth = 0;
private int thumbnailMaxWidth = 0;
private int thumbnailMaxHeight = 0;
private int thumbnailMaxHeight = 0;
private List<String> groupBy;
private List<String> groupBy;
private Limit limitBy = Limit.NO_LIMIT;
private Limit limitBy = Limit.NO_LIMIT;
private AxisScale yAxisScale = AxisScale.LINEAR;
private AxisScale yAxisScale = AxisScale.LINEAR;
private int limit = Integer.MAX_VALUE;
private int limit = Integer.MAX_VALUE;
private String dateRange;
private String dateRange;
private List<Aggregate> aggregates = new ArrayList<>();
private List<Aggregate> aggregates = new ArrayList<>();
private int yRangeMin;
private int yRangeMax;
private TimeRangeUnit yRangeUnit = TimeRangeUnit.AUTOMATIC;
private int yRangeMin;
private int yRangeMax;
private TimeRangeUnit yRangeUnit = TimeRangeUnit.AUTOMATIC;
private boolean keyOutside;
private boolean keyOutside;
private boolean generateThumbnail;
private boolean generateThumbnail;
public String getQuery() {
return query;
}
public String getQuery() {
return query;
}
public void setQuery(final String query) {
this.query = query;
}
public void setQuery(final String query) {
this.query = query;
}
public int getWidth() {
return width;
}
public int getWidth() {
return width;
}
public void setWidth(final int width) {
this.width = width;
}
public void setWidth(final int width) {
this.width = width;
}
public int getHeight() {
return height;
}
public int getHeight() {
return height;
}
public void setHeight(final int height) {
this.height = height;
}
public void setHeight(final int height) {
this.height = height;
}
public int getThumbnailMaxWidth() {
return thumbnailMaxWidth;
}
public int getThumbnailMaxWidth() {
return thumbnailMaxWidth;
}
public void setThumbnailMaxWidth(final int thumbnailMaxWidth) {
this.thumbnailMaxWidth = thumbnailMaxWidth;
}
public void setThumbnailMaxWidth(final int thumbnailMaxWidth) {
this.thumbnailMaxWidth = thumbnailMaxWidth;
}
public int getThumbnailMaxHeight() {
return thumbnailMaxHeight;
}
public int getThumbnailMaxHeight() {
return thumbnailMaxHeight;
}
public void setThumbnailMaxHeight(final int thumbnailMaxHeight) {
this.thumbnailMaxHeight = thumbnailMaxHeight;
}
public void setThumbnailMaxHeight(final int thumbnailMaxHeight) {
this.thumbnailMaxHeight = thumbnailMaxHeight;
}
@Override
public String toString() {
return query + ":" + height + "x" + width;
}
@Override
public String toString() {
return query + ":" + height + "x" + width;
}
/** @return the field names the data is grouped by */
public List<String> getGroupBy() {
    return groupBy;
}

/** @param groupBy the field names the data is grouped by */
public void setGroupBy(final List<String> groupBy) {
    this.groupBy = groupBy;
}

/** @return the criterion used to limit the number of plotted series */
public Limit getLimitBy() {
    return limitBy;
}

/** @param limitBy the criterion used to limit the number of plotted series */
public void setLimitBy(final Limit limitBy) {
    this.limitBy = limitBy;
}

/** @return the maximum number of series/values to plot */
public int getLimit() {
    return limit;
}

/** @param limit the maximum number of series/values to plot */
public void setLimit(final int limit) {
    this.limit = limit;
}

/** @return the textual date-range expression for the plot */
public String getDateRange() {
    return dateRange;
}

/** @param dateRange the textual date-range expression for the plot */
public void setDateRange(final String dateRange) {
    this.dateRange = dateRange;
}

/**
 * @return the y-axis scale
 *         (property name is asymmetric to the backing field {@code yAxisScale} — kept for API compatibility)
 */
public AxisScale getAxisScale() {
    return yAxisScale;
}

/** @param yAxis the y-axis scale */
public void setAxisScale(final AxisScale yAxis) {
    this.yAxisScale = yAxis;
}

/**
 * @param aggregates the aggregate functions to apply
 *        (singular setter name vs. plural getter — kept for API compatibility)
 */
public void setAggregate(final List<Aggregate> aggregates) {
    this.aggregates = aggregates;
}

/** @return the aggregate functions to apply */
public List<Aggregate> getAggregates() {
    return aggregates;
}
/** @param keyOutside whether the legend is drawn outside the chart area */
public void setKeyOutside(final boolean keyOutside) {
    this.keyOutside = keyOutside;
}

/** @return whether the legend is drawn outside the chart area */
public boolean isKeyOutside() {
    return keyOutside;
}

/** @return whether a thumbnail should be generated */
public boolean isGenerateThumbnail() {
    return generateThumbnail;
}

/** @param generateThumbnail whether a thumbnail should be generated */
public void setGenerateThumbnail(final boolean generateThumbnail) {
    this.generateThumbnail = generateThumbnail;
}

/** @return the lower bound of the y range */
public int getyRangeMin() {
    return yRangeMin;
}

/** @param yRangeMin the lower bound of the y range */
public void setyRangeMin(final int yRangeMin) {
    this.yRangeMin = yRangeMin;
}

/** @return the upper bound of the y range */
public int getyRangeMax() {
    return yRangeMax;
}

/** @param yRangeMax the upper bound of the y range */
public void setyRangeMax(final int yRangeMax) {
    this.yRangeMax = yRangeMax;
}

/** @return the unit of the y range */
public TimeRangeUnit getyRangeUnit() {
    return yRangeUnit;
}

/** @param yRangeUnit the unit of the y range */
public void setyRangeUnit(final TimeRangeUnit yRangeUnit) {
    this.yRangeUnit = yRangeUnit;
}
}

View File

@@ -1,43 +1,43 @@
package org.lucares.pdbui.domain;
/**
 * Response returned after a plot has been rendered: the URL of the generated
 * image, an optional thumbnail URL, and rendering statistics.
 */
public class PlotResponse {

    private String imageUrl = "";
    private PlotResponseStats stats;
    private String thumbnailUrl;

    /**
     * @param stats        statistics gathered while rendering the plot
     * @param imageUrl     URL of the generated image
     * @param thumbnailUrl URL of the generated thumbnail (may be null if none was requested)
     */
    public PlotResponse(final PlotResponseStats stats, final String imageUrl, final String thumbnailUrl) {
        this.stats = stats;
        this.imageUrl = imageUrl;
        this.thumbnailUrl = thumbnailUrl;
    }

    /** @return URL of the generated image */
    public String getImageUrl() {
        return imageUrl;
    }

    /** @param imageUrl URL of the generated image */
    public void setImageUrl(final String imageUrl) {
        this.imageUrl = imageUrl;
    }

    /** @return URL of the generated thumbnail */
    public String getThumbnailUrl() {
        return thumbnailUrl;
    }

    /** @return statistics gathered while rendering the plot */
    public PlotResponseStats getStats() {
        return stats;
    }

    /** @param stats statistics gathered while rendering the plot */
    public void setStats(final PlotResponseStats stats) {
        this.stats = stats;
    }

    /** @param thumbnailUrl URL of the generated thumbnail */
    public void setThumbnailUrl(final String thumbnailUrl) {
        this.thumbnailUrl = thumbnailUrl;
    }

    @Override
    public String toString() {
        return "PlotResponse [imageUrl=" + imageUrl + ", stats=" + stats + ", thumbnailUrl=" + thumbnailUrl + "]";
    }
}

View File

@@ -6,94 +6,94 @@ import java.util.List;
import org.lucares.recommind.logs.DataSeries;
public class PlotResponseStats {
private long maxValue;
private long maxValue;
private int values;
private int values;
private double average;
private double average;
private int plottedValues;
private int plottedValues;
private List<DataSeriesStats> dataSeriesStats;
private List<DataSeriesStats> dataSeriesStats;
public PlotResponseStats() {
super();
}
public PlotResponseStats() {
super();
}
public PlotResponseStats(final long maxValue, final int values, final int plottedValues, final double average,
final List<DataSeriesStats> dataSeriesStats) {
public PlotResponseStats(final long maxValue, final int values, final int plottedValues, final double average,
final List<DataSeriesStats> dataSeriesStats) {
this.maxValue = maxValue;
this.values = values;
this.plottedValues = plottedValues;
this.average = average;
this.dataSeriesStats = dataSeriesStats;
}
this.maxValue = maxValue;
this.values = values;
this.plottedValues = plottedValues;
this.average = average;
this.dataSeriesStats = dataSeriesStats;
}
public long getMaxValue() {
return maxValue;
}
public long getMaxValue() {
return maxValue;
}
public void setMaxValue(final long maxValue) {
this.maxValue = maxValue;
}
public void setMaxValue(final long maxValue) {
this.maxValue = maxValue;
}
public int getValues() {
return values;
}
public int getValues() {
return values;
}
public void setValues(final int values) {
this.values = values;
}
public void setValues(final int values) {
this.values = values;
}
public int getPlottedValues() {
return plottedValues;
}
public int getPlottedValues() {
return plottedValues;
}
public void setPlottedValues(final int plottedValues) {
this.plottedValues = plottedValues;
}
public void setPlottedValues(final int plottedValues) {
this.plottedValues = plottedValues;
}
public double getAverage() {
return average;
}
public double getAverage() {
return average;
}
public void setAverage(final double average) {
this.average = average;
}
public void setAverage(final double average) {
this.average = average;
}
public List<DataSeriesStats> getDataSeriesStats() {
return dataSeriesStats;
}
public List<DataSeriesStats> getDataSeriesStats() {
return dataSeriesStats;
}
public void setDataSeriesStats(final List<DataSeriesStats> dataSeriesStats) {
this.dataSeriesStats = dataSeriesStats;
}
public void setDataSeriesStats(final List<DataSeriesStats> dataSeriesStats) {
this.dataSeriesStats = dataSeriesStats;
}
@Override
public String toString() {
return "PlotResponseStats [maxValue=" + maxValue + ", values=" + values + ", average=" + average
+ ", plottedValues=" + plottedValues + ", dataSeriesStats=" + dataSeriesStats + "]";
}
@Override
public String toString() {
return "PlotResponseStats [maxValue=" + maxValue + ", values=" + values + ", average=" + average
+ ", plottedValues=" + plottedValues + ", dataSeriesStats=" + dataSeriesStats + "]";
}
public static PlotResponseStats fromDataSeries(final List<DataSeries> dataSeries) {
public static PlotResponseStats fromDataSeries(final List<DataSeries> dataSeries) {
int values = 0;
int plottedValues = 0;
long maxValue = 0;
final List<DataSeriesStats> dataSeriesStats = new ArrayList<>();
int values = 0;
int plottedValues = 0;
long maxValue = 0;
final List<DataSeriesStats> dataSeriesStats = new ArrayList<>();
for (final DataSeries dataSerie : dataSeries) {
values += dataSerie.getValues();
plottedValues += dataSerie.getPlottedValues();
maxValue = Math.max(maxValue, dataSerie.getMaxValue());
for (final DataSeries dataSerie : dataSeries) {
values += dataSerie.getValues();
plottedValues += dataSerie.getPlottedValues();
maxValue = Math.max(maxValue, dataSerie.getMaxValue());
dataSeriesStats.add(new DataSeriesStats(dataSerie.getValues(), dataSerie.getPlottedValues(),
dataSerie.getMaxValue(), dataSerie.getAverage()));
}
dataSeriesStats.add(new DataSeriesStats(dataSerie.getValues(), dataSerie.getPlottedValues(),
dataSerie.getMaxValue(), dataSerie.getAverage()));
}
final double average = Math.round(DataSeriesStats.average(dataSeriesStats));
final double average = Math.round(DataSeriesStats.average(dataSeriesStats));
return new PlotResponseStats(maxValue, values, plottedValues, average, dataSeriesStats);
}
return new PlotResponseStats(maxValue, values, plottedValues, average, dataSeriesStats);
}
}

View File

@@ -1,5 +1,5 @@
package org.lucares.pdbui.domain;
/**
 * Unit used to interpret a time-valued axis range; AUTOMATIC lets the
 * plotter choose an appropriate unit.
 */
public enum TimeRangeUnit {
    AUTOMATIC, MILLISECONDS, SECONDS, MINUTES, HOURS, DAYS
}