diff --git a/app/databrowser-json/pom.xml b/app/databrowser-json/pom.xml
new file mode 100644
index 0000000000..9fffffa15d
--- /dev/null
+++ b/app/databrowser-json/pom.xml
@@ -0,0 +1,60 @@
+/**
+ * <p>
+ * Preferences used by the {@link JsonArchiveReader}.
+ * </p>
+ *
+ * <p>
+ * Each of the parameters corresponds to a property in the preferences system,
+ * using the <code>org.phoebus.archive.reader.json</code> namespace.
+ * </p>
+ *
+ * <p>
+ * Please refer to the <code>archive_reader_json_preferences.properties</code>
+ * file for a full list of available properties and their meanings.
+ * </p>
+ *
+ * @param honor_zero_precision
+ *  whether a floating-point value with a precision of zero should be
+ *  printed without fractional digits (<code>true</code>) or
+ *  whether such a value should be printed using a default format
+ *  (<code>false</code>).
+ */
+public record JsonArchivePreferences(
+ boolean honor_zero_precision) {
+
+ // Shared singleton instance; created eagerly when this class is first
+ // loaded, so all readers observe the same preference values.
+ private final static JsonArchivePreferences DEFAULT_INSTANCE;
+
+ static {
+ DEFAULT_INSTANCE = loadPreferences();
+ }
+
+ /**
+ * Returns the default instance of the preferences. This is the instance
+ * that is automatically configured through Phoebus’s
+ * {@link PreferencesReader}.
+ *
+ * @return preference instance created using the {@link PreferencesReader}.
+ */
+ public static JsonArchivePreferences getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+ /**
+ * Loads the preferences through Phoebus’s {@link PreferencesReader},
+ * logging each resolved value at CONFIG level.
+ *
+ * @return preferences instance populated from the preferences system.
+ */
+ private static JsonArchivePreferences loadPreferences() {
+ final var logger = Logger.getLogger(
+ JsonArchivePreferences.class.getName());
+ final var preference_reader = new PreferencesReader(
+ JsonArchivePreferences.class,
+ "/archive_reader_json_preferences.properties");
+ final var honor_zero_precision = preference_reader.getBoolean(
+ "honor_zero_precision");
+ logger.config("honor_zero_precision = " + honor_zero_precision);
+ return new JsonArchivePreferences(honor_zero_precision);
+ }
+
+}
diff --git a/app/databrowser-json/src/main/java/org/phoebus/archive/reader/json/JsonArchiveReader.java b/app/databrowser-json/src/main/java/org/phoebus/archive/reader/json/JsonArchiveReader.java
new file mode 100644
index 0000000000..9c55f4283c
--- /dev/null
+++ b/app/databrowser-json/src/main/java/org/phoebus/archive/reader/json/JsonArchiveReader.java
@@ -0,0 +1,501 @@
+/*******************************************************************************
+ * Copyright (c) 2013-2024 aquenos GmbH.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Public License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ ******************************************************************************/
+
+package org.phoebus.archive.reader.json;
+
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonToken;
+import com.fasterxml.jackson.core.json.JsonReadFeature;
+import org.phoebus.archive.reader.ArchiveReader;
+import org.phoebus.archive.reader.UnknownChannelException;
+import org.phoebus.archive.reader.ValueIterator;
+import org.phoebus.archive.reader.json.internal.JsonArchiveInfoReader;
+import org.phoebus.archive.reader.json.internal.JsonValueIterator;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.lang.ref.Cleaner;
+import java.math.BigInteger;
+import java.net.URL;
+import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
+import java.time.Instant;
+import java.util.Collection;
+import java.util.LinkedList;
+import java.util.Map;
+import java.util.Objects;
+import java.util.WeakHashMap;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import java.util.zip.DeflaterInputStream;
+import java.util.zip.GZIPInputStream;
+
+/**
+ * + * Archive reader implementation that connects to an archive server using an + * HTTP / JSON based protocol. Typically, this reader is used together with the + * JSON archive server. However, it will work with any compliant HTTP server. + *
+ * + *+ * Instances of this class are thread-safe. + *
+ */ +public class JsonArchiveReader implements ArchiveReader { + + private final static BigInteger ONE_BILLION = BigInteger + .valueOf(1000000000L); + + private final Cleaner cleaner; + private final String description; + private final String http_url; + private final Map+ * Creates an archive reader that requests samples from the specified URL. + * The URL must start with the scheme "json" followed by the HTTP or + * HTTPS URL of the archive server. The URL must include the context path, + * but not include the servlet path. + *
+ * + *
+ * For example, the URL <code>json:http://localhost:8080/</code> will
+ * expect the archive server to run on port 8080 of the same computer and
+ * will use the URL
+ * <code>http://localhost:8080/archive/&lt;key&gt;/channels-by-pattern/&lt;pattern&gt;</code>
+ * when searching for channels.
+ * </p>
+ *
+ * <p>
+ * If not specified, the <code>key</code> is assumed to be <code>1</code>.
+ * The key can be specified by adding <code>;key=&lt;key&gt;</code> to the
+ * archive URL (e.g. <code>json:http://localhost:8080/;key=2</code>).
+ * </p>
+ *
+ * @param timestamp timestamp that shall be converted.
+ * @return number of nanoseconds since epoch represented by
+ *  <code>timestamp</code>.
+ */
+ private static BigInteger timestampToBigInteger(final Instant timestamp) {
+ // nanos + seconds * 10^9: the total number of nanoseconds since epoch,
+ // computed as a BigInteger so that it cannot overflow.
+ return BigInteger.valueOf(timestamp.getNano()).add(
+ BigInteger.valueOf(timestamp.getEpochSecond()).multiply(
+ ONE_BILLION));
+ }
+
+ /**
+ *
+ * Sends a GET
request to the archive source and returns the
+ * response.
+ *
+ * Sends a GET
request to the archive source and returns a
+ * JSON parser for the response.
+ *
+ * @param count
+ *  number of samples that shall be retrieved or <code>null</code> if
+ *  raw samples shall be retrieved.
+ * @return
+ *  iterator iterating over the samples for the specified time period in
+ *  ascending order by time.
+ * @throws IOException
+ *  if there is an error while requesting the samples. If an error occurs
+ *  later, while using the iterator, no exception is thrown and the
+ *  iterator’s hasNext() method simply returns <code>false</code>.
+ * @throws UnknownChannelException
+ *  if the specified channel is not present in the archive.
+ */
+ private JsonValueIterator getValues(
+ final String name,
+ final Instant start,
+ final Instant end,
+ final Integer count)
+ throws IOException, UnknownChannelException {
+ // Construct the request URL. The start and end timestamps are encoded
+ // as nanoseconds since epoch (see timestampToBigInteger).
+ final var sb = new StringBuilder();
+ sb.append("/");
+ sb.append(key);
+ sb.append("/samples/");
+ sb.append(URLEncoder.encode(name, StandardCharsets.UTF_8));
+ sb.append("?start=");
+ sb.append(timestampToBigInteger(start));
+ sb.append("&end=");
+ sb.append(timestampToBigInteger(end));
+ if (count != null) {
+ sb.append("&count=");
+ sb.append(count);
+ }
+ final var request_url = sb.toString();
+ // Send the request and create the JSON parser for the response.
+ final JsonParser parser;
+ try {
+ parser = doGetJson(request_url);
+ } catch (FileNotFoundException e) {
+ // doGetJson signals a missing resource with FileNotFoundException
+ // (presumably an HTTP 404 — confirm in doGet), which for this
+ // endpoint means the channel does not exist in the archive.
+ throw new UnknownChannelException(name);
+ }
+ // Before creating the iterator, we have to advance the parser to the
+ // first token.
+ try {
+ parser.nextToken();
+ } catch (IOException | RuntimeException e) {
+ parser.close();
+ throw e;
+ }
+ // Prepare the cleanup action. This action is executed when the
+ // iterator is closed or garbage collected.
+ final Runnable iterator_cleanup_action = () -> {
+ try {
+ parser.close();
+ } catch (IOException e) {
+ // We ignore an exception that happens on cleanup.
+ }
+ };
+ // Create an iterator based on the JSON parser.
+ try {
+ final var iterator = new JsonValueIterator(
+ parser,
+ this::unregisterValueIterator,
+ request_url,
+ preferences.honor_zero_precision());
+ // We register the iterator. This has two purposes: First, we have to
+ // be able to call its cancel() method. Second, we need to close the
+ // parser when the iterator is closed or garbage collected. We do
+ // not register the iterator if it has no more elements. In this
+ // case, it might already be closed (and if it is not, we close it
+ // now), so we do not have run any cleanup actions either and if we
+ // registered it, it would never be unregistered because it is
+ // already closed.
+ if (iterator.hasNext()) {
+ registerValueIterator(iterator, iterator_cleanup_action);
+ } else {
+ // The iterator should already be closed, but calling the
+ // close() method anyway does not hurt.
+ iterator.close();
+ }
+ return iterator;
+ } catch (IOException | RuntimeException e) {
+ // If we cannot create the iterator, we have to close the parser
+ // now. First, it is not going to be used for anything else.
+ // Second, the iterator does not exist, so it will not be closed
+ // when the iterator is closed.
+ parser.close();
+ throw e;
+ }
+ }
+
+ /**
+ * Registers a value iterator with this reader. This method is only
+ * intended for use by the {@link JsonValueIterator} constructor.
+ *
+ * @param iterator
+ * iterator that is calling this method.
+ * @param cleanup_action
+ * cleanup action that shall be run when the iterator is garbage
+ * collected or when {@link #unregisterValueIterator(JsonValueIterator)}
+ * is called for the iterator.
+ */
+ private void registerValueIterator(
+ JsonValueIterator iterator, Runnable cleanup_action) {
+ // If the iterator has not been closed properly, we have to ensure that
+ // we close the JSON parser and input stream. Usually, this will happen
+ // when unregisterValueIterator is called, which is called by the
+ // iterator’s close method. However, if close is never called for some
+ // reason, registering the cleanup action ensures that the external
+ // resources are freed. We cannot explicitly remove the iterator from
+ // our iterators map in this case, but this is not a problem because
+ // the WeakHashMap will automatically remove entries when the key is
+ // garbage collected.
+ final var cleanable = cleaner.register(iterator, cleanup_action);
+ // The iterators map may be accessed from several threads (registration
+ // here, removal in unregisterValueIterator), so guard it with its own
+ // monitor.
+ synchronized (iterators) {
+ iterators.put(iterator, cleanable);
+ }
+ }
+
+ /**
+ * Retrieves the archive description from the archive server. If the
+ * description cannot be received, a warning is logged and a generic
+ * description is returned.
+ *
+ * @return
+ * the description for the archive specified by the URL and archive key or
+ * a generic description if the archive information cannot be retrieved
+ * from the server.
+ * @throws IllegalArgumentException
+ * if the server sends valid archive information, but it does not contain
+ * any information for the specified archive key.
+ */
+ private String retrieveArchiveDescription() {
+ try (final var parser = doGetJson("/")) {
+ // We have to advance to the first token before calling
+ // readArchiveInfos(…).
+ parser.nextToken();
+ final var archive_infos = JsonArchiveInfoReader
+ .readArchiveInfos(parser);
+ for (final var archive_info : archive_infos) {
+ if (archive_info.archive_key() == key) {
+ return archive_info.archive_description();
+ }
+ }
+ throw new IllegalArgumentException(
+ "The server at \""
+ + http_url
+ + "\" does not provide an archive with the key "
+ + key
+ + ".");
+ } catch (IOException e) {
+ // Pass the exception to the log record so that the root cause of
+ // the failure is preserved and not silently dropped.
+ logger.log(
+ Level.WARNING,
+ "Could not load archive information from server for URL \""
+ + http_url
+ + "\".",
+ e);
+ // If we cannot get the archive description, we still want to
+ // initialize the archive reader. Maybe there is a temporary
+ // network problem and the archive reader will work correctly
+ // later. So, instead of throwing an exception, we rather use a
+ // generic description instead of the one retrieved from the
+ // server.
+ return "Provides archive access over HTTP/JSON.";
+ }
+ }
+
+ /**
+ * Unregisters an iterator that has previously been registered. This
+ * method is called when the iterator is closed.
+ *
+ * @param iterator
+ * iterator that was previously registered using
+ * {@link #registerValueIterator(JsonValueIterator, Runnable)}.
+ */
+ private void unregisterValueIterator(JsonValueIterator iterator) {
+ final Cleaner.Cleanable cleanable;
+ synchronized (iterators) {
+ cleanable = iterators.remove(iterator);
+ }
+ // Run the cleanup action outside the synchronized block; it closes
+ // external resources and does not touch the map.
+ if (cleanable != null) {
+ cleanable.clean();
+ }
+ }
+
+}
diff --git a/app/databrowser-json/src/main/java/org/phoebus/archive/reader/json/JsonArchiveReaderFactory.java b/app/databrowser-json/src/main/java/org/phoebus/archive/reader/json/JsonArchiveReaderFactory.java
new file mode 100644
index 0000000000..9263c14c53
--- /dev/null
+++ b/app/databrowser-json/src/main/java/org/phoebus/archive/reader/json/JsonArchiveReaderFactory.java
@@ -0,0 +1,43 @@
+/*******************************************************************************
+ * Copyright (c) 2013-2024 aquenos GmbH.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Public License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ ******************************************************************************/
+
+package org.phoebus.archive.reader.json;
+
+import org.phoebus.archive.reader.ArchiveReader;
+import org.phoebus.archive.reader.spi.ArchiveReaderFactory;
+
+/**
+ *
+ * Factory for {@link JsonArchiveReader} instances. This type of archive reader
+ * handles archive URLs starting with json:
and implements the
+ *
+ * JSON archive access protocol 1.0.
+ *
+ * Instances of this class are thread-safe. + *
+ */ +public class JsonArchiveReaderFactory implements ArchiveReaderFactory { + + @Override + public ArchiveReader createReader(String url) throws Exception { + if (!url.startsWith("json:")) { + throw new IllegalArgumentException( + "URL must start with scheme \"json:\"."); + } + return new JsonArchiveReader( + url, JsonArchivePreferences.getDefaultInstance()); + } + + @Override + public String getPrefix() { + return "json"; + } + +} diff --git a/app/databrowser-json/src/main/java/org/phoebus/archive/reader/json/internal/JsonArchiveInfoReader.java b/app/databrowser-json/src/main/java/org/phoebus/archive/reader/json/internal/JsonArchiveInfoReader.java new file mode 100644 index 0000000000..3a5074df13 --- /dev/null +++ b/app/databrowser-json/src/main/java/org/phoebus/archive/reader/json/internal/JsonArchiveInfoReader.java @@ -0,0 +1,180 @@ +/******************************************************************************* + * Copyright (c) 2013-2024 aquenos GmbH. + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Public License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/legal/epl-v10.html + ******************************************************************************/ + +package org.phoebus.archive.reader.json.internal; + +import com.fasterxml.jackson.core.JsonParseException; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonToken; + +import java.io.IOException; +import java.util.LinkedList; +import java.util.List; + +/** + * Reads a {@link ArchiveInfo} objects from a {@link JsonParser}. + */ +public final class JsonArchiveInfoReader { + + /** + * Information about an archive that is available on the server. + * + * @param archive_description the archive’s description. + * @param archive_key key identifying the archive on the server. + * @param archive_name the archive’s name. 
+ */ + public record ArchiveInfo( + String archive_description, + int archive_key, + String archive_name) { + } + + private JsonArchiveInfoReader() { + } + + /** + * Reads a {@link ArchiveInfo} value from a {@link JsonParser}. When + * calling this method, the parser’s current token must be + * {@link JsonToken#START_ARRAY START_ARRAY} and when the method returns + * successfully, the parser’s current token is the corresponding + * {@link JsonToken#END_ARRAY END_ARRAY}. + * + * @param parser JSON parser from which the tokens are read. + * @return list representing the parsed JSON array. + * @throws IOException + * if the JSON data is malformed or there is an I/O problem. + */ + public static Listtrue
) or a default number
+ * format should be used when the precision is zero (false
).
+ * This only applies to floating-point values. Integer values always use
+ * a number format that does not include fractional digits.
+ * @return value representing the parsed JSON object.
+ * @throws IOException
+ * if the JSON data is malformed or there is an I/O problem.
+ */
+ public static VType readValue(
+ final JsonParser parser, boolean honor_zero_precision)
+ throws IOException {
+ JsonToken token = parser.getCurrentToken();
+ if (token != JsonToken.START_OBJECT) {
+ throw new JsonParseException(
+ parser,
+ "Expected START_OBJECT but got " + token,
+ parser.getTokenLocation());
+ }
+ Display display = null;
+ ImmutableDoubleArray double_value = null;
+ EnumDisplay enum_display = null;
+ ImmutableIntArray enum_value = null;
+ String field_name = null;
+ boolean found_value = false;
+ ImmutableLongArray long_value = null;
+ Double maximum = null;
+ Double minimum = null;
+ String quality = null;
+ AlarmSeverity severity = null;
+ String status = null;
+ Instant timestamp = null;
+ ValueType type = null;
+ Listtrue
) or a default number
+ * format should be used when the precision is zero (false
).
+ * @return
+ * an instance of {@link String}[]
(storing the enum labels)
+ * or an instance of {@link Display} (storing numeric limits and number
+ * formatting information).
+ * @throws IOException
+ * if an error occurs while parsing the JSON input (e.g. interrupted
+ * stream, malformed data).
+ */
+ private static Object readMetaData(
+ final JsonParser parser, boolean honor_zero_precision)
+ throws IOException {
+ JsonToken token = parser.getCurrentToken();
+ if (token == null) {
+ throw new IOException("Unexpected end of stream.");
+ }
+ if (token != JsonToken.START_OBJECT) {
+ throw new JsonParseException(
+ parser,
+ "Expected START_OBJECT but got " + token,
+ parser.getTokenLocation());
+ }
+ Double alarm_high = null;
+ Double alarm_low = null;
+ Double display_high = null;
+ Double display_low = null;
+ String field_name = null;
+ Integer precision = null;
+ List+ * Iterator for the {@link JsonArchiveReader}. This class is only intended for + * instantiation by that class. + *
+ * + *+ * Like most iterators, instances of this class are not thread-safe. + * The one exception is the {@link #cancel()} method, which may be called by + * any thread. In order to implement cancellation in a thread-safe way, calling + * this method only results in a flag being set. The iterator is then closed + * the next time {@link #hasNext()} is called. + *
+ */ +public class JsonValueIterator implements ValueIterator { + + private volatile boolean canceled = false; + private final boolean honor_zero_precision; + private final Logger logger; + private VType next_value; + private Consumeron_close
function is called when the iterator is closed, so
+ * the calling code can pass a function that closes the parser.
+ *
+ * @param parser
+ * JSON parser from which samples are read. The iterator expects that the
+ * parser’s current token is the start of an array and reads samples until
+ * the current token is the corresponding end of an array.
+ * @param on_close
+ * function that is called when the iterator is closed. May be
+ * null
.
+ * @param request_url
+ * URL that was used to retrieve the JSON data. This is only used when
+ * logging error messages.
+ * @param honor_zero_precision
+ * whether a precision of zero should result in no fractional digits being
+ * used in the number format of returned values (true
) or a
+ * default number format should be used when the precision is zero
+ * (false
). This only applies to floating-point values.
+ * Integer values always use a number format that does not include
+ * fractional digits.
+ * @throws IOException
+ * if initial operations on the JSON parser fail or if the JSON document
+ * is malformed. Errors that occur later do not result in an exception
+ * being thrown. Instead, the error is logged and {@link #hasNext()}
+ * returns false
.
+ */
+ public JsonValueIterator(
+ final JsonParser parser,
+ final Consumerfalse
. For use by {@link JsonArchiveReader} only.
+ */
+ public void cancel() {
+ // Only set a flag here: the iterator is actually closed on the next
+ // call to hasNext(), which keeps this method safe to call from any
+ // thread.
+ this.canceled = true;
+ }
+
+ @Override
+ public void close() {
+ // Closing is idempotent: a second call finds parser == null and does
+ // nothing.
+ // The parser field also serves as an indicator whether this iterator
+ // has been closed. If the parser is null, we know that the iterator
+ // has already been closed.
+ if (parser != null) {
+ // We have to call the on_close callback. Besides other things,
+ // this ensures that the parser is closed.
+ if (on_close != null) {
+ on_close.accept(this);
+ }
+ // Give up references that are not needed any longer. Setting the
+ // parser reference to null also has the effect that this iterator
+ // is marked as closed.
+ next_value = null;
+ on_close = null;
+ parser = null;
+ }
+ }
+
+ @Override
+ public boolean hasNext() {
+ final boolean has_next;
+ // The hasNext method is not supposed to throw an exception, so when
+ // there is an exception, we log it and return false.
+ try {
+ has_next = hasNextInternal();
+ } catch (IOException e) {
+ // A read failure closes the iterator, so every subsequent call
+ // keeps returning false.
+ close();
+ logger.log(
+ Level.SEVERE,
+ "Error while trying to read sample from server response "
+ + "for URL \""
+ + request_url
+ + "\": "
+ + e.getMessage(),
+ e);
+ return false;
+ }
+ return has_next;
+ }
+
+ @Override
+ public VType next() {
+ // We check whether next_value is null before calling hasNext(). If we
+ // called hasNext() directly, this method would throw an exception when
+ // cancel was called between calling hasNext() and next(). As cancel()
+ // may be called by a different thread, this could result in an
+ // unexpected NoSuchElementException being thrown. Therefore, we rather
+ // return the already retrieved element and close the iterator on the
+ // next call to hasNext().
+ if (next_value == null && !hasNext()) {
+ // If the parser is null, the last call to hasNext() might have
+ // returned true, but close() has been called in between.
+ if (parser == null) {
+ throw new NoSuchElementException(
+ "This iterator has been closed, so no more elements "
+ + "available.");
+ }
+ // The last call to hasNext() must have returned false, so this
+ // call to next clearly is a violation of the API.
+ throw new NoSuchElementException(
+ "next() called while hasNext() == false.");
+ }
+ // Hand out the prefetched value and clear the slot so that the next
+ // call to hasNext() fetches a fresh sample. (Renamed from camelCase
+ // returnValue for consistency with the snake_case locals used
+ // throughout this file.)
+ final var return_value = next_value;
+ next_value = null;
+ return return_value;
+ }
+
+ /**
+ * Fetches the next sample from the parser into {@code next_value}.
+ * Returns {@code false} when no more samples are available, either
+ * because the iterator has been canceled or because the end of the
+ * samples array has been reached.
+ */
+ private boolean fetchNext() throws IOException {
+ if (canceled) {
+ return false;
+ }
+ final var token = parser.nextToken();
+ if (token == null) {
+ throw new IOException(
+ "Stream ended prematurely while trying to read next "
+ + "sample.");
+ }
+ if (token == JsonToken.END_ARRAY) {
+ // There should be no data after the end of the array.
+ final var next_token = parser.nextToken();
+ if (next_token != null) {
+ throw new JsonParseException(
+ parser,
+ "Expected end-of-stream but found " + next_token + ".",
+ parser.getTokenLocation());
+ }
+ return false;
+ }
+ next_value = JsonVTypeReader.readValue(parser, honor_zero_precision);
+ return true;
+ }
+
+ /**
+ * Implements {@link #hasNext()} without catching I/O errors; closes the
+ * iterator as soon as the last sample has been fetched.
+ */
+ private boolean hasNextInternal() throws IOException {
+ if (next_value != null) {
+ // We already fetched the next value.
+ return true;
+ }
+ if (parser == null) {
+ // The iterator has been closed.
+ return false;
+ }
+ if (fetchNext()) {
+ return true;
+ }
+ close();
+ return false;
+ }
+
+}
diff --git a/app/databrowser-json/src/main/resources/META-INF/services/org.phoebus.archive.reader.spi.ArchiveReaderFactory b/app/databrowser-json/src/main/resources/META-INF/services/org.phoebus.archive.reader.spi.ArchiveReaderFactory
new file mode 100644
index 0000000000..35de6f7726
--- /dev/null
+++ b/app/databrowser-json/src/main/resources/META-INF/services/org.phoebus.archive.reader.spi.ArchiveReaderFactory
@@ -0,0 +1 @@
+org.phoebus.archive.reader.json.JsonArchiveReaderFactory
diff --git a/app/databrowser-json/src/main/resources/archive_reader_json_preferences.properties b/app/databrowser-json/src/main/resources/archive_reader_json_preferences.properties
new file mode 100644
index 0000000000..359b0f1813
--- /dev/null
+++ b/app/databrowser-json/src/main/resources/archive_reader_json_preferences.properties
@@ -0,0 +1,5 @@
+# Shall a precision of zero for a floating-point value result in this value
+# using a number format without fractional digits (true) or shall it be treated
+# as an indication that the value should be rendered with a default number of
+# fractional digits (false)?
+honor_zero_precision=true
diff --git a/app/databrowser-json/src/test/java/org/phoebus/archive/reader/json/HttpServerTestBase.java b/app/databrowser-json/src/test/java/org/phoebus/archive/reader/json/HttpServerTestBase.java
new file mode 100644
index 0000000000..afa7d7437b
--- /dev/null
+++ b/app/databrowser-json/src/test/java/org/phoebus/archive/reader/json/HttpServerTestBase.java
@@ -0,0 +1,211 @@
+/*******************************************************************************
+ * Copyright (c) 2024 aquenos GmbH.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Public License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ ******************************************************************************/
+
+package org.phoebus.archive.reader.json;
+
+import com.google.common.base.Splitter;
+import com.google.common.collect.Maps;
+import com.sun.net.httpserver.Headers;
+import com.sun.net.httpserver.HttpHandler;
+import com.sun.net.httpserver.HttpServer;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.URI;
+import java.net.URLDecoder;
+import java.nio.charset.StandardCharsets;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Consumer;
+
+/**
+ * Base class for tests that need an HTTP server.
+ */
+public class HttpServerTestBase {
+
+ /**
+ * Information about an HTTP request.
+ *
+ * @param headers request headers.
+ * @param method request method.
+ * @param uri request URI.
+ */
+ // Immutable record of a single HTTP request received by the test server.
+ public record HttpRequest(
+ Headers headers,
+ String method,
+ URI uri) {
+ }
+
+ private static HttpServer http_server;
+
+ /**
+ * Parse a query string, returning the individual parameters. This function
+ * cannot handle query strings with duplicate parameters or parameters that
+ * do not have a value.
+ *
+ * @param query_string query string that shall be parsed.
+ * @return
+ * map mapping parameter names to their respective (decoded) values.
+ * @throws IllegalArgumentException
+ * if the query string is malformed, contains value-less parameters, or
+ * contains duplicate parameters.
+ */
+ public static Map/archive/
below the base URL that is passed to
+ * request_func
.
+ * @param request_func
+ * function that is called, passing the base URL of the provided archive
+ * service.
+ */
+ protected static void withArchiveInfo(
+ final String archive_info_json,
+ final Consumerarchive_key
.
+ *
+ * @param archive_key
+ * numerical key that identifies the archive that is provided.
+ * @param channel_name
+ * channel name for which samples are provided.
+ * @param samples_json
+ * content that is returned by the HTTP handler that serves the path
+ * /archive/<archive_key>/samples/<channel_name>
+ * below the base URL that is passed to the
+ * @param request_func
+ * function that is called, passing the base URL of the provided archive
+ * service.
+ * @return
+ * list with information about the requests that were made to the samples
+ * service. Requests to the archive-info service are not included.
+ */
+ protected static Listjson:
).
+ */
+ @Test
+ public void createWithInvalidUrl() {
+ // The URL lacks the mandatory "json:" scheme prefix, so construction
+ // must fail with an IllegalArgumentException.
+ assertThrows(IllegalArgumentException.class, () -> {
+ new JsonArchiveReader(
+ "http://invalid.example.com",
+ new JsonArchivePreferences(true));
+ });
+ }
+
+ /**
+ * Tests the {@link JsonArchiveReader#getDescription()} function.
+ */
+ @Test
+ public void getDescription() {
+ var archive_info_json = """
+ [ {
+ "key" : 1,
+ "name" : "Some name",
+ "description" : "Some description"
+ } ]
+ """;
+ final var preferences = new JsonArchivePreferences(true);
+ // Single archive: the default key (1) resolves to its description.
+ withArchiveInfo(archive_info_json, (base_url) -> {
+ try (final var reader = new JsonArchiveReader(
+ "json:" + base_url, preferences)) {
+ assertEquals(
+ "Some description", reader.getDescription());
+ }
+ });
+ archive_info_json = """
+ [ {
+ "key" : 1,
+ "name" : "Some name",
+ "description" : "Another description"
+ }, {
+ "key" : 3,
+ "name" : "Some name",
+ "description" : "Yet another description"
+ } ]
+ """;
+ // Multiple archives: the ";key=<key>" URL suffix selects which
+ // archive’s description is returned.
+ withArchiveInfo(archive_info_json, (base_url) -> {
+ try (final var reader = new JsonArchiveReader(
+ "json:" + base_url, preferences)) {
+ assertEquals(
+ "Another description", reader.getDescription());
+ }
+ try (final var reader = new JsonArchiveReader(
+ "json:" + base_url + ";key=3", preferences)) {
+ assertEquals(
+ "Yet another description",
+ reader.getDescription());
+ }
+ });
+ }
+
+ /**
+ * Tests the {@link
+ * JsonArchiveReader#getOptimizedValues(String, Instant, Instant, int)}
+ * function.
+ */
+ @Test
+ public void getOptimizedValues() {
+ // Two "minMaxDouble" samples: the first carries a two-element array
+ // value, the second a scalar value with min/max statistics.
+ final var samples_json = """
+ [ {
+ "time" : 123,
+ "severity" : {
+ "level" : "OK",
+ "hasValue" : true
+ },
+ "status" : "NO_ALARM",
+ "quality" : "Interpolated",
+ "metaData" : {
+ "type" : "numeric",
+ "precision" : 1,
+ "units" : "V",
+ "displayLow" : -100.0,
+ "displayHigh" : 100.0,
+ "warnLow" : "NaN",
+ "warnHigh" : "NaN",
+ "alarmLow" : "NaN",
+ "alarmHigh" : "NaN"
+ },
+ "type" : "minMaxDouble",
+ "value" : [ -5.0, -1.2 ],
+ "minimum" : -15.1,
+ "maximum" : 2.7
+ }, {
+ "time" : 456,
+ "severity" : {
+ "level" : "OK",
+ "hasValue" : true
+ },
+ "status" : "NO_ALARM",
+ "quality" : "Interpolated",
+ "metaData" : {
+ "type" : "numeric",
+ "precision" : 1,
+ "units" : "V",
+ "displayLow" : -100.0,
+ "displayHigh" : 100.0,
+ "warnLow" : "NaN",
+ "warnHigh" : "NaN",
+ "alarmLow" : "NaN",
+ "alarmHigh" : "NaN"
+ },
+ "type" : "minMaxDouble",
+ "value" : [ 4.7 ],
+ "minimum" : -3.9,
+ "maximum" : 17.1
+ } ]
+ """;
+ final var channel_name = "double-channel";
+ final var start = Instant.ofEpochMilli(0L);
+ final var end = Instant.ofEpochMilli(1L);
+ final var preferences = new JsonArchivePreferences(true);
+ var requests = withSamples(
+ 7, channel_name, samples_json, (base_url) -> {
+ try (
+ final var reader = new JsonArchiveReader(
+ "json:" + base_url + ";key=7",
+ preferences);
+ final var iterator = reader.getOptimizedValues(
+ channel_name, start, end, 10)
+ ) {
+ // Check the first sample. The statistics VType does
+ // not support arrays, so we expect a VDoubleArray.
+ final var double_array = (VDoubleArray) iterator.next();
+ assertEquals(2, double_array.getData().size());
+ assertEquals(
+ -5.0, double_array.getData().getDouble(0));
+ assertEquals(
+ -1.2, double_array.getData().getDouble(1));
+ assertEquals(
+ "NO_ALARM", double_array.getAlarm().getName());
+ assertEquals(
+ AlarmSeverity.NONE,
+ double_array.getAlarm().getSeverity());
+ assertEquals(
+ Range.undefined(),
+ double_array.getDisplay().getAlarmRange());
+ assertEquals(
+ Range.undefined(),
+ double_array.getDisplay().getControlRange()
+ );
+ assertEquals(
+ Range.of(-100.0, 100.0),
+ double_array.getDisplay().getDisplayRange());
+ assertEquals(
+ Range.undefined(),
+ double_array.getDisplay().getWarningRange());
+ assertEquals(
+ 1,
+ double_array
+ .getDisplay()
+ .getFormat()
+ .getMinimumFractionDigits());
+ assertEquals(
+ 1,
+ double_array
+ .getDisplay()
+ .getFormat()
+ .getMaximumFractionDigits());
+ assertEquals(
+ "V",
+ double_array.getDisplay().getUnit());
+ assertEquals(
+ Instant.ofEpochSecond(0, 123L),
+ double_array.getTime().getTimestamp());
+ // Check the second sample.
+ final var statistics = (VStatistics) iterator.next();
+ assertEquals(
+ 4.7,
+ statistics.getAverage().doubleValue());
+ assertEquals(
+ -3.9,
+ statistics.getMin().doubleValue());
+ assertEquals(
+ 17.1,
+ statistics.getMax().doubleValue());
+ assertEquals(
+ 0,
+ statistics.getNSamples().intValue());
+ assertEquals(
+ Double.NaN,
+ statistics.getStdDev().doubleValue());
+ assertEquals(
+ "NO_ALARM",
+ statistics.getAlarm().getName());
+ assertEquals(
+ AlarmSeverity.NONE,
+ statistics.getAlarm().getSeverity());
+ assertEquals(
+ Range.undefined(),
+ statistics.getDisplay().getAlarmRange());
+ assertEquals(
+ Range.undefined(),
+ statistics.getDisplay().getControlRange()
+ );
+ assertEquals(
+ Range.of(-100.0, 100.0),
+ statistics.getDisplay().getDisplayRange());
+ assertEquals(
+ Range.undefined(),
+ statistics.getDisplay().getWarningRange());
+ assertEquals(
+ 1,
+ statistics
+ .getDisplay()
+ .getFormat()
+ .getMinimumFractionDigits());
+ assertEquals(
+ 1,
+ statistics
+ .getDisplay()
+ .getFormat()
+ .getMaximumFractionDigits());
+ assertEquals(
+ "V",
+ statistics.getDisplay().getUnit());
+ assertEquals(
+ Instant.ofEpochSecond(0L, 456L),
+ statistics.getTime().getTimestamp());
+ // There should be no more samples.
+ assertFalse(iterator.hasNext());
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ });
+ // Exactly one request must have reached the samples endpoint, with
+ // start/end expressed as nanoseconds since epoch.
+ assertEquals(1, requests.size());
+ final var request = requests.get(0);
+ assertEquals("GET", request.method());
+ final var query_params = parseQueryString(request.uri().getQuery());
+ assertEquals("0", query_params.get("start"));
+ assertEquals("1000000", query_params.get("end"));
+ assertEquals("10", query_params.get("count"));
+ }
+
+ /**
+ * Tests the
+ * {@link JsonArchiveReader#getRawValues(String, Instant, Instant)}
+     * method with double samples. Of the tests for numeric values, this is
+ * the most detailed one.
+ */
+ @Test
+ public void getRawValuesWithDoubleSamples() {
+ final var samples_json = """
+ [ {
+ "time" : 123457000001,
+ "severity" : {
+ "level" : "OK",
+ "hasValue" : true
+ },
+ "status" : "NO_ALARM",
+ "quality" : "Original",
+ "metaData" : {
+ "type" : "numeric",
+ "precision" : 3,
+ "units" : "mA",
+ "displayLow" : 0.0,
+ "displayHigh" : 300.0,
+ "warnLow" : 5.0,
+ "warnHigh" : 100.0,
+ "alarmLow" : 2.0,
+ "alarmHigh" : "NaN"
+ },
+ "type" : "double",
+ "value" : [ 27.2, 48.3 ]
+ }, {
+ "time" : 123457000002,
+ "severity" : {
+ "level" : "MAJOR",
+ "hasValue" : true
+ },
+ "status" : "TEST_STATUS",
+ "quality" : "Original",
+ "metaData" : {
+ "type" : "numeric",
+ "precision" : 3,
+ "units" : "mA",
+ "displayLow" : 0.0,
+ "displayHigh" : 300.0,
+ "warnLow" : 5.0,
+ "warnHigh" : 100.0,
+ "alarmLow" : 2.0,
+ "alarmHigh" : "NaN"
+ },
+ "type" : "double",
+ "value" : [ 31.9 ]
+ } ]
+ """;
+ final var channel_name = "double-channel";
+ final var start = Instant.ofEpochMilli(123456L);
+ final var end = Instant.ofEpochMilli(456789L);
+ final var preferences = new JsonArchivePreferences(true);
+ var requests = withSamples(
+ 2, channel_name, samples_json, (base_url) -> {
+ try (
+ final var reader = new JsonArchiveReader(
+ "json:" + base_url + ";key=2",
+ preferences);
+ final var iterator = reader.getRawValues(
+ channel_name, start, end)
+ ) {
+ // Check the first sample.
+ assertTrue(iterator.hasNext());
+ final var double_array = (VDoubleArray) iterator.next();
+ assertEquals(2, double_array.getData().size());
+ assertEquals(
+ 27.2, double_array.getData().getDouble(0));
+ assertEquals(
+ 48.3, double_array.getData().getDouble(1));
+ assertEquals(
+ "NO_ALARM", double_array.getAlarm().getName());
+ assertEquals(
+ AlarmSeverity.NONE,
+ double_array.getAlarm().getSeverity());
+ assertEquals(
+ 2.0,
+ double_array
+ .getDisplay().getAlarmRange().getMinimum());
+ assertEquals(
+ Double.POSITIVE_INFINITY,
+ double_array
+ .getDisplay().getAlarmRange().getMaximum());
+ assertEquals(
+ Range.undefined(),
+ double_array.getDisplay().getControlRange()
+ );
+ assertEquals(
+ 0.0,
+ double_array
+ .getDisplay().getDisplayRange().getMinimum());
+ assertEquals(
+ 300.0,
+ double_array
+ .getDisplay().getDisplayRange().getMaximum());
+ assertEquals(
+ 5.0,
+ double_array
+ .getDisplay().getWarningRange().getMinimum());
+ assertEquals(
+ 100.0,
+ double_array
+ .getDisplay().getWarningRange().getMaximum());
+ assertEquals(
+ 3,
+ double_array
+ .getDisplay()
+ .getFormat()
+ .getMinimumFractionDigits());
+ assertEquals(
+ 3,
+ double_array
+ .getDisplay()
+ .getFormat()
+ .getMaximumFractionDigits());
+ assertEquals(
+ "mA",
+ double_array.getDisplay().getUnit());
+ assertEquals(
+ Instant.ofEpochSecond(123L, 457000001L),
+ double_array.getTime().getTimestamp());
+ // Check the second sample (only the parts that differ
+                        // from the first one).
+ assertTrue(iterator.hasNext());
+ final var double_scalar = (VDouble) iterator.next();
+ assertEquals(
+ 31.9, double_scalar.getValue().doubleValue());
+ assertEquals(
+ "TEST_STATUS",
+ double_scalar.getAlarm().getName());
+ assertEquals(
+ AlarmSeverity.MAJOR,
+ double_scalar.getAlarm().getSeverity());
+ assertEquals(
+ Instant.ofEpochSecond(123L, 457000002L),
+ double_scalar.getTime().getTimestamp());
+ // There should be no more samples.
+ assertFalse(iterator.hasNext());
+ assertThrows(NoSuchElementException.class, iterator::next);
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ });
+ assertEquals(1, requests.size());
+ final var request = requests.get(0);
+ assertEquals("GET", request.method());
+ final var query_params = parseQueryString(request.uri().getQuery());
+ assertEquals("123456000000", query_params.get("start"));
+ assertEquals("456789000000", query_params.get("end"));
+ assertFalse(query_params.containsKey("count"));
+ }
+
+ /**
+ * Tests the
+ * {@link JsonArchiveReader#getRawValues(String, Instant, Instant)} method
+ * with enum samples.
+ */
+ @Test
+ public void getRawValuesWithEnumSamples() {
+ final var samples_json = """
+ [ {
+ "time" : 123000000009,
+ "severity" : {
+ "level" : "OK",
+ "hasValue" : true
+ },
+ "status" : "NO_ALARM",
+ "quality" : "Original",
+ "metaData" : {
+ "type" : "enum",
+ "states" : [ "High", "Low" ]
+ },
+ "type" : "enum",
+ "value" : [ 1, 0 ]
+ }, {
+ "time" : 124000000011,
+ "severity" : {
+ "level" : "INVALID",
+ "hasValue" : true
+ },
+ "status" : "LINK",
+ "quality" : "Original",
+ "metaData" : {
+ "type" : "enum",
+ "states" : [ "High", "Low" ]
+ },
+ "type" : "enum",
+ "value" : [ 1 ]
+ }, {
+ "time" : 124000000012,
+ "severity" : {
+ "level" : "OK",
+ "hasValue" : true
+ },
+ "status" : "NO_ALARM",
+ "quality" : "Original",
+ "metaData" : {
+ "type" : "enum",
+ "states" : [ "High", "Low" ]
+ },
+ "type" : "enum",
+ "value" : [ 1, 2 ]
+ }, {
+ "time" : 124000000013,
+ "severity" : {
+ "level" : "OK",
+ "hasValue" : true
+ },
+ "status" : "NO_ALARM",
+ "quality" : "Original",
+ "metaData" : {
+ "type" : "enum",
+ "states" : [ "High", "Low" ]
+ },
+ "type" : "enum",
+ "value" : [ -1 ]
+ } ]
+ """;
+ final var channel_name = "enum-channel";
+ final var start = Instant.ofEpochMilli(4321L);
+ final var end = Instant.ofEpochMilli(999999L);
+ final var preferences = new JsonArchivePreferences(true);
+ var requests = withSamples(
+ 1, channel_name, samples_json, (base_url) -> {
+ try (
+ final var reader = new JsonArchiveReader(
+ "json:" + base_url, preferences);
+ final var iterator = reader.getRawValues(
+ channel_name, start, end)
+ ) {
+ // Check the first sample.
+ final var enum_array = (VEnumArray) iterator.next();
+ assertEquals(2, enum_array.getIndexes().size());
+ assertEquals(1, enum_array.getIndexes().getInt(0));
+ assertEquals(0, enum_array.getIndexes().getInt(1));
+ assertEquals(
+ "NO_ALARM",
+ enum_array.getAlarm().getName());
+ assertEquals(
+ AlarmSeverity.NONE,
+ enum_array.getAlarm().getSeverity());
+ assertEquals(
+ Arrays.asList("High", "Low"),
+ enum_array.getDisplay().getChoices());
+ assertEquals(
+ Instant.ofEpochSecond(123L, 9L),
+ enum_array.getTime().getTimestamp());
+ // Check the second sample (only the parts that differ
+                        // from the first one).
+ final var enum_scalar = (VEnum) iterator.next();
+ assertEquals(1, enum_scalar.getIndex());
+ assertEquals(
+ "LINK",
+ enum_scalar.getAlarm().getName());
+ assertEquals(
+ AlarmSeverity.INVALID,
+ enum_scalar.getAlarm().getSeverity());
+ assertEquals(
+ Instant.ofEpochSecond(124L, 11L),
+ enum_scalar.getTime().getTimestamp());
+ // Check the third sample. As this sample contains a
+ // value for which there is no label, we expect a
+ // VIntArray instead of a VEnumArray.
+ final var int_array = (VIntArray) iterator.next();
+ assertEquals(2, int_array.getData().size());
+ assertEquals(1, int_array.getData().getInt(0));
+ assertEquals(2, int_array.getData().getInt(1));
+ assertEquals(
+ 0,
+ int_array
+ .getDisplay()
+ .getFormat()
+ .getMaximumFractionDigits());
+ // Check the fourth sample. As this sample contains a
+ // value for which there is no label, we expect a
+ // VInt instead of a VEnum.
+ final var int_scalar = (VInt) iterator.next();
+ assertEquals(-1, int_scalar.getValue().intValue());
+ // There should be no more samples.
+ assertFalse(iterator.hasNext());
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ });
+ assertEquals(1, requests.size());
+ final var request = requests.get(0);
+ assertEquals("GET", request.method());
+ final var query_params = parseQueryString(request.uri().getQuery());
+ assertEquals("4321000000", query_params.get("start"));
+ assertEquals("999999000000", query_params.get("end"));
+ assertFalse(query_params.containsKey("count"));
+ }
+
+ /**
+ * Tests the
+ * {@link JsonArchiveReader#getRawValues(String, Instant, Instant)} method
+ * with long samples.
+ */
+ @Test
+ public void getRawValuesWithLongSamples() {
+ final var samples_json = """
+ [ {
+ "time" : 456000000001,
+ "severity" : {
+ "level" : "MAJOR",
+ "hasValue" : true
+ },
+ "status" : "SOME_ALARM",
+ "quality" : "Original",
+ "metaData" : {
+ "type" : "numeric",
+ "precision" : 0,
+ "units" : "pcs.",
+ "displayLow" : 1.0,
+ "displayHigh" : 100.0,
+ "warnLow" : 0.0,
+ "warnHigh" : 0.0,
+ "alarmLow" : 0.0,
+ "alarmHigh" : 0.0
+ },
+ "type" : "long",
+ "value" : [ 14, 2 ]
+ }, {
+ "time" : 456000000002,
+ "severity" : {
+ "level" : "INVALID",
+ "hasValue" : true
+ },
+ "status" : "INVALID_ALARM",
+ "quality" : "Original",
+ "metaData" : {
+ "type" : "numeric",
+ "precision" : 0,
+ "units" : "pcs.",
+ "displayLow" : 1.0,
+ "displayHigh" : 100.0,
+ "warnLow" : 0.0,
+ "warnHigh" : 0.0,
+ "alarmLow" : 0.0,
+ "alarmHigh" : 0.0
+ },
+ "type" : "long",
+ "value" : [ 19 ]
+ } ]
+ """;
+ final var channel_name = "long-channel";
+ final var start = Instant.ofEpochMilli(4321L);
+ final var end = Instant.ofEpochMilli(999999L);
+ final var preferences = new JsonArchivePreferences(true);
+ var requests = withSamples(
+ 1, channel_name, samples_json, (base_url) -> {
+ try (
+ final var reader = new JsonArchiveReader(
+ "json:" + base_url, preferences);
+ final var iterator = reader.getRawValues(
+ channel_name, start, end)
+ ) {
+
+ // Check the first sample. We do not check the limits
+                        // because the code parsing them is identical to the
+                        // code for the double samples, and we already check them
+ // there.
+ final var long_array = (VLongArray) iterator.next();
+ assertEquals(2, long_array.getData().size());
+ assertEquals(14, long_array.getData().getLong(0));
+ assertEquals(2, long_array.getData().getLong(1));
+ assertEquals(
+ "SOME_ALARM",
+ long_array.getAlarm().getName());
+ assertEquals(
+ AlarmSeverity.MAJOR,
+ long_array.getAlarm().getSeverity());
+ assertEquals(
+ 0,
+ long_array
+ .getDisplay()
+ .getFormat()
+ .getMaximumFractionDigits());
+ assertEquals(
+ "pcs.",
+ long_array.getDisplay().getUnit());
+ assertEquals(
+ Instant.ofEpochSecond(456L, 1L),
+ long_array.getTime().getTimestamp());
+ // Check the second sample (only the parts that differ
+                        // from the first one).
+ final var long_scalar = (VLong) iterator.next();
+ assertEquals(19, long_scalar.getValue().longValue());
+ assertEquals(
+ "INVALID_ALARM",
+ long_scalar.getAlarm().getName());
+ assertEquals(
+ AlarmSeverity.INVALID,
+ long_scalar.getAlarm().getSeverity());
+ assertEquals(
+ Instant.ofEpochSecond(456L, 2L),
+ long_scalar.getTime().getTimestamp());
+ // There should be no more samples.
+ assertFalse(iterator.hasNext());
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ });
+ assertEquals(1, requests.size());
+ final var request = requests.get(0);
+ assertEquals("GET", request.method());
+ final var query_params = parseQueryString(request.uri().getQuery());
+ assertEquals("4321000000", query_params.get("start"));
+ assertEquals("999999000000", query_params.get("end"));
+ assertFalse(query_params.containsKey("count"));
+ }
+
+ /**
+ * Tests the
+ * {@link JsonArchiveReader#getRawValues(String, Instant, Instant)} method
+ * with a malformed response.
+ */
+ @Test
+ public void getRawValuesWithMalformedResponse() {
+ final var channel_name = "some-channel";
+ final var start = Instant.ofEpochMilli(123L);
+ final var end = Instant.ofEpochMilli(456L);
+ final var preferences = new JsonArchivePreferences(true);
+ // First, we test that we get an immediate exception if the first
+ // sample is malformed.
+ var samples_json = """
+ [ {
+ "time_modified" : 123457000001,
+ "severity" : {
+ "level" : "OK",
+ "hasValue" : true
+ },
+ "status" : "NO_ALARM",
+ "quality" : "Original",
+ "metaData" : {
+ "type" : "numeric",
+ "precision" : 3,
+ "units" : "mA",
+ "displayLow" : 0.0,
+ "displayHigh" : 300.0,
+ "warnLow" : 5.0,
+ "warnHigh" : 100.0,
+ "alarmLow" : 2.0,
+ "alarmHigh" : "NaN"
+ },
+ "type" : "double",
+ "value" : [ 27.2, 48.3 ]
+ } ]
+ """;
+
+ withSamples(
+ 1, channel_name, samples_json, (base_url) -> {
+ try (final var reader = new JsonArchiveReader(
+ "json:" + base_url, preferences)) {
+ assertThrows(IOException.class, () -> {
+ reader.getRawValues(
+ channel_name, start, end);
+ });
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ });
+ // Second, we test that we do not get an exception when a subsequent
+ // sample is malformed. Instead, we expect hasNext() to return false.
+        // As a side effect, an error message should be logged, but we cannot
+ // test this easily.
+ samples_json = """
+ [ {
+ "time" : 123457000001,
+ "severity" : {
+ "level" : "OK",
+ "hasValue" : true
+ },
+ "status" : "NO_ALARM",
+ "quality" : "Original",
+ "metaData" : {
+ "type" : "numeric",
+ "precision" : 3,
+ "units" : "mA",
+ "displayLow" : 0.0,
+ "displayHigh" : 300.0,
+ "warnLow" : 5.0,
+ "warnHigh" : 100.0,
+ "alarmLow" : 2.0,
+ "alarmHigh" : "NaN"
+ },
+ "type" : "double",
+ "value" : [ 27.2, 48.3 ]
+ }, {
+ "time_modified" : 123457000002,
+ "severity" : {
+ "level" : "MAJOR",
+ "hasValue" : true
+ },
+ "status" : "TEST_STATUS",
+ "quality" : "Original",
+ "metaData" : {
+ "type" : "numeric",
+ "precision" : 3,
+ "units" : "mA",
+ "displayLow" : 0.0,
+ "displayHigh" : 300.0,
+ "warnLow" : 5.0,
+ "warnHigh" : 100.0,
+ "alarmLow" : 2.0,
+ "alarmHigh" : "NaN"
+ },
+ "type" : "double",
+ "value" : [ 31.9 ]
+ } ]
+ """;
+ withSamples(1, channel_name, samples_json, (base_url) -> {
+ try (
+ final var reader = new JsonArchiveReader(
+ "json:" + base_url, preferences);
+ final var iterator = reader.getRawValues(
+ channel_name, start, end)
+ ) {
+ // We should be able to retrieve the first sample, but
+ // not the second one.
+ assertTrue(iterator.hasNext());
+ iterator.next();
+ // Before calling hasNext() the second time, we suppress error
+ // logging for the iterator.
+ final var iterator_logger = Logger.getLogger(
+ iterator.getClass().getName());
+ final var log_level = iterator_logger.getLevel();
+ iterator_logger.setLevel(Level.OFF);
+ try {
+ assertFalse(iterator.hasNext());
+ } finally {
+ iterator_logger.setLevel(log_level);
+ }
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ });
+ }
+
+ /**
+ * Tests the
+ * {@link JsonArchiveReader#getRawValues(String, Instant, Instant)} method
+ * with no samples.
+ */
+ @Test
+ public void getRawValuesWithNoSamples() {
+ final var channel_name = "empty-channel";
+ final var start = Instant.ofEpochMilli(456L);
+ final var end = Instant.ofEpochMilli(789L);
+ final var preferences = new JsonArchivePreferences(true);
+ var requests = withSamples(
+ 1, channel_name, "[]", (base_url) -> {
+ try (
+ final var reader = new JsonArchiveReader(
+ "json:" + base_url, preferences);
+ final var iterator = reader.getRawValues(
+ channel_name, start, end)
+ ) {
+ // The iterator should be empty.
+ assertFalse(iterator.hasNext());
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ });
+ assertEquals(1, requests.size());
+ final var request = requests.get(0);
+ assertEquals("GET", request.method());
+ final var query_params = parseQueryString(request.uri().getQuery());
+ assertEquals("456000000", query_params.get("start"));
+ assertEquals("789000000", query_params.get("end"));
+ assertFalse(query_params.containsKey("count"));
+ }
+
+ /**
+ * Tests the
+ * {@link JsonArchiveReader#getRawValues(String, Instant, Instant)} method
+ * with string samples.
+ */
+ @Test
+ public void getRawValuesWithStringSamples() {
+ final var samples_json = """
+ [ {
+ "time" : 123000000001,
+ "severity" : {
+ "level" : "OK",
+ "hasValue" : true
+ },
+ "status" : "NO_ALARM",
+ "quality" : "Original",
+ "type" : "string",
+ "value" : [ "abc", "def", "ghi" ]
+ }, {
+ "time" : 123000000002,
+ "severity" : {
+ "level" : "OK",
+ "hasValue" : true
+ },
+ "status" : "NO_ALARM",
+ "quality" : "Original",
+ "type" : "string",
+ "value" : [ "123" ]
+ } ]
+ """;
+ final var channel_name = "long-channel";
+ final var start = Instant.ofEpochMilli(0L);
+ final var end = Instant.ofEpochMilli(999000L);
+ final var preferences = new JsonArchivePreferences(true);
+ var requests = withSamples(
+ 1, channel_name, samples_json, (base_url) -> {
+ try (
+ final var reader = new JsonArchiveReader(
+ "json:" + base_url, preferences);
+ final var iterator = reader.getRawValues(
+ channel_name, start, end)
+ ) {
+ // Check the first sample.
+ final var string_array = (VStringArray) iterator.next();
+ assertEquals(3, string_array.getData().size());
+ assertEquals("abc", string_array.getData().get(0));
+ assertEquals("def", string_array.getData().get(1));
+ assertEquals("ghi", string_array.getData().get(2));
+ assertEquals(
+ Instant.ofEpochSecond(123L, 1L),
+ string_array.getTime().getTimestamp());
+ // Check the second sample.
+ final var string_scalar = (VString) iterator.next();
+ assertEquals("123", string_scalar.getValue());
+ assertEquals(
+ Instant.ofEpochSecond(123L, 2L),
+ string_scalar.getTime().getTimestamp());
+ // There should be no more samples.
+ assertFalse(iterator.hasNext());
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ });
+ assertEquals(1, requests.size());
+ final var request = requests.get(0);
+ assertEquals("GET", request.method());
+ final var query_params = parseQueryString(request.uri().getQuery());
+ assertEquals("0", query_params.get("start"));
+ assertEquals("999000000000", query_params.get("end"));
+ assertFalse(query_params.containsKey("count"));
+ }
+
+ /**
+ * Tests the
+ * {@link JsonArchiveReader#getRawValues(String, Instant, Instant)} method
+ * with a channel name that is not known by the server.
+ */
+ @Test
+ public void getRawValuesWithUnknownChannel() {
+ final var start = Instant.ofEpochMilli(123L);
+ final var end = Instant.ofEpochMilli(456L);
+ final var preferences = new JsonArchivePreferences(true);
+ withSamples(1, "some-channel", "", (base_url) -> {
+ try (final var reader = new JsonArchiveReader(
+ "json:" + base_url, preferences)) {
+ assertThrows(UnknownChannelException.class, () -> {
+ reader.getRawValues("another-channel", start, end);
+ });
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ });
+ }
+
+ /**
+ * Tests the {@link JsonArchivePreferences#honor_zero_precision()} flag.
+ */
+ @Test
+ public void honorZeroPrecision() {
+ final var samples_json = """
+ [ {
+ "time" : 123457000001,
+ "severity" : {
+ "level" : "OK",
+ "hasValue" : true
+ },
+ "status" : "NO_ALARM",
+ "quality" : "Original",
+ "metaData" : {
+ "type" : "numeric",
+ "precision" : 0,
+ "units" : "mA",
+ "displayLow" : 0.0,
+ "displayHigh" : 300.0,
+ "warnLow" : 5.0,
+ "warnHigh" : 100.0,
+ "alarmLow" : 2.0,
+ "alarmHigh" : "NaN"
+ },
+ "type" : "double",
+ "value" : [ 1.5 ]
+ }, {
+ "time" : 456000000002,
+ "severity" : {
+ "level" : "INVALID",
+ "hasValue" : true
+ },
+ "status" : "INVALID_ALARM",
+ "quality" : "Original",
+ "metaData" : {
+ "type" : "numeric",
+ "precision" : 0,
+ "units" : "pcs.",
+ "displayLow" : 1.0,
+ "displayHigh" : 100.0,
+ "warnLow" : 0.0,
+ "warnHigh" : 0.0,
+ "alarmLow" : 0.0,
+ "alarmHigh" : 0.0
+ },
+ "type" : "long",
+ "value" : [ 19 ]
+ } ]
+ """;
+ final var channel_name = "double-channel";
+ final var start = Instant.ofEpochMilli(123456L);
+ final var end = Instant.ofEpochMilli(456789L);
+ // When honor_zero_precision is set, a sample with a precision of zero
+ // should have a number format that does not include fractional digits.
+ withSamples(
+ 1, channel_name, samples_json, (base_url) -> {
+ final var preferences = new JsonArchivePreferences(true);
+ try (
+ final var reader = new JsonArchiveReader(
+ "json:" + base_url, preferences);
+ final var iterator = reader.getRawValues(
+ channel_name, start, end)
+ ) {
+ final var double_scalar = (VDouble) iterator.next();
+ assertEquals(
+ 0,
+ double_scalar
+ .getDisplay()
+ .getFormat()
+ .getMaximumFractionDigits());
+ final var long_scalar = (VLong) iterator.next();
+ assertEquals(
+ 0,
+ long_scalar
+ .getDisplay()
+ .getFormat()
+ .getMaximumFractionDigits());
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ });
+ // When honor_zero_precision is clear, a sample with a precision of
+ // zero should have a number format that includes fractional digits,
+ // but only for double samples and not for long samples.
+ withSamples(
+ 1, channel_name, samples_json, (base_url) -> {
+ final var preferences = new JsonArchivePreferences(false);
+ try (
+ final var reader = new JsonArchiveReader(
+ "json:" + base_url, preferences);
+ final var iterator = reader.getRawValues(
+ channel_name, start, end)
+ ) {
+ final var double_scalar = (VDouble) iterator.next();
+ assertNotEquals(
+ 0,
+ double_scalar
+ .getDisplay()
+ .getFormat()
+ .getMaximumFractionDigits());
+ final var long_scalar = (VLong) iterator.next();
+ assertEquals(
+ 0,
+ long_scalar
+ .getDisplay()
+ .getFormat()
+ .getMaximumFractionDigits());
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ });
+ }
+
+}
diff --git a/app/pom.xml b/app/pom.xml
index 3771508034..ff5bb5eee0 100644
--- a/app/pom.xml
+++ b/app/pom.xml
@@ -20,6 +20,7 @@