diff --git a/app/databrowser-json/pom.xml b/app/databrowser-json/pom.xml new file mode 100644 index 0000000000..9fffffa15d --- /dev/null +++ b/app/databrowser-json/pom.xml @@ -0,0 +1,60 @@ + + 4.0.0 + + org.phoebus + app + 4.7.4-SNAPSHOT + + app-databrowser-json + + + org.junit.jupiter + junit-jupiter + ${junit.version} + test + + + + org.hamcrest + hamcrest-all + 1.3 + test + + + + org.phoebus + app-databrowser + 4.7.4-SNAPSHOT + + + + org.phoebus + core-framework + 4.7.4-SNAPSHOT + + + + com.fasterxml.jackson.core + jackson-core + ${jackson.version} + + + + com.google.guava + guava + ${guava.version} + + + + org.epics + epics-util + ${epics.util.version} + + + + org.epics + vtype + ${vtype.version} + + + diff --git a/app/databrowser-json/src/main/java/org/phoebus/archive/reader/json/JsonArchivePreferences.java b/app/databrowser-json/src/main/java/org/phoebus/archive/reader/json/JsonArchivePreferences.java new file mode 100644 index 0000000000..16405f75c3 --- /dev/null +++ b/app/databrowser-json/src/main/java/org/phoebus/archive/reader/json/JsonArchivePreferences.java @@ -0,0 +1,66 @@ +/******************************************************************************* + * Copyright (c) 2024 aquenos GmbH. + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Public License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/legal/epl-v10.html + ******************************************************************************/ + +package org.phoebus.archive.reader.json; + +import org.phoebus.framework.preferences.PreferencesReader; + +import java.util.logging.Logger; + +/** + *
+ * Preferences used by the {@link JsonArchiveReader}.
+ *
+ * Each of the parameters corresponds to a property in the preferences system,
+ * using the {@code org.phoebus.archive.reader.json} namespace.
+ *
+ * Please refer to the {@code archive_reader_json_preferences.properties}
+ * file for a full list of available properties and their meanings.
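+ *
+ * For example, the automatically configured instance can be obtained and
+ * queried as follows (illustrative sketch only):
+ * <pre>{@code
+ * JsonArchivePreferences preferences =
+ *         JsonArchivePreferences.getDefaultInstance();
+ * boolean honor_zero_precision = preferences.honor_zero_precision();
+ * }</pre>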
+ * + * @param honor_zero_precision + * flag indicating whether a floating-point value specifying a precision of + * zero shall be printed without any fractional digits (true) or + * whether such a value should be printed using a default format + * (false). + */ +public record JsonArchivePreferences( + boolean honor_zero_precision) { + + private final static JsonArchivePreferences DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = loadPreferences(); + } + + /** + * Returns the default instance of the preferences. This is the instance + * that is automatically configured through Phoebus’s + * {@link PreferencesReader}. + * + * @return preference instance created using the {@link PreferencesReader}. + */ + public static JsonArchivePreferences getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static JsonArchivePreferences loadPreferences() { + final var logger = Logger.getLogger( + JsonArchivePreferences.class.getName()); + final var preference_reader = new PreferencesReader( + JsonArchivePreferences.class, + "/archive_reader_json_preferences.properties"); + final var honor_zero_precision = preference_reader.getBoolean( + "honor_zero_precision"); + logger.config("honor_zero_precision = " + honor_zero_precision); + return new JsonArchivePreferences(honor_zero_precision); + } + +} diff --git a/app/databrowser-json/src/main/java/org/phoebus/archive/reader/json/JsonArchiveReader.java b/app/databrowser-json/src/main/java/org/phoebus/archive/reader/json/JsonArchiveReader.java new file mode 100644 index 0000000000..9c55f4283c --- /dev/null +++ b/app/databrowser-json/src/main/java/org/phoebus/archive/reader/json/JsonArchiveReader.java @@ -0,0 +1,501 @@ +/******************************************************************************* + * Copyright (c) 2013-2024 aquenos GmbH. + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Public License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/legal/epl-v10.html + ******************************************************************************/ + +package org.phoebus.archive.reader.json; + +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.core.JsonParseException; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonToken; +import com.fasterxml.jackson.core.json.JsonReadFeature; +import org.phoebus.archive.reader.ArchiveReader; +import org.phoebus.archive.reader.UnknownChannelException; +import org.phoebus.archive.reader.ValueIterator; +import org.phoebus.archive.reader.json.internal.JsonArchiveInfoReader; +import org.phoebus.archive.reader.json.internal.JsonValueIterator; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.lang.ref.Cleaner; +import java.math.BigInteger; +import java.net.URL; +import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import java.time.Instant; +import java.util.Collection; +import java.util.LinkedList; +import java.util.Map; +import java.util.Objects; +import java.util.WeakHashMap; +import java.util.logging.Level; +import java.util.logging.Logger; +import java.util.zip.DeflaterInputStream; +import java.util.zip.GZIPInputStream; + +/** + *
+ * Archive reader implementation that connects to an archive server using an
+ * HTTP/JSON-based protocol. Typically, this reader is used together with the
+ * JSON archive server. However, it will work with any compliant HTTP server.
+ *
+ * Instances of this class are thread-safe.
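+ *
+ * A minimal usage sketch (host, channel name and time range are
+ * placeholders; error handling and cleanup are omitted):
+ * <pre>{@code
+ * ArchiveReader reader = new JsonArchiveReader(
+ *         "json:http://localhost:8080/",
+ *         JsonArchivePreferences.getDefaultInstance());
+ * Instant end = Instant.now();
+ * Instant start = end.minus(Duration.ofHours(1));
+ * ValueIterator samples = reader.getRawValues("my:channel", start, end);
+ * while (samples.hasNext()) {
+ *     VType sample = samples.next();
+ *     // process the sample
+ * }
+ * }</pre>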
+ */ +public class JsonArchiveReader implements ArchiveReader { + + private final static BigInteger ONE_BILLION = BigInteger + .valueOf(1000000000L); + + private final Cleaner cleaner; + private final String description; + private final String http_url; + private final Map iterators; + private final JsonFactory json_factory; + private final int key; + private final Logger logger; + private final JsonArchivePreferences preferences; + + /** + *
+ * Creates an archive reader that requests samples from the specified URL.
+ * The URL must start with the scheme "json" followed by the HTTP or
+ * HTTPS URL of the archive server. The URL must include the context path,
+ * but not the servlet path.
+ *
+ * For example, the URL {@code json:http://localhost:8080/} will
+ * expect the archive server to run on port 8080 of the same computer and
+ * will use the URL
+ * {@code http://localhost:8080/archive/<key>/channels-by-pattern/<pattern>}
+ * when searching for channels.
+ *
+ * The archive key can be specified by appending {@code ;key=<key>} to the
+ * archive URL (e.g. {@code json:http://localhost:8080/;key=2}). If no key
+ * is specified, it is assumed to be 1.
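+ *
+ * For example (host and key are placeholders):
+ * <pre>{@code
+ * ArchiveReader reader = new JsonArchiveReader(
+ *         "json:http://localhost:8080/;key=2",
+ *         JsonArchivePreferences.getDefaultInstance());
+ * }</pre>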
+ * + * @param url + * archive URL with the scheme "json" followed by a valid HTTP HTTPS URL. + * @param preferences + * preferences that are used by this archive reader. + * @throws IllegalArgumentException + * if the specified URL is invalid. + */ + public JsonArchiveReader(String url, JsonArchivePreferences preferences) { + // Initialize the logger first. + this.logger = Logger.getLogger(getClass().getName()); + // The URL must start with the json: prefix. + if (!url.startsWith("json:")) { + throw new IllegalArgumentException( + "The URL \"" + + url + + "\" is not a valid archive URL, because it does " + + "not start with \"json:\"."); + } + // Remove the prefix. + var http_url = url.substring(5); + // Extract the key=… part, if present. + var key = 1; + var semicolon_index = http_url.indexOf(';'); + if (semicolon_index != -1) { + final var args_part = http_url.substring(semicolon_index + 1); + http_url = http_url.substring(0, semicolon_index); + if (args_part.startsWith("key=")) { + try { + key = Integer.parseInt(args_part.substring(4)); + } catch (NumberFormatException e) { + throw new IllegalArgumentException( + "The URL \"" + + url + + "\" is not a valid archive URL, because " + + "the argument \";" + + args_part + + "\" is invalid."); + } + } + } + // We want the base URL to always have a trailing slash, so that we + // have a common basis for constructing specific URLs. + if (!http_url.endsWith("/")) { + http_url = http_url + "/"; + } + // Initialize the class fields. + this.cleaner = Cleaner.create(); + this.http_url = http_url; + this.iterators = new WeakHashMap<>(); + this.json_factory = JsonFactory.builder() + .enable(JsonReadFeature.ALLOW_NON_NUMERIC_NUMBERS).build(); + // We want to ensure that the underlying input stream is closed when + // closing a parser. This should be the default, but it is better to be + // sure. + this.json_factory.enable(JsonParser.Feature.AUTO_CLOSE_SOURCE); + this.key = key; + this.preferences = Objects.requireNonNull(preferences); + // We have to initialize most fields before we can retrieve the + // description. + this.description = retrieveArchiveDescription(); + } + + @Override + public void cancel() { + synchronized (iterators) { + for (JsonValueIterator i : iterators.keySet()) { + // We only call cancel. The iterator is going to be removed + // from the map when it is closed. + i.cancel(); + } + } + } + + @Override + public void close() { + // We do nothing here, because we do not hold any expensive resources + // that need to be closed. 
+ } + + @Override + public String getDescription() { + return description; + } + + @Override + public Collection getNamesByPattern(String glob_pattern) + throws Exception { + final var url = "/" + key + "/channels-by-pattern/" + + URLEncoder.encode(glob_pattern, StandardCharsets.UTF_8); + try (final var parser = doGetJson(url)) { + var token = parser.nextToken(); + if (token == null) { + throw new IOException("Unexpected end of stream."); + } + if (token != JsonToken.START_ARRAY) { + throw new JsonParseException( + parser, + "Expected START_ARRAY but got " + token, + parser.getTokenLocation()); + } + final var channel_names = new LinkedList(); + while (true) { + token = parser.nextToken(); + if (token == null) { + throw new IOException("Unexpected end of stream."); + } + if (token == JsonToken.END_ARRAY) { + break; + } + if (token == JsonToken.VALUE_STRING) { + String channel_name = parser.getText(); + channel_names.add(channel_name); + } else { + throw new JsonParseException( + parser, + "Expected VALUE_STRING but got " + token, + parser.getTokenLocation()); + } + } + return channel_names; + } + } + + @Override + public ValueIterator getOptimizedValues( + String name, Instant start, Instant end, int count) + throws UnknownChannelException, Exception { + return getValues(name, start, end, count); + } + + @Override + public ValueIterator getRawValues( + String name, Instant start, Instant end) + throws UnknownChannelException, Exception { + return getValues(name, start, end, null); + } + + /** + * Converts a {@link BigInteger} representing the number of nanoseconds + * since epoch to an {@link Instant}. + * + * @param timestamp + * number of nanoseconds since UNIX epoch (January 1st, 1970, + * 00:00:00 UTC). + * @return + * instant representing the timestamp. + */ + private static BigInteger timestampToBigInteger(final Instant timestamp) { + return BigInteger.valueOf(timestamp.getNano()).add( + BigInteger.valueOf(timestamp.getEpochSecond()).multiply( + ONE_BILLION)); + } + + /** + *
+ * Sends a GET request to the archive source and returns the
+ * response.
+ * + * @param url + * URL which shall be requested. Must start with a forward slash and be + * relative to the base HTTP url configured for this reader. + * @return + * input stream that provides the HTTP server’s response. + * @throws IOException + * if the URL is malformed, the connection cannot be opened, or the input + * stream cannot be retrieved. + */ + private InputStream doGet(String url) throws IOException { + final var request_url = this.http_url + "archive" + url; + final var connection = new URL(request_url).openConnection(); + connection.addRequestProperty("Accept-Encoding", "gzip, deflate"); + connection.connect(); + final var content_encoding = connection.getHeaderField( + "Content-Encoding"); + final var input_stream = connection.getInputStream(); + try { + if (content_encoding != null) { + if (content_encoding.equals("gzip")) { + return new GZIPInputStream(input_stream); + } else if (content_encoding.equals("deflate")) { + return new DeflaterInputStream(input_stream); + } + } + return input_stream; + } catch (IOException | RuntimeException e) { + input_stream.close(); + throw e; + } + } + + /** + *
+ * Sends a GET request to the archive source and returns a
+ * JSON parser for the response.
+ * + * @param url + * URL which shall be requested. Must start with a forward slash and be + * relative to the base HTTP url configured for this reader. + * @return + * JSON parser that parses HTTP server’s response. + * @throws IOException + * if the URL is malformed, the connection cannot be opened, or the JSON + * parser cannot be created. + */ + private JsonParser doGetJson(String url) throws IOException { + final var input_stream = doGet(url); + try { + return json_factory.createParser(input_stream); + } catch (IOException | RuntimeException e) { + // If we could not create the parser, we have to close the input + // stream. Otherwise, the input stream is going to be closed when + // the parser is closed. + input_stream.close(); + throw e; + } + } + + /** + * Sends a request for samples to the archive server and returns an + * iterator providing the samples. + * + * @param name + * channel name in the archive. + * @param start + * beginning of the time period for which samples shall be retrieved. + * @param end + * end of the time period for which samples shall be retrieved. + * @param count + * approximate number of samples that shall be retrieved. If + * null raw samples shall be retrieved. + * @return + * iterator iterating over the samples for the specified time period in + * ascending order by time. + * @throws IOException + * if there is an error while requesting the samples. If an error occurs + * later, while using the iterator, no exception is thrown and the + * iterator’s hasNext() method simply returns false. + * @throws UnknownChannelException + * if the specified channel is not present in the archive. + */ + private JsonValueIterator getValues( + final String name, + final Instant start, + final Instant end, + final Integer count) + throws IOException, UnknownChannelException { + // Construct the request URL. + final var sb = new StringBuilder(); + sb.append("/"); + sb.append(key); + sb.append("/samples/"); + sb.append(URLEncoder.encode(name, StandardCharsets.UTF_8)); + sb.append("?start="); + sb.append(timestampToBigInteger(start)); + sb.append("&end="); + sb.append(timestampToBigInteger(end)); + if (count != null) { + sb.append("&count="); + sb.append(count); + } + final var request_url = sb.toString(); + // Send the request and create the JSON parser for the response. + final JsonParser parser; + try { + parser = doGetJson(request_url); + } catch (FileNotFoundException e) { + throw new UnknownChannelException(name); + } + // Before creating the iterator, we have to advance the parser to the + // first token. + try { + parser.nextToken(); + } catch (IOException | RuntimeException e) { + parser.close(); + throw e; + } + // Prepare the cleanup action. This action is executed when the + // iterator is closed or garbage collected. + final Runnable iterator_cleanup_action = () -> { + try { + parser.close(); + } catch (IOException e) { + // We ignore an exception that happens on cleanup. + } + }; + // Create an iterator based on the JSON parser. + try { + final var iterator = new JsonValueIterator( + parser, + this::unregisterValueIterator, + request_url, + preferences.honor_zero_precision()); + // We register the iterator. This has two purposes: First, we have to + // be able to call its cancel() method. Second, we need to close the + // parser when the iterator is closed or garbage collected. We do + // not register the iterator if it has no more elements. 
In this + // case, it might already be closed (and if it is not, we close it + // now), so we do not have run any cleanup actions either and if we + // registered it, it would never be unregistered because it is + // already closed. + if (iterator.hasNext()) { + registerValueIterator(iterator, iterator_cleanup_action); + } else { + // The iterator should already be closed, but calling the + // close() method anyway does not hurt. + iterator.close(); + } + return iterator; + } catch (IOException | RuntimeException e) { + // If we cannot create the iterator, we have to close the parser + // now. First, it is not going to be used for anything else. + // Second, the iterator does not exist, so it will not be closed + // when the iterator is closed. + parser.close(); + throw e; + } + } + + /** + * Registers a value iterator with this reader. This method is only + * intended for use by the {@link JsonValueIterator} constructor. + * + * @param iterator + * iterator that is calling this method. + * @param cleanup_action + * cleanup action that shall be run when the iterator is garbage + * collected or when {@link #unregisterValueIterator(JsonValueIterator)} + * is called for the iterator. + */ + private void registerValueIterator( + JsonValueIterator iterator, Runnable cleanup_action) { + // If the iterator has not been closed properly, we have to ensure that + // we close the JSON parser and input stream. Usually, this will happen + // when unregisterValueIterator is called, which is called by the + // iterator’s close method. However, if close is never called for some + // reason, registering the cleanup action ensures that the external + // resources are freed. We cannot explicitly remove the iterator from + // our iterators map in this case, but this is not a problem because + // the WeakHashMap will automatically remove entries when the key is + // garbage collected. + final var cleanable = cleaner.register(iterator, cleanup_action); + synchronized (iterators) { + iterators.put(iterator, cleanable); + } + } + + /** + * Retrieves the archive description from the archive server. If the + * description cannot be received, a warning is logged and a generic + * description is returned. + * + * @return + * the description for the archive specified by the URL and archive key or + * a generic description if the archive information cannot be retrieved + * from the server. + * @throws IllegalArgumentException + * if the server sends valid archive information, but it does not contain + * any information for the specified archive key. + */ + private String retrieveArchiveDescription() { + try (final var parser = doGetJson("/")) { + // We have to advance to the first token before calling + // readArchiveInfos(…). + parser.nextToken(); + final var archive_infos = JsonArchiveInfoReader + .readArchiveInfos(parser); + for (final var archive_info : archive_infos) { + if (archive_info.archive_key() == key) { + return archive_info.archive_description(); + } + } + throw new IllegalArgumentException( + "The server at \"" + + http_url + + "\" does not provide an archive with the key " + + key + + "."); + } catch (IOException e) { + logger.log( + Level.WARNING, + "Could not load archive information from server for URL \"" + + http_url + + "\"."); + // If we cannot get the archive description, we still want to + // initialize the archive reader. Maybe there is a temporary + // network problem and the archive reader will work correctly + // later. 
So, instead of throwing an exception, we rather use a + // generic description instead of the one retrieved from the + // server. + return "Provides archive access over HTTP/JSON."; + } + } + + /** + * Unregister an iterator that has previously been registered. This method + * is called when the iterator is closed. + * + * @param iterator + * iterator that was previously registered using + * {@link #registerValueIterator(JsonValueIterator, Runnable)}. + */ + private void unregisterValueIterator(JsonValueIterator iterator) { + final Cleaner.Cleanable cleanable; + synchronized (iterators) { + cleanable = iterators.remove(iterator); + } + if (cleanable != null) { + cleanable.clean(); + } + } + +} diff --git a/app/databrowser-json/src/main/java/org/phoebus/archive/reader/json/JsonArchiveReaderFactory.java b/app/databrowser-json/src/main/java/org/phoebus/archive/reader/json/JsonArchiveReaderFactory.java new file mode 100644 index 0000000000..9263c14c53 --- /dev/null +++ b/app/databrowser-json/src/main/java/org/phoebus/archive/reader/json/JsonArchiveReaderFactory.java @@ -0,0 +1,43 @@ +/******************************************************************************* + * Copyright (c) 2013-2024 aquenos GmbH. + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Public License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/legal/epl-v10.html + ******************************************************************************/ + +package org.phoebus.archive.reader.json; + +import org.phoebus.archive.reader.ArchiveReader; +import org.phoebus.archive.reader.spi.ArchiveReaderFactory; + +/** + *
+ * Factory for {@link JsonArchiveReader} instances. This type of archive reader
+ * handles archive URLs starting with {@code json:} and implements the
+ * JSON archive access protocol 1.0.
+ *
+ * Instances of this class are thread-safe.
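+ *
+ * This factory is normally discovered through the Java service-loader
+ * mechanism (see the {@code META-INF/services} entry for
+ * {@code org.phoebus.archive.reader.spi.ArchiveReaderFactory}). For
+ * illustration, direct use could look like this (URL is a placeholder):
+ * <pre>{@code
+ * ArchiveReader reader = new JsonArchiveReaderFactory()
+ *         .createReader("json:http://localhost:8080/");
+ * }</pre>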
+ */ +public class JsonArchiveReaderFactory implements ArchiveReaderFactory { + + @Override + public ArchiveReader createReader(String url) throws Exception { + if (!url.startsWith("json:")) { + throw new IllegalArgumentException( + "URL must start with scheme \"json:\"."); + } + return new JsonArchiveReader( + url, JsonArchivePreferences.getDefaultInstance()); + } + + @Override + public String getPrefix() { + return "json"; + } + +} diff --git a/app/databrowser-json/src/main/java/org/phoebus/archive/reader/json/internal/JsonArchiveInfoReader.java b/app/databrowser-json/src/main/java/org/phoebus/archive/reader/json/internal/JsonArchiveInfoReader.java new file mode 100644 index 0000000000..3a5074df13 --- /dev/null +++ b/app/databrowser-json/src/main/java/org/phoebus/archive/reader/json/internal/JsonArchiveInfoReader.java @@ -0,0 +1,180 @@ +/******************************************************************************* + * Copyright (c) 2013-2024 aquenos GmbH. + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Public License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/legal/epl-v10.html + ******************************************************************************/ + +package org.phoebus.archive.reader.json.internal; + +import com.fasterxml.jackson.core.JsonParseException; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonToken; + +import java.io.IOException; +import java.util.LinkedList; +import java.util.List; + +/** + * Reads a {@link ArchiveInfo} objects from a {@link JsonParser}. + */ +public final class JsonArchiveInfoReader { + + /** + * Information about an archive that is available on the server. + * + * @param archive_description the archive’s description. + * @param archive_key key identifying the archive on the server. + * @param archive_name the archive’s name. + */ + public record ArchiveInfo( + String archive_description, + int archive_key, + String archive_name) { + } + + private JsonArchiveInfoReader() { + } + + /** + * Reads a {@link ArchiveInfo} value from a {@link JsonParser}. When + * calling this method, the parser’s current token must be + * {@link JsonToken#START_ARRAY START_ARRAY} and when the method returns + * successfully, the parser’s current token is the corresponding + * {@link JsonToken#END_ARRAY END_ARRAY}. + * + * @param parser JSON parser from which the tokens are read. + * @return list representing the parsed JSON array. + * @throws IOException + * if the JSON data is malformed or there is an I/O problem. 
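+ *
+ * Illustrative sketch (key, name and description are placeholders;
+ * exception handling is omitted). The parser must already be positioned
+ * on the START_ARRAY token:
+ * <pre>{@code
+ * JsonParser parser = new JsonFactory().createParser(
+ *         "[{\"key\": 1, \"name\": \"Archive\","
+ *         + " \"description\": \"Example archive\"}]");
+ * parser.nextToken();
+ * List<ArchiveInfo> infos =
+ *         JsonArchiveInfoReader.readArchiveInfos(parser);
+ * }</pre>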
+ */ + public static List readArchiveInfos(JsonParser parser) + throws IOException { + var token = parser.currentToken(); + if (token == null) { + throw new IOException("Unexpected end of stream."); + } + if (token != JsonToken.START_ARRAY) { + throw new JsonParseException( + parser, + "Expected START_ARRAY but got " + token, + parser.getTokenLocation()); + } + final var archive_infos = new LinkedList(); + while (true) { + token = parser.nextToken(); + if (token == null) { + throw new IOException("Unexpected end of stream."); + } + if (token == JsonToken.END_ARRAY) { + break; + } + archive_infos.add(readArchiveInfo(parser)); + } + return archive_infos; + } + + private static void duplicateFieldIfNotNull( + final JsonParser parser, + final String field_name, + final Object field_value) + throws JsonParseException { + if (field_value != null) { + throw new JsonParseException( + parser, + "Field \"" + field_name + "\" occurs twice.", + parser.getTokenLocation()); + } + } + + private static ArchiveInfo readArchiveInfo(JsonParser parser) + throws IOException { + JsonToken token = parser.getCurrentToken(); + if (token != JsonToken.START_OBJECT) { + throw new JsonParseException( + parser, + "Expected START_OBJECT but got " + token, + parser.getTokenLocation()); + } + Integer archive_key = null; + String archive_name = null; + String archive_description = null; + String field_name = null; + while (true) { + token = parser.nextToken(); + if (token == null) { + throw new IOException("Unexpected end of stream."); + } + if (token == JsonToken.END_OBJECT) { + break; + } + if (field_name == null) { + if (token == JsonToken.FIELD_NAME) { + field_name = parser.getCurrentName(); + continue; + } else { + throw new JsonParseException( + parser, + "Expected FIELD_NAME but got " + token, + parser.getTokenLocation()); + } + } + switch (field_name) { + case "description" -> { + duplicateFieldIfNotNull( + parser, field_name, archive_description); + archive_description = readStringValue(parser); + } + case "key" -> { + duplicateFieldIfNotNull(parser, field_name, archive_key); + archive_key = readIntValue(parser); + } + case "name" -> { + duplicateFieldIfNotNull(parser, field_name, archive_name); + archive_name = readStringValue(parser); + } + default -> throw new JsonParseException( + parser, + "Found unknown field \"" + field_name + "\".", + parser.getTokenLocation()); + } + field_name = null; + } + if (archive_description == null + || archive_key == null + || archive_name == null) { + throw new JsonParseException( + parser, + "Mandatory field is missing in object.", + parser.getTokenLocation()); + } + return new ArchiveInfo(archive_description, archive_key, archive_name); + } + + private static int readIntValue(final JsonParser parser) + throws IOException { + final var token = parser.getCurrentToken(); + if (token != JsonToken.VALUE_NUMBER_INT) { + throw new JsonParseException( + parser, + "Expected VALUE_NUMBER_INT but got " + + token, + parser.getTokenLocation()); + } + return parser.getIntValue(); + } + + private static String readStringValue(final JsonParser parser) + throws IOException { + final var token = parser.currentToken(); + if (token != JsonToken.VALUE_STRING) { + throw new JsonParseException( + parser, + "Expected VALUE_STRING but got " + token, + parser.getTokenLocation()); + } + return parser.getText(); + } + +} diff --git a/app/databrowser-json/src/main/java/org/phoebus/archive/reader/json/internal/JsonVTypeReader.java 
b/app/databrowser-json/src/main/java/org/phoebus/archive/reader/json/internal/JsonVTypeReader.java new file mode 100644 index 0000000000..4febe23c57 --- /dev/null +++ b/app/databrowser-json/src/main/java/org/phoebus/archive/reader/json/internal/JsonVTypeReader.java @@ -0,0 +1,933 @@ +/******************************************************************************* + * Copyright (c) 2013-2024 aquenos GmbH. + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Public License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/legal/epl-v10.html + ******************************************************************************/ + +package org.phoebus.archive.reader.json.internal; + +import com.fasterxml.jackson.core.JsonParseException; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonToken; +import com.google.common.primitives.ImmutableDoubleArray; +import com.google.common.primitives.ImmutableIntArray; +import com.google.common.primitives.ImmutableLongArray; +import org.epics.util.array.CollectionNumbers; +import org.epics.util.array.ListDouble; +import org.epics.util.array.ListInteger; +import org.epics.util.array.ListLong; +import org.epics.util.stats.Range; +import org.epics.util.text.NumberFormats; +import org.epics.vtype.Alarm; +import org.epics.vtype.AlarmSeverity; +import org.epics.vtype.AlarmStatus; +import org.epics.vtype.Display; +import org.epics.vtype.EnumDisplay; +import org.epics.vtype.Time; +import org.epics.vtype.VDouble; +import org.epics.vtype.VDoubleArray; +import org.epics.vtype.VEnum; +import org.epics.vtype.VEnumArray; +import org.epics.vtype.VInt; +import org.epics.vtype.VIntArray; +import org.epics.vtype.VLong; +import org.epics.vtype.VLongArray; +import org.epics.vtype.VStatistics; +import org.epics.vtype.VString; +import org.epics.vtype.VStringArray; +import org.epics.vtype.VType; + +import java.io.IOException; +import java.math.BigInteger; +import java.text.NumberFormat; +import java.time.Instant; +import java.util.LinkedList; +import java.util.List; +import java.util.Locale; + +/** + * Reads a {@link org.epics.vtype.VType} from a {@link JsonParser}. + */ +public final class JsonVTypeReader { + + private enum ValueType { + DOUBLE("double"), + ENUM("enum"), + LONG("long"), + MIN_MAX_DOUBLE("minMaxDouble"), + STRING("string"); + + public final String name; + + ValueType(String name) { + this.name = name; + } + + } + + private final static BigInteger ONE_BILLION = BigInteger + .valueOf(1000000000L); + + private JsonVTypeReader() { + } + + /** + * Reads a {@link VType} value from a {@link JsonParser}. When calling this + * method, the parser’s current token must be {@link JsonToken#START_OBJECT + * START_OBJECT} and when the method returns successfully, the parser’s + * current token is the corresponding {@link JsonToken#END_OBJECT + * END_OBJECT}. + * + * @param parser + * JSON parser from which the tokens are read. + * @param honor_zero_precision + * whether a precision of zero should result in no fractional digits being + * used in the number format (true) or a default number + * format should be used when the precision is zero (false). + * This only applies to floating-point values. Integer values always use + * a number format that does not include fractional digits. + * @return value representing the parsed JSON object. + * @throws IOException + * if the JSON data is malformed or there is an I/O problem. 
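+ *
+ * Illustrative sketch (field values are placeholders; exception handling
+ * is omitted). The parser must already be positioned on the START_OBJECT
+ * token:
+ * <pre>{@code
+ * JsonParser parser = new JsonFactory().createParser(
+ *         "{\"time\": 1717000000000000000,"
+ *         + " \"severity\": {\"level\": \"OK\", \"hasValue\": true},"
+ *         + " \"status\": \"NO_ALARM\", \"quality\": \"Original\","
+ *         + " \"type\": \"double\", \"value\": [42.0]}");
+ * parser.nextToken();
+ * VType value = JsonVTypeReader.readValue(parser, true);
+ * }</pre>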
+ */ + public static VType readValue( + final JsonParser parser, boolean honor_zero_precision) + throws IOException { + JsonToken token = parser.getCurrentToken(); + if (token != JsonToken.START_OBJECT) { + throw new JsonParseException( + parser, + "Expected START_OBJECT but got " + token, + parser.getTokenLocation()); + } + Display display = null; + ImmutableDoubleArray double_value = null; + EnumDisplay enum_display = null; + ImmutableIntArray enum_value = null; + String field_name = null; + boolean found_value = false; + ImmutableLongArray long_value = null; + Double maximum = null; + Double minimum = null; + String quality = null; + AlarmSeverity severity = null; + String status = null; + Instant timestamp = null; + ValueType type = null; + List string_value = null; + while (true) { + token = parser.nextToken(); + if (token == null) { + throw new IOException("Unexpected end of stream."); + } + if (token == JsonToken.END_OBJECT) { + break; + } + if (field_name == null) { + if (token != JsonToken.FIELD_NAME) { + throw new JsonParseException( + parser, + "Expected FIELD_NAME but got " + token, + parser.getTokenLocation()); + } + field_name = parser.getCurrentName(); + continue; + } + switch (field_name) { + case "maximum" -> { + duplicateFieldIfNotNull(parser, field_name, maximum); + maximum = readDoubleValue(parser); + } + case "metaData" -> { + if (enum_display != null || display != null) { + throw new JsonParseException( + parser, + "Field \"" + field_name + "\" occurs twice.", + parser.getTokenLocation()); + } + Object metaData = readMetaData( + parser, honor_zero_precision); + if (metaData instanceof Display) { + display = (Display) metaData; + } else if (metaData instanceof EnumDisplay) { + enum_display = (EnumDisplay) metaData; + } else { + throw new RuntimeException( + "Return value of internal method readMetaData " + + "has unexpected type " + + metaData.getClass().getName() + + "."); + } + } + case "minimum" -> { + duplicateFieldIfNotNull(parser, field_name, minimum); + minimum = readDoubleValue(parser); + } + case "quality" -> { + // We do not use the quality field any longer (Phoebus’s + // VType system does not support it), but we still want to + // ensure that the data is well-formed. 
+ duplicateFieldIfNotNull(parser, field_name, quality); + quality = readStringValue(parser); + } + case "severity" -> { + duplicateFieldIfNotNull(parser, field_name, severity); + severity = readSeverity(parser); + } + case "status" -> { + duplicateFieldIfNotNull(parser, field_name, status); + status = readStringValue(parser); + } + case "time" -> { + duplicateFieldIfNotNull(parser, field_name, timestamp); + timestamp = readInstant(parser); + } + case "type" -> { + duplicateFieldIfNotNull(parser, field_name, type); + final var type_name = readStringValue(parser); + type = switch (type_name.toLowerCase(Locale.ROOT)) { + case "double" -> ValueType.DOUBLE; + case "enum" -> ValueType.ENUM; + case "long" -> ValueType.LONG; + case "minmaxdouble" -> ValueType.MIN_MAX_DOUBLE; + case "string" -> ValueType.STRING; + default -> throw new JsonParseException( + parser, + "Unknown type \"" + type_name + "\".", + parser.getTokenLocation()); + }; + } + case"value" -> { + if (found_value) { + throw new JsonParseException( + parser, + "Field \"" + field_name + "\" occurs twice.", + parser.getTokenLocation()); + } + if (type == null) { + throw new JsonParseException( + parser, + "\"value\" field must be specified after " + + "\"type\" field.", + parser.getTokenLocation()); + } + found_value = true; + switch (type) { + case DOUBLE, MIN_MAX_DOUBLE -> { + double_value = readDoubleArray(parser); + } + case ENUM -> { + enum_value = readIntArray(parser); + } + case LONG -> { + long_value = readLongArray(parser); + } + case STRING -> { + string_value = readStringArray(parser); + } + } + } + default -> throw new JsonParseException( + parser, + "Found unknown field \"" + field_name + "\".", + parser.getTokenLocation()); + } + field_name = null; + } + if (!found_value + || quality == null + || severity == null + || status == null + || timestamp == null + || type == null) { + throw new JsonParseException( + parser, + "Mandatory field is missing in object.", + parser.getTokenLocation()); + } + if (type != ValueType.ENUM && enum_display != null) { + throw new JsonParseException( + parser, + "Value of type \"" + + type.name + + "\" does not accept enum meta-data.", + parser.getTokenLocation()); + } + if (type != ValueType.MIN_MAX_DOUBLE && ( + minimum != null || maximum != null)) { + throw new JsonParseException( + parser, + "Invalid field specified for value of type\"" + + type.name + + "\".", + parser.getTokenLocation()); + } + if ((type == ValueType.ENUM || type == ValueType.STRING) + && display != null) { + throw new JsonParseException( + parser, + "Value of type \"" + + type.name + + "\" does not accept numeric meta-data.", + parser.getTokenLocation()); + } + final var alarm = Alarm.of(severity, AlarmStatus.NONE, status); + final var time = Time.of(timestamp); + switch (type) { + case DOUBLE -> { + if (display == null) { + display = Display.none(); + } + if (double_value.length() == 1) { + return VDouble.of( + double_value.get(0), alarm, time, display); + } else { + return VDoubleArray.of( + CollectionNumbers.toListDouble( + double_value.toArray()), + alarm, + time, + display); + } + } + case ENUM -> { + // Ensure that we have labels for all indices. 
+ int min_value = Integer.MAX_VALUE; + int max_value = Integer.MIN_VALUE; + for (var i = 0; i < enum_value.length(); ++i) { + final var value = enum_value.get(i); + min_value = Math.min(min_value, value); + max_value = Math.max(max_value, value); + } + // If we have a negative value or we have a value without a + // label, we cannot use the meta-data and return a regular + // integer instead. + if (min_value < 0 + || max_value >= enum_display.getChoices().size()) { + enum_display = null; + } + // If there is no meta-data, we cannot return an enum because + // an enum must have meta-data and this meta-data must include + // labels for all values. + if (enum_display == null) { + // If there are no labels, there is no benefit in returning + // an enum, so we rather return an integer type. + display = Display.of( + Range.undefined(), + Range.undefined(), + Range.undefined(), + Range.undefined(), + "", + NumberFormats.precisionFormat(0)); + if (enum_value.length() == 1) { + return VInt.of( + enum_value.get(0), + alarm, + time, + display); + } else { + return VIntArray.of( + toListInteger(enum_value), + alarm, + time, + display); + } + } + if (enum_value.length() == 1) { + return VEnum.of( + enum_value.get(0), enum_display, alarm, time); + } else { + return VEnumArray.of( + toListInteger(enum_value), + enum_display, + alarm, + time); + } + } + case LONG -> { + if (display == null) { + display = Display.none(); + } else if (display.getFormat() + .getMaximumFractionDigits() != 0) { + // The Display instance that was generated by readMetaData + // might use a number format that includes fractional + // digits because that function does not know yet that we + // are dealing with an integer value. In this case, we + // replace the number format with one that does not include + // fractional digits. + display = Display.of( + display.getDisplayRange(), + display. getAlarmRange(), + display.getWarningRange(), + display.getControlRange(), + display.getUnit(), + NumberFormats.precisionFormat(0), + display.getDescription()); + } + if (long_value.length() == 1) { + return VLong.of(long_value.get(0), alarm, time, display); + } else { + return VLongArray.of( + toListLong(long_value), + alarm, + time, + display); + } + } + case MIN_MAX_DOUBLE -> { + if (display == null) { + display = Display.none(); + } + if (minimum == null || maximum == null) { + throw new JsonParseException( + parser, + "Mandatory field is missing in object.", + parser.getTokenLocation()); + } + if (double_value.length() == 1) { + return VStatistics.of( + double_value.get(0), + Double.NaN, + minimum, + maximum, + 0, + alarm, + time, + display); + } else { + // There is no type for arrays with statistics, so we have + // to choose between dropping statistics information and + // dropping array elements. We choose to drop statistics + // information. This is supposed to be a rare exception + // anyway, there typically is no sense in building this + // kind of statistics for arrays. 
+ return VDoubleArray.of( + toListDouble(double_value), + alarm, + time, + display); + } + } + case STRING -> { + if (string_value.size() == 1) { + return VString.of(string_value.get(0), alarm, time); + } else { + return VStringArray.of(string_value, alarm, time); + } + } + } + throw new JsonParseException( + parser, + "Invalid value type \"" + type + "\".", + parser.getTokenLocation()); + } + + private static Instant bigIntegerToTimestamp(final BigInteger big_int) { + BigInteger[] quotient_and_remainder = big_int + .divideAndRemainder(ONE_BILLION); + return Instant.ofEpochSecond( + quotient_and_remainder[0].longValue(), + quotient_and_remainder[1].longValue()); + } + + private static void duplicateFieldIfNotNull( + final JsonParser parser, + final String field_name, + final Object field_value) + throws JsonParseException { + if (field_value != null) { + throw new JsonParseException( + parser, + "Field \"" + field_name + "\" occurs twice.", + parser.getTokenLocation()); + } + } + + private static boolean readBooleanValue(final JsonParser parser) + throws IOException { + final var token = parser.currentToken(); + if (token != JsonToken.VALUE_TRUE + && token != JsonToken.VALUE_FALSE) { + throw new JsonParseException( + parser, + "Expected VALUE_TRUE or VALUE_FALSE but got " + + token, + parser.getTokenLocation()); + } + return parser.getBooleanValue(); + } + + private static ImmutableDoubleArray readDoubleArray( + final JsonParser parser) throws IOException { + final var array_builder = ImmutableDoubleArray.builder(1); + var token = parser.getCurrentToken(); + if (token != JsonToken.START_ARRAY) { + throw new JsonParseException( + parser, + "Expected START_ARRAY but got " + token, + parser.getTokenLocation()); + } + while (true) { + token = parser.nextToken(); + if (token == null) { + throw new IOException("Unexpected end of stream."); + } + if (token == JsonToken.END_ARRAY) { + break; + } + array_builder.add(readDoubleValue(parser)); + } + return array_builder.build(); + } + + private static double readDoubleValue(final JsonParser parser) + throws IOException { + final var token = parser.currentToken(); + if (token != JsonToken.VALUE_NUMBER_INT + && token != JsonToken.VALUE_NUMBER_FLOAT) { + if (token != JsonToken.VALUE_STRING) { + throw new JsonParseException( + parser, + "Expected VALUE_NUMBER_INT, VALUE_NUMBER_FLOAT, or " + + "VALUE_STRING but got " + + token, + parser.getTokenLocation()); + } + return stringToSpecialDouble(parser.getText(), + parser); + } else { + return parser.getDoubleValue(); + } + } + + private static Instant readInstant(final JsonParser parser) + throws IOException { + final var token = parser.currentToken(); + if (token != JsonToken.VALUE_NUMBER_INT) { + throw new JsonParseException( + parser, + "Expected VALUE_NUMBER_INT but got " + + token, + parser.getTokenLocation()); + } + return bigIntegerToTimestamp(parser.getBigIntegerValue()); + } + + private static ImmutableIntArray readIntArray(final JsonParser parser) + throws IOException { + final var array_builder = ImmutableIntArray.builder(1); + var token = parser.getCurrentToken(); + if (token != JsonToken.START_ARRAY) { + throw new JsonParseException( + parser, + "Expected START_ARRAY but got " + token, + parser.getTokenLocation()); + } + while (true) { + token = parser.nextToken(); + if (token == null) { + throw new IOException("Unexpected end of stream."); + } + if (token == JsonToken.END_ARRAY) { + break; + } + array_builder.add(readIntValue(parser)); + } + return array_builder.build(); + } + + private static 
int readIntValue(final JsonParser parser) + throws IOException { + final var token = parser.getCurrentToken(); + if (token != JsonToken.VALUE_NUMBER_INT) { + throw new JsonParseException( + parser, + "Expected VALUE_NUMBER_INT but got " + + token, + parser.getTokenLocation()); + } + return parser.getIntValue(); + } + + private static ImmutableLongArray readLongArray(final JsonParser parser) + throws IOException { + final var array_builder = ImmutableLongArray.builder(1); + var token = parser.getCurrentToken(); + if (token != JsonToken.START_ARRAY) { + throw new JsonParseException( + parser, + "Expected START_ARRAY but got " + token, + parser.getTokenLocation()); + } + while (true) { + token = parser.nextToken(); + if (token == null) { + throw new IOException("Unexpected end of stream."); + } + if (token == JsonToken.END_ARRAY) { + break; + } + array_builder.add(readLongValue(parser)); + } + return array_builder.build(); + } + + private static long readLongValue(final JsonParser parser) + throws IOException { + final var token = parser.getCurrentToken(); + if (token != JsonToken.VALUE_NUMBER_INT) { + throw new JsonParseException( + parser, + "Expected VALUE_NUMBER_INT but got " + + token, + parser.getTokenLocation()); + } + return parser.getLongValue(); + } + + /** + * Reads the meta-data associated with a value. There are different + * types of meta-data for numeric and enum values, therefore the type of + * the return value has to be determined at runtime. + * + * @param parser the JSON parser that is used to read the meta-data. + * @param honor_zero_precision + * whether a precision of zero should result in no fractional digits being + * used in the number format (true) or a default number + * format should be used when the precision is zero (false). + * @return + * an instance of {@link String}[] (storing the enum labels) + * or an instance of {@link Display} (storing numeric limits and number + * formatting information). + * @throws IOException + * if an error occurs while parsing the JSON input (e.g. interrupted + * stream, malformed data). 
+ */ + private static Object readMetaData( + final JsonParser parser, boolean honor_zero_precision) + throws IOException { + JsonToken token = parser.getCurrentToken(); + if (token == null) { + throw new IOException("Unexpected end of stream."); + } + if (token != JsonToken.START_OBJECT) { + throw new JsonParseException( + parser, + "Expected START_OBJECT but got " + token, + parser.getTokenLocation()); + } + Double alarm_high = null; + Double alarm_low = null; + Double display_high = null; + Double display_low = null; + String field_name = null; + Integer precision = null; + List states = null; + String type = null; + String units = null; + Double warn_high = null; + Double warn_low = null; + while (true) { + token = parser.nextToken(); + if (token == null) { + throw new IOException("Unexpected end of stream."); + } + if (token == JsonToken.END_OBJECT) { + break; + } + if (field_name == null) { + if (token != JsonToken.FIELD_NAME) { + throw new JsonParseException( + parser, + "Expected FIELD_NAME but got " + token, + parser.getTokenLocation()); + } + field_name = parser.getCurrentName(); + continue; + } + switch (field_name) { + case "precision" -> { + duplicateFieldIfNotNull(parser, field_name, precision); + precision = readIntValue(parser); + } + case "type" -> { + duplicateFieldIfNotNull(parser, field_name, type); + type = readStringValue(parser); + } + case "units" -> { + duplicateFieldIfNotNull(parser, field_name, units); + units = readStringValue(parser); + } + case "displayLow" -> { + duplicateFieldIfNotNull(parser, field_name, display_low); + display_low = readDoubleValue(parser); + } + case "displayHigh" -> { + duplicateFieldIfNotNull(parser, field_name, display_high); + display_high = readDoubleValue(parser); + } + case "warnLow" -> { + duplicateFieldIfNotNull(parser, field_name, warn_low); + warn_low = readDoubleValue(parser); + } + case "warnHigh" -> { + duplicateFieldIfNotNull(parser, field_name, warn_high); + warn_high = readDoubleValue(parser); + } + case "alarmLow" -> { + duplicateFieldIfNotNull(parser, field_name, alarm_low); + alarm_low = readDoubleValue(parser); + } + case "alarmHigh" -> { + duplicateFieldIfNotNull(parser, field_name, alarm_high); + alarm_high = readDoubleValue(parser); + } + case "states" -> { + duplicateFieldIfNotNull(parser, field_name, states); + states = readStringArray(parser); + } + default -> throw new JsonParseException( + parser, + "Found unknown field \"" + field_name + "\".", + parser.getTokenLocation()); + } + field_name = null; + } + if (type == null) { + throw new JsonParseException( + parser, + "Mandatory field is missing in object.", + parser.getTokenLocation()); + + } + if (type.equalsIgnoreCase("enum")) { + if (states == null) { + throw new JsonParseException( + parser, + "Mandatory field is missing in object.", + parser.getTokenLocation()); + } + if (alarm_high != null + || alarm_low != null + || display_high != null + || display_low != null + || precision != null + || units != null + || warn_high != null + || warn_low != null) { + throw new JsonParseException( + parser, + "Invalid field specified for enum meta-data.", + parser.getTokenLocation()); + } + return EnumDisplay.of(states); + } else if (type.equalsIgnoreCase("numeric")) { + if (alarm_high == null + || alarm_low == null + || display_high == null + || display_low == null + || precision == null + || units == null + || warn_high == null + || warn_low == null) { + throw new JsonParseException( + parser, + "Mandatory field is missing in object.", + parser.getTokenLocation()); + 
} + if (states != null) { + throw new JsonParseException( + parser, + "Invalid field specified for numeric meta-data.", + parser.getTokenLocation()); + } + final NumberFormat format; + if (precision > 0 || (precision == 0 && honor_zero_precision)) { + format = NumberFormats.precisionFormat(precision); + } else { + format = NumberFormats.toStringFormat(); + } + return Display.of( + Range.of(display_low, display_high), + Range.of(alarm_low, alarm_high), + Range.of(warn_low, warn_high), + Range.undefined(), + units, + format); + } else { + throw new JsonParseException( + parser, + "Invalid meta-data type \"" + type + "\".", + parser.getTokenLocation()); + } + } + + private static AlarmSeverity readSeverity(final JsonParser parser) + throws IOException { + var token = parser.getCurrentToken(); + if (token != JsonToken.START_OBJECT) { + throw new JsonParseException( + parser, + "Expected START_OBJECT but got " + token, + parser.getTokenLocation()); + } + String field_name = null; + Boolean has_value = null; + String level_string = null; + while (true) { + token = parser.nextToken(); + if (token == null) { + throw new IOException("Unexpected end of stream."); + } + if (token == JsonToken.END_OBJECT) { + break; + } + if (field_name == null) { + if (token != JsonToken.FIELD_NAME) { + throw new JsonParseException( + parser, + "Expected FIELD_NAME but got " + token, + parser.getTokenLocation()); + } + field_name = parser.getCurrentName(); + } else { + if (field_name.equals("level")) { + duplicateFieldIfNotNull(parser, field_name, level_string); + level_string = readStringValue(parser); + } else if (field_name.equals("hasValue")) { + // We do not use the hasValue field any longer (Phoebus’s + // VType system does not support it), but we still want to + // ensure that the data is well-formed. 
+ duplicateFieldIfNotNull(parser, field_name, has_value); + has_value = readBooleanValue(parser); + } else { + throw new JsonParseException( + parser, + "Found unknown field \"" + field_name + "\".", + parser.getTokenLocation()); + } + field_name = null; + } + } + if (has_value == null || level_string == null) { + throw new JsonParseException( + parser, + "Mandatory field is missing in object.", + parser.getTokenLocation()); + } + return switch(level_string.toUpperCase(Locale.ROOT)) { + case "OK" -> AlarmSeverity.NONE; + case "MINOR" -> AlarmSeverity.MINOR; + case "MAJOR" -> AlarmSeverity.MAJOR; + case "INVALID" -> AlarmSeverity.INVALID; + default -> throw new JsonParseException( + parser, + "Unknown severity \"" + level_string + "\".", + parser.getTokenLocation()); + }; + } + + private static List readStringArray(final JsonParser parser) + throws IOException { + final var elements = new LinkedList(); + JsonToken token = parser.getCurrentToken(); + if (token != JsonToken.START_ARRAY) { + throw new JsonParseException( + parser, + "Expected START_ARRAY but got " + token, + parser.getTokenLocation()); + } + while (true) { + token = parser.nextToken(); + if (token == null) { + throw new IOException("Unexpected end of stream."); + } + if (token == JsonToken.END_ARRAY) { + break; + } + if (token == JsonToken.VALUE_STRING) { + elements.add(parser.getText()); + } else { + throw new JsonParseException( + parser, + "Expected VALUE_STRING but got " + token, + parser.getTokenLocation()); + } + } + return elements; + } + + private static String readStringValue(final JsonParser parser) + throws IOException { + final var token = parser.currentToken(); + if (token != JsonToken.VALUE_STRING) { + throw new JsonParseException( + parser, + "Expected VALUE_STRING but got " + token, + parser.getTokenLocation()); + } + return parser.getText(); + } + + private static double stringToSpecialDouble( + final String value, final JsonParser parser) throws IOException { + return switch (value.toLowerCase()) { + case "inf", "infinity", "+inf", "+infinity" -> ( + Double.POSITIVE_INFINITY); + case "-inf", "-infinity" -> Double.NEGATIVE_INFINITY; + case "nan" -> Double.NaN; + default -> throw new JsonParseException( + parser, + "String \"" + + value + + "\" does not qualify as a special double " + + "number.", + parser.getTokenLocation()); + }; + } + + private static ListDouble toListDouble(final ImmutableDoubleArray array) { + return new ListDouble() { + @Override + public double getDouble(int index) { + return array.get(index); + } + + @Override + public int size() { + return array.length(); + } + }; + } + + private static ListInteger toListInteger(final ImmutableIntArray array) { + return new ListInteger() { + @Override + public int getInt(int index) { + return array.get(index); + } + + @Override + public int size() { + return array.length(); + } + }; + } + + private static ListLong toListLong(final ImmutableLongArray array) { + return new ListLong() { + @Override + public long getLong(int index) { + return array.get(index); + } + + @Override + public int size() { + return array.length(); + } + }; + } + +} diff --git a/app/databrowser-json/src/main/java/org/phoebus/archive/reader/json/internal/JsonValueIterator.java b/app/databrowser-json/src/main/java/org/phoebus/archive/reader/json/internal/JsonValueIterator.java new file mode 100644 index 0000000000..d6909a4d70 --- /dev/null +++ b/app/databrowser-json/src/main/java/org/phoebus/archive/reader/json/internal/JsonValueIterator.java @@ -0,0 +1,224 @@ 
+/******************************************************************************* + * Copyright (c) 2013-2024 aquenos GmbH. + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Public License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/legal/epl-v10.html + ******************************************************************************/ + +package org.phoebus.archive.reader.json.internal; + +import com.fasterxml.jackson.core.JsonParseException; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonToken; +import org.epics.vtype.VType; +import org.phoebus.archive.reader.ValueIterator; +import org.phoebus.archive.reader.json.JsonArchiveReader; + +import java.io.IOException; +import java.util.NoSuchElementException; +import java.util.function.Consumer; +import java.util.logging.Level; +import java.util.logging.Logger; + +/** + *
+ * Iterator for the {@link JsonArchiveReader}. This class is only intended for
+ * instantiation by that class.
+ *
+ * Like most iterators, instances of this class are not thread-safe.
+ * The one exception is the {@link #cancel()} method, which may be called by
+ * any thread. In order to implement cancellation in a thread-safe way, calling
+ * this method only results in a flag being set. The iterator is then closed
+ * the next time {@link #hasNext()} is called.
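+ *
+ * Illustrative sketch of the intended use (the iterator variable is a
+ * placeholder obtained from the owning reader):
+ * <pre>{@code
+ * // Consuming thread:
+ * while (iterator.hasNext()) {
+ *     VType value = iterator.next();
+ * }
+ * iterator.close();
+ *
+ * // Any other thread may request cancellation; the iterator is then
+ * // closed by the consuming thread's next call to hasNext(), which
+ * // returns false.
+ * iterator.cancel();
+ * }</pre>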
+ */ +public class JsonValueIterator implements ValueIterator { + + private volatile boolean canceled = false; + private final boolean honor_zero_precision; + private final Logger logger; + private VType next_value; + private Consumer on_close; + private JsonParser parser; + private final String request_url; + + /** + * Create an iterator reading samples from a JSON parser. The parser is + * not closed when this iterator is closed. However, the + * on_close function is called when the iterator is closed, so + * the calling code can pass a function that closes the parser. + * + * @param parser + * JSON parser from which samples are read. The iterator expects that the + * parser’s current token is the start of an array and reads samples until + * the current token is the corresponding end of an array. + * @param on_close + * function that is called when the iterator is closed. May be + * null. + * @param request_url + * URL that was used to retrieve the JSON data. This is only used when + * logging error messages. + * @param honor_zero_precision + * whether a precision of zero should result in no fractional digits being + * used in the number format of returned values (true) or a + * default number format should be used when the precision is zero + * (false). This only applies to floating-point values. + * Integer values always use a number format that does not include + * fractional digits. + * @throws IOException + * if initial operations on the JSON parser fail or if the JSON document + * is malformed. Errors that occur later do not result in an exception + * being thrown. Instead, the error is logged and {@link #hasNext()} + * returns false. + */ + public JsonValueIterator( + final JsonParser parser, + final Consumer on_close, + final String request_url, + final boolean honor_zero_precision) + throws IOException { + this.logger = Logger.getLogger(getClass().getName()); + this.honor_zero_precision = honor_zero_precision; + this.on_close = on_close; + this.parser = parser; + this.request_url = request_url; + final var token = this.parser.currentToken(); + if (token == null) { + throw new IOException("Unexpected end of stream."); + } + if (token != JsonToken.START_ARRAY) { + // The server response is malformed, so we cannot continue. + throw new JsonParseException( + parser, + "Expected START_ARRAY but got " + token, + parser.getTokenLocation()); + } + // We try to read the first sample. If that sample is malformed, the + // exception is raised before an iterator is even returned. If it is + // well-formed, there is a good chance that the remaining samples are + // going to be well-formed as well. + hasNextInternal(); + } + + /** + * Cancels this iterator. Subsequent calls to {@link #hasNext()} return + * false. For use by {@link JsonArchiveReader} only. + */ + public void cancel() { + this.canceled = true; + } + + @Override + public void close() { + // The parser field also serves as an indicator whether this iterator + // has been closed. If the parser is null, we know that the iterator + // has already been closed. + if (parser != null) { + // We have to call the on_close callback. Besides other things, + // this ensures that the parser is closed. + if (on_close != null) { + on_close.accept(this); + } + // Give up references that are not needed any longer. Setting the + // parser reference to null also has the effect that this iterator + // is marked as closed. 
+ next_value = null; + on_close = null; + parser = null; + } + } + + @Override + public boolean hasNext() { + final boolean has_next; + // The hasNext method is not supposed to throw an exception, so when + // there is an exception, we log it and return false. + try { + has_next = hasNextInternal(); + } catch (IOException e) { + close(); + logger.log( + Level.SEVERE, + "Error while trying to read sample from server response " + + "for URL \"" + + request_url + + "\": " + + e.getMessage(), + e); + return false; + } + return has_next; + } + + @Override + public VType next() { + // We check whether next_value is null before calling hasNext(). If we + // called hasNext() directly, this method would throw an exception when + // cancel was called between calling hasNext() and next(). As cancel() + // may be called by a different thread, this could result in an + // unexpected NoSuchElementException being thrown. Therefore, we rather + // return the already retrieved element and close the iterator on the + // next call to hasNext(). + if (next_value == null && !hasNext()) { + // If the parser is null, the last call to hasNext() might have + // returned true, but close() has been called in between. + if (parser == null) { + throw new NoSuchElementException( + "This iterator has been closed, so no more elements " + + "available."); + } + // The last call to hasNext() must have returned false, so this + // call to next clearly is a violation of the API. + throw new NoSuchElementException( + "next() called while hasNext() == false."); + } + VType returnValue = next_value; + next_value = null; + return returnValue; + } + + private boolean fetchNext() throws IOException { + if (canceled) { + return false; + } + final var token = parser.nextToken(); + if (token == null) { + throw new IOException( + "Stream ended prematurely while trying to read next " + + "sample."); + } + if (token == JsonToken.END_ARRAY) { + // There should be no data after the end of the array. + final var next_token = parser.nextToken(); + if (next_token != null) { + throw new JsonParseException( + parser, + "Expected end-of-stream but found " + next_token + ".", + parser.getTokenLocation()); + } + return false; + } + next_value = JsonVTypeReader.readValue(parser, honor_zero_precision); + return true; + } + + private boolean hasNextInternal() throws IOException { + if (next_value != null) { + // We already fetched the next value. + return true; + } + if (parser == null) { + // The iterator has been closed. 
+ return false; + } + if (fetchNext()) { + return true; + } + close(); + return false; + } + +} diff --git a/app/databrowser-json/src/main/resources/META-INF/services/org.phoebus.archive.reader.spi.ArchiveReaderFactory b/app/databrowser-json/src/main/resources/META-INF/services/org.phoebus.archive.reader.spi.ArchiveReaderFactory new file mode 100644 index 0000000000..35de6f7726 --- /dev/null +++ b/app/databrowser-json/src/main/resources/META-INF/services/org.phoebus.archive.reader.spi.ArchiveReaderFactory @@ -0,0 +1 @@ +org.phoebus.archive.reader.json.JsonArchiveReaderFactory diff --git a/app/databrowser-json/src/main/resources/archive_reader_json_preferences.properties b/app/databrowser-json/src/main/resources/archive_reader_json_preferences.properties new file mode 100644 index 0000000000..359b0f1813 --- /dev/null +++ b/app/databrowser-json/src/main/resources/archive_reader_json_preferences.properties @@ -0,0 +1,5 @@ +# Shall a precision of zero for a floating-point value result in this value +# using a number format without fractional digits (true) or shall it be treated +# as an indication that the value should be rendered with a default number of +# fractional digits (false)? +honor_zero_precision=true diff --git a/app/databrowser-json/src/test/java/org/phoebus/archive/reader/json/HttpServerTestBase.java b/app/databrowser-json/src/test/java/org/phoebus/archive/reader/json/HttpServerTestBase.java new file mode 100644 index 0000000000..afa7d7437b --- /dev/null +++ b/app/databrowser-json/src/test/java/org/phoebus/archive/reader/json/HttpServerTestBase.java @@ -0,0 +1,211 @@ +/******************************************************************************* + * Copyright (c) 2024 aquenos GmbH. + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Public License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/legal/epl-v10.html + ******************************************************************************/ + +package org.phoebus.archive.reader.json; + +import com.google.common.base.Splitter; +import com.google.common.collect.Maps; +import com.sun.net.httpserver.Headers; +import com.sun.net.httpserver.HttpHandler; +import com.sun.net.httpserver.HttpServer; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; + +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.net.URI; +import java.net.URLDecoder; +import java.nio.charset.StandardCharsets; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.function.Consumer; + +/** + * Base class for tests that need an HTTP server. + */ +public class HttpServerTestBase { + + /** + * Information about an HTTP request. + * + * @param headers request headers. + * @param method request method. + * @param uri request URI. + */ + public record HttpRequest( + Headers headers, + String method, + URI uri) { + } + + private static HttpServer http_server; + + /** + * Parse a query string, returning the individual parameters. This function + * cannot handle query strings with duplicate parameters or parameters that + * do not have a value. + * + * @param query_string query string that shall be parsed. + * @return + * map mapping parameter names to their respective (decoded) values. 
+ * @throws IllegalArgumentException + * if the query string is malformed, contains value-less parameters, or + * contains duplicate parameters. + */ + public static Map<String, String> parseQueryString( + final String query_string) { + return Maps.transformValues( + Splitter + .on('&') + .withKeyValueSeparator('=') + .split(query_string), + (value) -> URLDecoder.decode(value, StandardCharsets.UTF_8)); + } + + /** + * Returns the port of the HTTP server that is started for the tests. Must + * only be called after {@link #startHttpServer()} and before + * {@link #stopHttpServer()}. + * + * @return TCP port where the HTTP server is listening. + */ + protected static int getHttpServerPort() { + return http_server.getAddress().getPort(); + } + + /** + * Start the HTTP server that is needed for the tests. Must be called + * before running the tests. + */ + @BeforeAll + protected static void startHttpServer() { + try { + http_server = HttpServer.create( + new InetSocketAddress( + InetAddress.getByName("127.0.0.1"), 0), + 0); + } catch (IOException e) { + throw new RuntimeException(e); + } + http_server.start(); + } + + /** + * Stop the HTTP server that was started for the tests. Must be called + * after running the tests. + */ + @AfterAll + protected static void stopHttpServer() { + http_server.stop(1); + http_server = null; + } + + /** + * Runs a function while providing an HTTP service for the archive + * information. This only works when the HTTP server has previously been + * started and has not been stopped yet. + * + * @param archive_info_json + * content that is returned by the HTTP handler that serves the path + * /archive/ below the base URL that is passed to + * request_func. + * @param request_func + * function that is called, passing the base URL of the provided archive + * service. + */ + protected static void withArchiveInfo( + final String archive_info_json, + final Consumer<String> request_func) { + final HttpHandler info_handler = (http_exchange) -> { + if (!http_exchange.getRequestURI().getPath().equals("/archive/")) { + http_exchange.sendResponseHeaders(404, -1); + return; + } + http_exchange.getResponseHeaders().add( + "Content-Type", "application/json;charset=UTF-8"); + http_exchange.sendResponseHeaders(200, 0); + try (final var writer = new OutputStreamWriter( + http_exchange.getResponseBody(), StandardCharsets.UTF_8)) { + writer.write(archive_info_json); + } + }; + final var info_context = http_server.createContext( + "/archive", info_handler); + try { + request_func.accept("http://127.0.0.1:" + getHttpServerPort()); + } finally { + http_server.removeContext(info_context); + } + } + + /** + * Runs a function while providing an HTTP service that serves archived + * samples. This only works when the HTTP server has previously been + * started and has not been stopped yet. In addition to providing samples, + * this function also provides rudimentary archive information for the + * specified archive_key. + * + * @param archive_key + * numerical key that identifies the archive that is provided. + * @param channel_name + * channel name for which samples are provided. + * @param samples_json + * content that is returned by the HTTP handler that serves the path + * /archive/<archive_key>/samples/<channel_name> + * below the base URL that is passed to request_func. + * @param request_func + * function that is called, passing the base URL of the provided archive + * service. + * @return + * list with information about the requests that were made to the samples + * service.
Requests to the archive-info service are not included. + */ + protected static List withSamples( + final int archive_key, + final String channel_name, + final String samples_json, + final Consumer request_func) { + final LinkedList http_requests = new LinkedList<>(); + final HttpHandler samples_handler = (http_exchange) -> { + http_requests.add(new HttpRequest( + http_exchange.getRequestHeaders(), + http_exchange.getRequestMethod(), + http_exchange.getRequestURI())); + http_exchange.getResponseHeaders().add( + "Content-Type", "application/json;charset=UTF-8"); + http_exchange.sendResponseHeaders(200, 0); + try (final var writer = new OutputStreamWriter( + http_exchange.getResponseBody(), StandardCharsets.UTF_8)) { + writer.write(samples_json); + } + }; + final var samples_path = + "/archive/" + archive_key + "/samples/" + channel_name; + final var samples_context = http_server.createContext( + samples_path, samples_handler); + final var archive_info_json = + "[{\"key\":" + + archive_key + + ", \"name\": \"Test\"" + + ", \"description\":\"Test description\"}]"; + // We also provide some rudimentary archive information in order to + // avoid a warning being logged when creating the JsonArchiveReader. + withArchiveInfo(archive_info_json, (base_url) -> { + try { + request_func.accept(base_url); + } finally { + http_server.removeContext(samples_context); + } + }); + return http_requests; + } + +} diff --git a/app/databrowser-json/src/test/java/org/phoebus/archive/reader/json/JsonArchiveReaderFactoryTest.java b/app/databrowser-json/src/test/java/org/phoebus/archive/reader/json/JsonArchiveReaderFactoryTest.java new file mode 100644 index 0000000000..84decc4663 --- /dev/null +++ b/app/databrowser-json/src/test/java/org/phoebus/archive/reader/json/JsonArchiveReaderFactoryTest.java @@ -0,0 +1,53 @@ +/******************************************************************************* + * Copyright (c) 2024 aquenos GmbH. + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Public License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/legal/epl-v10.html + ******************************************************************************/ + +package org.phoebus.archive.reader.json; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.*; + +/** + * Tests for the {@link JsonArchiveReaderFactory}. + */ +public class JsonArchiveReaderFactoryTest extends HttpServerTestBase { + + /** + * Tests the {@link JsonArchiveReaderFactory#createReader(String)} method. + */ + @Test + public void createReader() { + var archive_info_json = """ + [ { + "key" : 1, + "name" : "", + "description" : "Dummy archive" + } ] + """; + withArchiveInfo(archive_info_json, (base_url) -> { + try { + assertEquals( + "Dummy archive", + new JsonArchiveReaderFactory() + .createReader("json:" + base_url) + .getDescription()); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + } + + /** + * Tests the {@link JsonArchiveReaderFactory#getPrefix()} method. 
+ */ + @Test + public void getPrefix() { + assertEquals("json", new JsonArchiveReaderFactory().getPrefix()); + } + +} diff --git a/app/databrowser-json/src/test/java/org/phoebus/archive/reader/json/JsonArchiveReaderTest.java b/app/databrowser-json/src/test/java/org/phoebus/archive/reader/json/JsonArchiveReaderTest.java new file mode 100644 index 0000000000..e0f44fce2e --- /dev/null +++ b/app/databrowser-json/src/test/java/org/phoebus/archive/reader/json/JsonArchiveReaderTest.java @@ -0,0 +1,1166 @@ +/******************************************************************************* + * Copyright (c) 2024 aquenos GmbH. + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Public License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/legal/epl-v10.html + ******************************************************************************/ + +package org.phoebus.archive.reader.json; + +import org.epics.util.stats.Range; +import org.epics.vtype.AlarmSeverity; +import org.epics.vtype.VDouble; +import org.epics.vtype.VDoubleArray; +import org.epics.vtype.VEnum; +import org.epics.vtype.VEnumArray; +import org.epics.vtype.VInt; +import org.epics.vtype.VIntArray; +import org.epics.vtype.VLong; +import org.epics.vtype.VLongArray; +import org.epics.vtype.VStatistics; +import org.epics.vtype.VString; +import org.epics.vtype.VStringArray; +import org.junit.jupiter.api.Test; +import org.phoebus.archive.reader.UnknownChannelException; + +import java.io.IOException; +import java.time.Instant; +import java.util.Arrays; +import java.util.NoSuchElementException; +import java.util.logging.Level; +import java.util.logging.Logger; + +import static org.junit.jupiter.api.Assertions.*; + +/** + * Tests for the {@link JsonArchiveReader}. + */ +public class JsonArchiveReaderTest extends HttpServerTestBase { + + /** + * Tests the {@link JsonArchiveReader#cancel()} method. + */ + @Test + public void cancel() { + final var channel_name = "some-channel"; + final var start = Instant.ofEpochMilli(123L); + final var end = Instant.ofEpochMilli(456L); + final var preferences = new JsonArchivePreferences(true); + // We need two samples, so that we can cancel the iterator after + // retrieving the first one. + final var samples_json = """ + [ { + "time" : 123457000001, + "severity" : { + "level" : "OK", + "hasValue" : true + }, + "status" : "NO_ALARM", + "quality" : "Original", + "metaData" : { + "type" : "numeric", + "precision" : 3, + "units" : "mA", + "displayLow" : 0.0, + "displayHigh" : 300.0, + "warnLow" : 5.0, + "warnHigh" : 100.0, + "alarmLow" : 2.0, + "alarmHigh" : "NaN" + }, + "type" : "double", + "value" : [ 27.2, 48.3 ] + }, { + "time_modified" : 123457000002, + "severity" : { + "level" : "MAJOR", + "hasValue" : true + }, + "status" : "TEST_STATUS", + "quality" : "Original", + "metaData" : { + "type" : "numeric", + "precision" : 3, + "units" : "mA", + "displayLow" : 0.0, + "displayHigh" : 300.0, + "warnLow" : 5.0, + "warnHigh" : 100.0, + "alarmLow" : 2.0, + "alarmHigh" : "NaN" + }, + "type" : "double", + "value" : [ 31.9 ] + } ] + """; + withSamples( + 1, channel_name, samples_json, (base_url) -> { + try ( + final var reader = new JsonArchiveReader( + "json:" + base_url, preferences); + final var iterator = reader.getRawValues( + channel_name, start, end + ) + ) { + // Retrieve the first sample. + iterator.next(); + // Cancel all iterators. + reader.cancel(); + // Now, hasNext() should return false. 
+ assertFalse(iterator.hasNext()); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + } + + /** + * Tests creating a {@link JsonArchiveReader} with an archive key which + * does not specify a valid archive on the archive server. + */ + @Test + public void createWithInvalidArchiveKey() { + final var archive_info_json = """ + [ { + "key" : 2, + "name" : "Some name", + "description" : "Some description" + } ] + """; + withArchiveInfo(archive_info_json, (base_url) -> { + assertThrows(IllegalArgumentException.class, () -> { + new JsonArchiveReader( + "json:" + base_url, + new JsonArchivePreferences(true)); + }); + }); + } + + /** + * Tests creating a {@link JsonArchiveReader} with a base URL that is + * invalid (does not start with json:). + */ + @Test + public void createWithInvalidUrl() { + assertThrows(IllegalArgumentException.class, () -> { + new JsonArchiveReader( + "http://invalid.example.com", + new JsonArchivePreferences(true)); + }); + } + + /** + * Tests the {@link JsonArchiveReader#getDescription()} function. + */ + @Test + public void getDescription() { + var archive_info_json = """ + [ { + "key" : 1, + "name" : "Some name", + "description" : "Some description" + } ] + """; + final var preferences = new JsonArchivePreferences(true); + withArchiveInfo(archive_info_json, (base_url) -> { + try (final var reader = new JsonArchiveReader( + "json:" + base_url, preferences)) { + assertEquals( + "Some description", reader.getDescription()); + } + }); + archive_info_json = """ + [ { + "key" : 1, + "name" : "Some name", + "description" : "Another description" + }, { + "key" : 3, + "name" : "Some name", + "description" : "Yet another description" + } ] + """; + withArchiveInfo(archive_info_json, (base_url) -> { + try (final var reader = new JsonArchiveReader( + "json:" + base_url, preferences)) { + assertEquals( + "Another description", reader.getDescription()); + } + try (final var reader = new JsonArchiveReader( + "json:" + base_url + ";key=3", preferences)) { + assertEquals( + "Yet another description", + reader.getDescription()); + } + }); + } + + /** + * Tests the {@link + * JsonArchiveReader#getOptimizedValues(String, Instant, Instant, int)} + * function. 
+ */ + @Test + public void getOptimizedValues() { + final var samples_json = """ + [ { + "time" : 123, + "severity" : { + "level" : "OK", + "hasValue" : true + }, + "status" : "NO_ALARM", + "quality" : "Interpolated", + "metaData" : { + "type" : "numeric", + "precision" : 1, + "units" : "V", + "displayLow" : -100.0, + "displayHigh" : 100.0, + "warnLow" : "NaN", + "warnHigh" : "NaN", + "alarmLow" : "NaN", + "alarmHigh" : "NaN" + }, + "type" : "minMaxDouble", + "value" : [ -5.0, -1.2 ], + "minimum" : -15.1, + "maximum" : 2.7 + }, { + "time" : 456, + "severity" : { + "level" : "OK", + "hasValue" : true + }, + "status" : "NO_ALARM", + "quality" : "Interpolated", + "metaData" : { + "type" : "numeric", + "precision" : 1, + "units" : "V", + "displayLow" : -100.0, + "displayHigh" : 100.0, + "warnLow" : "NaN", + "warnHigh" : "NaN", + "alarmLow" : "NaN", + "alarmHigh" : "NaN" + }, + "type" : "minMaxDouble", + "value" : [ 4.7 ], + "minimum" : -3.9, + "maximum" : 17.1 + } ] + """; + final var channel_name = "double-channel"; + final var start = Instant.ofEpochMilli(0L); + final var end = Instant.ofEpochMilli(1L); + final var preferences = new JsonArchivePreferences(true); + var requests = withSamples( + 7, channel_name, samples_json, (base_url) -> { + try ( + final var reader = new JsonArchiveReader( + "json:" + base_url + ";key=7", + preferences); + final var iterator = reader.getOptimizedValues( + channel_name, start, end, 10) + ) { + // Check the first sample. The statistics VType does + // not support arrays, so we expect a VDoubleArray. + final var double_array = (VDoubleArray) iterator.next(); + assertEquals(2, double_array.getData().size()); + assertEquals( + -5.0, double_array.getData().getDouble(0)); + assertEquals( + -1.2, double_array.getData().getDouble(1)); + assertEquals( + "NO_ALARM", double_array.getAlarm().getName()); + assertEquals( + AlarmSeverity.NONE, + double_array.getAlarm().getSeverity()); + assertEquals( + Range.undefined(), + double_array.getDisplay().getAlarmRange()); + assertEquals( + Range.undefined(), + double_array.getDisplay().getControlRange() + ); + assertEquals( + Range.of(-100.0, 100.0), + double_array.getDisplay().getDisplayRange()); + assertEquals( + Range.undefined(), + double_array.getDisplay().getWarningRange()); + assertEquals( + 1, + double_array + .getDisplay() + .getFormat() + .getMinimumFractionDigits()); + assertEquals( + 1, + double_array + .getDisplay() + .getFormat() + .getMaximumFractionDigits()); + assertEquals( + "V", + double_array.getDisplay().getUnit()); + assertEquals( + Instant.ofEpochSecond(0, 123L), + double_array.getTime().getTimestamp()); + // Check the second sample. 
+ final var statistics = (VStatistics) iterator.next(); + assertEquals( + 4.7, + statistics.getAverage().doubleValue()); + assertEquals( + -3.9, + statistics.getMin().doubleValue()); + assertEquals( + 17.1, + statistics.getMax().doubleValue()); + assertEquals( + 0, + statistics.getNSamples().intValue()); + assertEquals( + Double.NaN, + statistics.getStdDev().doubleValue()); + assertEquals( + "NO_ALARM", + statistics.getAlarm().getName()); + assertEquals( + AlarmSeverity.NONE, + statistics.getAlarm().getSeverity()); + assertEquals( + Range.undefined(), + statistics.getDisplay().getAlarmRange()); + assertEquals( + Range.undefined(), + statistics.getDisplay().getControlRange() + ); + assertEquals( + Range.of(-100.0, 100.0), + statistics.getDisplay().getDisplayRange()); + assertEquals( + Range.undefined(), + statistics.getDisplay().getWarningRange()); + assertEquals( + 1, + statistics + .getDisplay() + .getFormat() + .getMinimumFractionDigits()); + assertEquals( + 1, + statistics + .getDisplay() + .getFormat() + .getMaximumFractionDigits()); + assertEquals( + "V", + statistics.getDisplay().getUnit()); + assertEquals( + Instant.ofEpochSecond(0L, 456L), + statistics.getTime().getTimestamp()); + // There should be no more samples. + assertFalse(iterator.hasNext()); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + assertEquals(1, requests.size()); + final var request = requests.get(0); + assertEquals("GET", request.method()); + final var query_params = parseQueryString(request.uri().getQuery()); + assertEquals("0", query_params.get("start")); + assertEquals("1000000", query_params.get("end")); + assertEquals("10", query_params.get("count")); + } + + /** + * Tests the + * {@link JsonArchiveReader#getRawValues(String, Instant, Instant)} + * function with double samples. Of the tests for numeric values, this is + * the most detailed one. + */ + @Test + public void getRawValuesWithDoubleSamples() { + final var samples_json = """ + [ { + "time" : 123457000001, + "severity" : { + "level" : "OK", + "hasValue" : true + }, + "status" : "NO_ALARM", + "quality" : "Original", + "metaData" : { + "type" : "numeric", + "precision" : 3, + "units" : "mA", + "displayLow" : 0.0, + "displayHigh" : 300.0, + "warnLow" : 5.0, + "warnHigh" : 100.0, + "alarmLow" : 2.0, + "alarmHigh" : "NaN" + }, + "type" : "double", + "value" : [ 27.2, 48.3 ] + }, { + "time" : 123457000002, + "severity" : { + "level" : "MAJOR", + "hasValue" : true + }, + "status" : "TEST_STATUS", + "quality" : "Original", + "metaData" : { + "type" : "numeric", + "precision" : 3, + "units" : "mA", + "displayLow" : 0.0, + "displayHigh" : 300.0, + "warnLow" : 5.0, + "warnHigh" : 100.0, + "alarmLow" : 2.0, + "alarmHigh" : "NaN" + }, + "type" : "double", + "value" : [ 31.9 ] + } ] + """; + final var channel_name = "double-channel"; + final var start = Instant.ofEpochMilli(123456L); + final var end = Instant.ofEpochMilli(456789L); + final var preferences = new JsonArchivePreferences(true); + var requests = withSamples( + 2, channel_name, samples_json, (base_url) -> { + try ( + final var reader = new JsonArchiveReader( + "json:" + base_url + ";key=2", + preferences); + final var iterator = reader.getRawValues( + channel_name, start, end) + ) { + // Check the first sample. 
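 + // As with the optimized samples above, a two-element value maps to a + // VDoubleArray rather than a scalar VDouble.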
+ assertTrue(iterator.hasNext()); + final var double_array = (VDoubleArray) iterator.next(); + assertEquals(2, double_array.getData().size()); + assertEquals( + 27.2, double_array.getData().getDouble(0)); + assertEquals( + 48.3, double_array.getData().getDouble(1)); + assertEquals( + "NO_ALARM", double_array.getAlarm().getName()); + assertEquals( + AlarmSeverity.NONE, + double_array.getAlarm().getSeverity()); + assertEquals( + 2.0, + double_array + .getDisplay().getAlarmRange().getMinimum()); + assertEquals( + Double.POSITIVE_INFINITY, + double_array + .getDisplay().getAlarmRange().getMaximum()); + assertEquals( + Range.undefined(), + double_array.getDisplay().getControlRange() + ); + assertEquals( + 0.0, + double_array + .getDisplay().getDisplayRange().getMinimum()); + assertEquals( + 300.0, + double_array + .getDisplay().getDisplayRange().getMaximum()); + assertEquals( + 5.0, + double_array + .getDisplay().getWarningRange().getMinimum()); + assertEquals( + 100.0, + double_array + .getDisplay().getWarningRange().getMaximum()); + assertEquals( + 3, + double_array + .getDisplay() + .getFormat() + .getMinimumFractionDigits()); + assertEquals( + 3, + double_array + .getDisplay() + .getFormat() + .getMaximumFractionDigits()); + assertEquals( + "mA", + double_array.getDisplay().getUnit()); + assertEquals( + Instant.ofEpochSecond(123L, 457000001L), + double_array.getTime().getTimestamp()); + // Check the second sample (only the parts that differ + // from the first on). + assertTrue(iterator.hasNext()); + final var double_scalar = (VDouble) iterator.next(); + assertEquals( + 31.9, double_scalar.getValue().doubleValue()); + assertEquals( + "TEST_STATUS", + double_scalar.getAlarm().getName()); + assertEquals( + AlarmSeverity.MAJOR, + double_scalar.getAlarm().getSeverity()); + assertEquals( + Instant.ofEpochSecond(123L, 457000002L), + double_scalar.getTime().getTimestamp()); + // There should be no more samples. + assertFalse(iterator.hasNext()); + assertThrows(NoSuchElementException.class, iterator::next); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + assertEquals(1, requests.size()); + final var request = requests.get(0); + assertEquals("GET", request.method()); + final var query_params = parseQueryString(request.uri().getQuery()); + assertEquals("123456000000", query_params.get("start")); + assertEquals("456789000000", query_params.get("end")); + assertFalse(query_params.containsKey("count")); + } + + /** + * Tests the + * {@link JsonArchiveReader#getRawValues(String, Instant, Instant)} method + * with enum samples. 
+ */ + @Test + public void getRawValuesWithEnumSamples() { + final var samples_json = """ + [ { + "time" : 123000000009, + "severity" : { + "level" : "OK", + "hasValue" : true + }, + "status" : "NO_ALARM", + "quality" : "Original", + "metaData" : { + "type" : "enum", + "states" : [ "High", "Low" ] + }, + "type" : "enum", + "value" : [ 1, 0 ] + }, { + "time" : 124000000011, + "severity" : { + "level" : "INVALID", + "hasValue" : true + }, + "status" : "LINK", + "quality" : "Original", + "metaData" : { + "type" : "enum", + "states" : [ "High", "Low" ] + }, + "type" : "enum", + "value" : [ 1 ] + }, { + "time" : 124000000012, + "severity" : { + "level" : "OK", + "hasValue" : true + }, + "status" : "NO_ALARM", + "quality" : "Original", + "metaData" : { + "type" : "enum", + "states" : [ "High", "Low" ] + }, + "type" : "enum", + "value" : [ 1, 2 ] + }, { + "time" : 124000000013, + "severity" : { + "level" : "OK", + "hasValue" : true + }, + "status" : "NO_ALARM", + "quality" : "Original", + "metaData" : { + "type" : "enum", + "states" : [ "High", "Low" ] + }, + "type" : "enum", + "value" : [ -1 ] + } ] + """; + final var channel_name = "enum-channel"; + final var start = Instant.ofEpochMilli(4321L); + final var end = Instant.ofEpochMilli(999999L); + final var preferences = new JsonArchivePreferences(true); + var requests = withSamples( + 1, channel_name, samples_json, (base_url) -> { + try ( + final var reader = new JsonArchiveReader( + "json:" + base_url, preferences); + final var iterator = reader.getRawValues( + channel_name, start, end) + ) { + // Check the first sample. + final var enum_array = (VEnumArray) iterator.next(); + assertEquals(2, enum_array.getIndexes().size()); + assertEquals(1, enum_array.getIndexes().getInt(0)); + assertEquals(0, enum_array.getIndexes().getInt(1)); + assertEquals( + "NO_ALARM", + enum_array.getAlarm().getName()); + assertEquals( + AlarmSeverity.NONE, + enum_array.getAlarm().getSeverity()); + assertEquals( + Arrays.asList("High", "Low"), + enum_array.getDisplay().getChoices()); + assertEquals( + Instant.ofEpochSecond(123L, 9L), + enum_array.getTime().getTimestamp()); + // Check the second sample (only the parts that differ + // from the first on). + final var enum_scalar = (VEnum) iterator.next(); + assertEquals(1, enum_scalar.getIndex()); + assertEquals( + "LINK", + enum_scalar.getAlarm().getName()); + assertEquals( + AlarmSeverity.INVALID, + enum_scalar.getAlarm().getSeverity()); + assertEquals( + Instant.ofEpochSecond(124L, 11L), + enum_scalar.getTime().getTimestamp()); + // Check the third sample. As this sample contains a + // value for which there is no label, we expect a + // VIntArray instead of a VEnumArray. + final var int_array = (VIntArray) iterator.next(); + assertEquals(2, int_array.getData().size()); + assertEquals(1, int_array.getData().getInt(0)); + assertEquals(2, int_array.getData().getInt(1)); + assertEquals( + 0, + int_array + .getDisplay() + .getFormat() + .getMaximumFractionDigits()); + // Check the fourth sample. As this sample contains a + // value for which there is no label, we expect a + // VInt instead of a VEnum. + final var int_scalar = (VInt) iterator.next(); + assertEquals(-1, int_scalar.getValue().intValue()); + // There should be no more samples. 
assertFalse(iterator.hasNext()); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + assertEquals(1, requests.size()); + final var request = requests.get(0); + assertEquals("GET", request.method()); + final var query_params = parseQueryString(request.uri().getQuery()); + assertEquals("4321000000", query_params.get("start")); + assertEquals("999999000000", query_params.get("end")); + assertFalse(query_params.containsKey("count")); + } + + /** + * Tests the + * {@link JsonArchiveReader#getRawValues(String, Instant, Instant)} method + * with long samples. + */ + @Test + public void getRawValuesWithLongSamples() { + final var samples_json = """ + [ { + "time" : 456000000001, + "severity" : { + "level" : "MAJOR", + "hasValue" : true + }, + "status" : "SOME_ALARM", + "quality" : "Original", + "metaData" : { + "type" : "numeric", + "precision" : 0, + "units" : "pcs.", + "displayLow" : 1.0, + "displayHigh" : 100.0, + "warnLow" : 0.0, + "warnHigh" : 0.0, + "alarmLow" : 0.0, + "alarmHigh" : 0.0 + }, + "type" : "long", + "value" : [ 14, 2 ] + }, { + "time" : 456000000002, + "severity" : { + "level" : "INVALID", + "hasValue" : true + }, + "status" : "INVALID_ALARM", + "quality" : "Original", + "metaData" : { + "type" : "numeric", + "precision" : 0, + "units" : "pcs.", + "displayLow" : 1.0, + "displayHigh" : 100.0, + "warnLow" : 0.0, + "warnHigh" : 0.0, + "alarmLow" : 0.0, + "alarmHigh" : 0.0 + }, + "type" : "long", + "value" : [ 19 ] + } ] + """; + final var channel_name = "long-channel"; + final var start = Instant.ofEpochMilli(4321L); + final var end = Instant.ofEpochMilli(999999L); + final var preferences = new JsonArchivePreferences(true); + var requests = withSamples( + 1, channel_name, samples_json, (base_url) -> { + try ( + final var reader = new JsonArchiveReader( + "json:" + base_url, preferences); + final var iterator = reader.getRawValues( + channel_name, start, end) + ) { + + // Check the first sample. We do not check the limits + // because the code parsing them is the same as for the + // double samples, and we already check them there. + final var long_array = (VLongArray) iterator.next(); + assertEquals(2, long_array.getData().size()); + assertEquals(14, long_array.getData().getLong(0)); + assertEquals(2, long_array.getData().getLong(1)); + assertEquals( + "SOME_ALARM", + long_array.getAlarm().getName()); + assertEquals( + AlarmSeverity.MAJOR, + long_array.getAlarm().getSeverity()); + assertEquals( + 0, + long_array + .getDisplay() + .getFormat() + .getMaximumFractionDigits()); + assertEquals( + "pcs.", + long_array.getDisplay().getUnit()); + assertEquals( + Instant.ofEpochSecond(456L, 1L), + long_array.getTime().getTimestamp()); + // Check the second sample (only the parts that differ + // from the first one). + final var long_scalar = (VLong) iterator.next(); + assertEquals(19, long_scalar.getValue().longValue()); + assertEquals( + "INVALID_ALARM", + long_scalar.getAlarm().getName()); + assertEquals( + AlarmSeverity.INVALID, + long_scalar.getAlarm().getSeverity()); + assertEquals( + Instant.ofEpochSecond(456L, 2L), + long_scalar.getTime().getTimestamp()); + // There should be no more samples.
+ assertFalse(iterator.hasNext()); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + assertEquals(1, requests.size()); + final var request = requests.get(0); + assertEquals("GET", request.method()); + final var query_params = parseQueryString(request.uri().getQuery()); + assertEquals("4321000000", query_params.get("start")); + assertEquals("999999000000", query_params.get("end")); + assertFalse(query_params.containsKey("count")); + } + + /** + * Tests the + * {@link JsonArchiveReader#getRawValues(String, Instant, Instant)} method + * with a malformed response. + */ + @Test + public void getRawValuesWithMalformedResponse() { + final var channel_name = "some-channel"; + final var start = Instant.ofEpochMilli(123L); + final var end = Instant.ofEpochMilli(456L); + final var preferences = new JsonArchivePreferences(true); + // First, we test that we get an immediate exception if the first + // sample is malformed. + var samples_json = """ + [ { + "time_modified" : 123457000001, + "severity" : { + "level" : "OK", + "hasValue" : true + }, + "status" : "NO_ALARM", + "quality" : "Original", + "metaData" : { + "type" : "numeric", + "precision" : 3, + "units" : "mA", + "displayLow" : 0.0, + "displayHigh" : 300.0, + "warnLow" : 5.0, + "warnHigh" : 100.0, + "alarmLow" : 2.0, + "alarmHigh" : "NaN" + }, + "type" : "double", + "value" : [ 27.2, 48.3 ] + } ] + """; + + withSamples( + 1, channel_name, samples_json, (base_url) -> { + try (final var reader = new JsonArchiveReader( + "json:" + base_url, preferences)) { + assertThrows(IOException.class, () -> { + reader.getRawValues( + channel_name, start, end); + }); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + // Second, we test that we do not get an exception when a subsequent + // sample is malformed. Instead, we expect hasNext() to return false. + // As a side effect, an error message should be logged. but we cannot + // test this easily. + samples_json = """ + [ { + "time" : 123457000001, + "severity" : { + "level" : "OK", + "hasValue" : true + }, + "status" : "NO_ALARM", + "quality" : "Original", + "metaData" : { + "type" : "numeric", + "precision" : 3, + "units" : "mA", + "displayLow" : 0.0, + "displayHigh" : 300.0, + "warnLow" : 5.0, + "warnHigh" : 100.0, + "alarmLow" : 2.0, + "alarmHigh" : "NaN" + }, + "type" : "double", + "value" : [ 27.2, 48.3 ] + }, { + "time_modified" : 123457000002, + "severity" : { + "level" : "MAJOR", + "hasValue" : true + }, + "status" : "TEST_STATUS", + "quality" : "Original", + "metaData" : { + "type" : "numeric", + "precision" : 3, + "units" : "mA", + "displayLow" : 0.0, + "displayHigh" : 300.0, + "warnLow" : 5.0, + "warnHigh" : 100.0, + "alarmLow" : 2.0, + "alarmHigh" : "NaN" + }, + "type" : "double", + "value" : [ 31.9 ] + } ] + """; + withSamples(1, channel_name, samples_json, (base_url) -> { + try ( + final var reader = new JsonArchiveReader( + "json:" + base_url, preferences); + final var iterator = reader.getRawValues( + channel_name, start, end) + ) { + // We should be able to retrieve the first sample, but + // not the second one. + assertTrue(iterator.hasNext()); + iterator.next(); + // Before calling hasNext() the second time, we suppress error + // logging for the iterator. 
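 + // (JsonValueIterator creates its logger from getClass().getName(), so + // the iterator's runtime class yields the right logger name.)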
+ final var iterator_logger = Logger.getLogger( + iterator.getClass().getName()); + final var log_level = iterator_logger.getLevel(); + iterator_logger.setLevel(Level.OFF); + try { + assertFalse(iterator.hasNext()); + } finally { + iterator_logger.setLevel(log_level); + } + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + } + + /** + * Tests the + * {@link JsonArchiveReader#getRawValues(String, Instant, Instant)} method + * with no samples. + */ + @Test + public void getRawValuesWithNoSamples() { + final var channel_name = "empty-channel"; + final var start = Instant.ofEpochMilli(456L); + final var end = Instant.ofEpochMilli(789L); + final var preferences = new JsonArchivePreferences(true); + var requests = withSamples( + 1, channel_name, "[]", (base_url) -> { + try ( + final var reader = new JsonArchiveReader( + "json:" + base_url, preferences); + final var iterator = reader.getRawValues( + channel_name, start, end) + ) { + // The iterator should be empty. + assertFalse(iterator.hasNext()); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + assertEquals(1, requests.size()); + final var request = requests.get(0); + assertEquals("GET", request.method()); + final var query_params = parseQueryString(request.uri().getQuery()); + assertEquals("456000000", query_params.get("start")); + assertEquals("789000000", query_params.get("end")); + assertFalse(query_params.containsKey("count")); + } + + /** + * Tests the + * {@link JsonArchiveReader#getRawValues(String, Instant, Instant)} method + * with string samples. + */ + @Test + public void getRawValuesWithStringSamples() { + final var samples_json = """ + [ { + "time" : 123000000001, + "severity" : { + "level" : "OK", + "hasValue" : true + }, + "status" : "NO_ALARM", + "quality" : "Original", + "type" : "string", + "value" : [ "abc", "def", "ghi" ] + }, { + "time" : 123000000002, + "severity" : { + "level" : "OK", + "hasValue" : true + }, + "status" : "NO_ALARM", + "quality" : "Original", + "type" : "string", + "value" : [ "123" ] + } ] + """; + final var channel_name = "long-channel"; + final var start = Instant.ofEpochMilli(0L); + final var end = Instant.ofEpochMilli(999000L); + final var preferences = new JsonArchivePreferences(true); + var requests = withSamples( + 1, channel_name, samples_json, (base_url) -> { + try ( + final var reader = new JsonArchiveReader( + "json:" + base_url, preferences); + final var iterator = reader.getRawValues( + channel_name, start, end) + ) { + // Check the first sample. + final var string_array = (VStringArray) iterator.next(); + assertEquals(3, string_array.getData().size()); + assertEquals("abc", string_array.getData().get(0)); + assertEquals("def", string_array.getData().get(1)); + assertEquals("ghi", string_array.getData().get(2)); + assertEquals( + Instant.ofEpochSecond(123L, 1L), + string_array.getTime().getTimestamp()); + // Check the second sample. + final var string_scalar = (VString) iterator.next(); + assertEquals("123", string_scalar.getValue()); + assertEquals( + Instant.ofEpochSecond(123L, 2L), + string_scalar.getTime().getTimestamp()); + // There should be no more samples. 
+ assertFalse(iterator.hasNext()); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + assertEquals(1, requests.size()); + final var request = requests.get(0); + assertEquals("GET", request.method()); + final var query_params = parseQueryString(request.uri().getQuery()); + assertEquals("0", query_params.get("start")); + assertEquals("999000000000", query_params.get("end")); + assertFalse(query_params.containsKey("count")); + } + + /** + * Tests the + * {@link JsonArchiveReader#getRawValues(String, Instant, Instant)} method + * with a channel name that is not known by the server. + */ + @Test + public void getRawValuesWithUnknownChannel() { + final var start = Instant.ofEpochMilli(123L); + final var end = Instant.ofEpochMilli(456L); + final var preferences = new JsonArchivePreferences(true); + withSamples(1, "some-channel", "", (base_url) -> { + try (final var reader = new JsonArchiveReader( + "json:" + base_url, preferences)) { + assertThrows(UnknownChannelException.class, () -> { + reader.getRawValues("another-channel", start, end); + }); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + } + + /** + * Tests the {@link JsonArchivePreferences#honor_zero_precision()} flag. + */ + @Test + public void honorZeroPrecision() { + final var samples_json = """ + [ { + "time" : 123457000001, + "severity" : { + "level" : "OK", + "hasValue" : true + }, + "status" : "NO_ALARM", + "quality" : "Original", + "metaData" : { + "type" : "numeric", + "precision" : 0, + "units" : "mA", + "displayLow" : 0.0, + "displayHigh" : 300.0, + "warnLow" : 5.0, + "warnHigh" : 100.0, + "alarmLow" : 2.0, + "alarmHigh" : "NaN" + }, + "type" : "double", + "value" : [ 1.5 ] + }, { + "time" : 456000000002, + "severity" : { + "level" : "INVALID", + "hasValue" : true + }, + "status" : "INVALID_ALARM", + "quality" : "Original", + "metaData" : { + "type" : "numeric", + "precision" : 0, + "units" : "pcs.", + "displayLow" : 1.0, + "displayHigh" : 100.0, + "warnLow" : 0.0, + "warnHigh" : 0.0, + "alarmLow" : 0.0, + "alarmHigh" : 0.0 + }, + "type" : "long", + "value" : [ 19 ] + } ] + """; + final var channel_name = "double-channel"; + final var start = Instant.ofEpochMilli(123456L); + final var end = Instant.ofEpochMilli(456789L); + // When honor_zero_precision is set, a sample with a precision of zero + // should have a number format that does not include fractional digits. + withSamples( + 1, channel_name, samples_json, (base_url) -> { + final var preferences = new JsonArchivePreferences(true); + try ( + final var reader = new JsonArchiveReader( + "json:" + base_url, preferences); + final var iterator = reader.getRawValues( + channel_name, start, end) + ) { + final var double_scalar = (VDouble) iterator.next(); + assertEquals( + 0, + double_scalar + .getDisplay() + .getFormat() + .getMaximumFractionDigits()); + final var long_scalar = (VLong) iterator.next(); + assertEquals( + 0, + long_scalar + .getDisplay() + .getFormat() + .getMaximumFractionDigits()); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + // When honor_zero_precision is clear, a sample with a precision of + // zero should have a number format that includes fractional digits, + // but only for double samples and not for long samples. 
+ withSamples( + 1, channel_name, samples_json, (base_url) -> { + final var preferences = new JsonArchivePreferences(false); + try ( + final var reader = new JsonArchiveReader( + "json:" + base_url, preferences); + final var iterator = reader.getRawValues( + channel_name, start, end) + ) { + final var double_scalar = (VDouble) iterator.next(); + assertNotEquals( + 0, + double_scalar + .getDisplay() + .getFormat() + .getMaximumFractionDigits()); + final var long_scalar = (VLong) iterator.next(); + assertEquals( + 0, + long_scalar + .getDisplay() + .getFormat() + .getMaximumFractionDigits()); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + } + +} diff --git a/app/pom.xml b/app/pom.xml index 3771508034..ff5bb5eee0 100644 --- a/app/pom.xml +++ b/app/pom.xml @@ -20,6 +20,7 @@ logbook rtplot databrowser + databrowser-json databrowser-timescale display alarm diff --git a/phoebus-product/pom.xml b/phoebus-product/pom.xml index 694c33c2e0..66b9b5b2e5 100644 --- a/phoebus-product/pom.xml +++ b/phoebus-product/pom.xml @@ -116,6 +116,11 @@ app-databrowser 4.7.4-SNAPSHOT + + org.phoebus + app-databrowser-json + 4.7.4-SNAPSHOT + org.phoebus app-databrowser-timescale