From 6231d0cbc31d00a6d11fba3a60e5a48a42f1b369 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Wed, 10 Jan 2024 18:15:29 +0100 Subject: [PATCH 001/146] Add create() methods to NQuadsFormat and NTriplesFormat #166 --- .../java/fr/inria/corese/core/print/NQuadsFormat.java | 5 +++++ .../fr/inria/corese/core/print/NTriplesFormat.java | 11 +++++++++++ .../java/fr/inria/corese/core/print/ResultFormat.java | 6 ++++++ 3 files changed, 22 insertions(+) diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/NQuadsFormat.java b/corese-core/src/main/java/fr/inria/corese/core/print/NQuadsFormat.java index 137a15849..3ceea8177 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/NQuadsFormat.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/NQuadsFormat.java @@ -2,6 +2,7 @@ import fr.inria.corese.core.Graph; import fr.inria.corese.kgram.api.core.ExpType; +import fr.inria.corese.kgram.core.Mappings; public class NQuadsFormat extends NTriplesFormat { @@ -13,6 +14,10 @@ public static NQuadsFormat create(Graph graph) { return new NQuadsFormat(graph); } + public static NQuadsFormat create(Mappings map) { + return new NQuadsFormat((Graph) map.getGraph()); + } + /** * Converts the graph to a string in N-Quads format. * diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/NTriplesFormat.java b/corese-core/src/main/java/fr/inria/corese/core/print/NTriplesFormat.java index 723ad2f7b..6355f0784 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/NTriplesFormat.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/NTriplesFormat.java @@ -7,6 +7,7 @@ import fr.inria.corese.core.Graph; import fr.inria.corese.kgram.api.core.Node; +import fr.inria.corese.kgram.core.Mappings; import fr.inria.corese.sparql.triple.parser.NSManager; /** @@ -40,6 +41,16 @@ public static NTriplesFormat create(Graph graph) { return new NTriplesFormat(graph); } + /** + * Factory method to create a new NTriplesFormat instance. + * + * @param map the mappings to be formatted + * @return a new NTriplesFormat instance + */ + public static NTriplesFormat create(Mappings map) { + return new NTriplesFormat((Graph) map.getGraph()); + } + /** * Converts the graph to a string in N-Triples format. 
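     * <p>
     * Usage sketch: just as {@code create(Graph)} is paired with
     * {@code toString()}, a query result can be serialized with
     * {@code NTriplesFormat.create(mappings).toString()}, which is how the
     * {@code ResultFormat} change below uses the new factory.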
* diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java b/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java index f5a52ea8f..38ba2a6fa 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java @@ -485,6 +485,8 @@ boolean isGraphFormat(int type) { case TURTLE_FORMAT: case TRIG_FORMAT: case JSONLD_FORMAT: + case NTRIPLES_FORMAT: + case NQUADS_FORMAT: // case RDF_FORMAT: return true; default: @@ -535,6 +537,10 @@ String processBasic(Mappings map, int type) { return TripleFormat.create(map, true).setNbTriple(getNbTriple()).toString(); case JSONLD_FORMAT: return JSONLDFormat.create(map).toString(); + case NTRIPLES_FORMAT: + return NTriplesFormat.create(map).toString(); + case NQUADS_FORMAT: + return NQuadsFormat.create(map).toString(); case RDF_FORMAT: // W3C RDF Graph Mappings From bd80f4116ec305e88eae579d8faa9443ea0644be Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Thu, 11 Jan 2024 15:36:35 +0100 Subject: [PATCH 002/146] Fix malformed prefixed URIs in Turtle syntax, closes #167 --- .../sparql/triple/parser/NSManager.java | 38 ++++++++++++++----- 1 file changed, 29 insertions(+), 9 deletions(-) diff --git a/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/NSManager.java b/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/NSManager.java index 094e8a492..4669cc651 100755 --- a/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/NSManager.java +++ b/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/NSManager.java @@ -926,20 +926,40 @@ public static String stripResource(String uri) { return "/" + strip(uri, ns); } - public static String namespace(String type) { // retourne le namespace d'un type + /** + * Create a namespace from a URI + * + * @param type URI + * @return namespace + */ + public static String namespace(String type) { + if (type == null || type.isEmpty()) { + return ""; + } + + // Return empty string if type starts with HASH if (type.startsWith(HASH)) { return ""; } - int index; - for (int i = 0; i < END_CHAR.length; i++) { - index = type.lastIndexOf(END_CHAR[i]); - if (index != -1) { - String str = type.substring(0, index + 1); - if (!str.equals("http://")) { - return str; - } + + // Iterate through END_CHAR array to find the last occurrence of any char in it + int lastIndex = -1; + for (char endChar : END_CHAR) { + int currentIndex = type.lastIndexOf(endChar); + if (currentIndex > lastIndex) { + lastIndex = currentIndex; } } + + // Check if a valid index is found and the substring is not a specific unwanted + // string + if (lastIndex != -1) { + String namespace = type.substring(0, lastIndex + 1); + if (!"http://".equals(namespace)) { + return namespace; + } + } + return ""; } From 82eb9ed09867c83e377aeda0bbbb2395d7f2da03 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Fri, 12 Jan 2024 14:41:21 +0100 Subject: [PATCH 003/146] Refactor ZeroaryFunction and Binding classes to fix now() function The Binding class has been updated to store and retrieve the current time value, allowing the now() function to return the same value during processing. This improves the consistency and reliability of the now() function. Fix #168. 
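In essence, the fix lazily caches the first value of now() in the Binding and
returns that cached value on later calls. A minimal, hypothetical sketch of the
pattern (illustrative names, not the Corese API):

    import java.time.OffsetDateTime;
    import java.util.Optional;

    class NowCache {
        private Optional<OffsetDateTime> savedNow = Optional.empty();

        // First call creates the timestamp; later calls reuse it, so the
        // value stays stable for the whole evaluation.
        OffsetDateTime now() {
            if (!savedNow.isPresent()) {
                savedNow = Optional.of(OffsetDateTime.now());
            }
            return savedNow.get();
        }
    }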
--- .../triple/function/core/ZeroaryFunction.java | 53 ++++++++++++++----- .../sparql/triple/function/term/Binding.java | 21 ++++++++ 2 files changed, 61 insertions(+), 13 deletions(-) diff --git a/sparql/src/main/java/fr/inria/corese/sparql/triple/function/core/ZeroaryFunction.java b/sparql/src/main/java/fr/inria/corese/sparql/triple/function/core/ZeroaryFunction.java index 6e882b25f..4de1aa3db 100644 --- a/sparql/src/main/java/fr/inria/corese/sparql/triple/function/core/ZeroaryFunction.java +++ b/sparql/src/main/java/fr/inria/corese/sparql/triple/function/core/ZeroaryFunction.java @@ -1,15 +1,17 @@ package fr.inria.corese.sparql.triple.function.core; +import java.util.Optional; + +import fr.inria.corese.kgram.api.core.ExprType; +import fr.inria.corese.kgram.api.query.Environment; +import fr.inria.corese.kgram.api.query.Producer; import fr.inria.corese.sparql.api.Computer; import fr.inria.corese.sparql.api.IDatatype; import fr.inria.corese.sparql.datatype.CoreseDouble; import fr.inria.corese.sparql.datatype.DatatypeMap; +import fr.inria.corese.sparql.exceptions.EngineException; import fr.inria.corese.sparql.triple.function.term.Binding; import fr.inria.corese.sparql.triple.function.term.TermEval; -import fr.inria.corese.kgram.api.core.ExprType; -import fr.inria.corese.kgram.api.query.Environment; -import fr.inria.corese.sparql.exceptions.EngineException; -import fr.inria.corese.kgram.api.query.Producer; /** * @@ -17,19 +19,44 @@ * */ public class ZeroaryFunction extends TermEval { - - public ZeroaryFunction(){} - - public ZeroaryFunction(String name){ + + public ZeroaryFunction() { + } + + public ZeroaryFunction(String name) { super(name); } - + @Override public IDatatype eval(Computer eval, Binding b, Environment env, Producer p) throws EngineException { - switch (oper()){ - case ExprType.RANDOM: return CoreseDouble.create(Math.random()); - case ExprType.NOW: return DatatypeMap.newDate(); - } + switch (oper()) { + case ExprType.RANDOM: + return CoreseDouble.create(Math.random()); + case ExprType.NOW: + return this.getOrSetCurrentTime(b); + + } return null; } + + /** + * Returns the current time, or sets it if it is not already set. 
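     * The value is cached in the given Binding, so every call to now() within
     * a single query evaluation observes the same instant (see #168).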
+ * + * @param binding The Binding to get or set the current time in + * @return The current time + */ + private IDatatype getOrSetCurrentTime(Binding binding) { + + // Check if current time is already set in the Binding + Optional savedNowTime = binding.getNowValue(); + + if (savedNowTime.isPresent()) { + return savedNowTime.get(); + } else { + // If not set, create a new time, save it, and return it + IDatatype nowValue = DatatypeMap.newDate(); + binding.setNowValue(nowValue); + return nowValue; + } + } } diff --git a/sparql/src/main/java/fr/inria/corese/sparql/triple/function/term/Binding.java b/sparql/src/main/java/fr/inria/corese/sparql/triple/function/term/Binding.java index 5f17c483c..b2f352699 100644 --- a/sparql/src/main/java/fr/inria/corese/sparql/triple/function/term/Binding.java +++ b/sparql/src/main/java/fr/inria/corese/sparql/triple/function/term/Binding.java @@ -20,6 +20,8 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.List; +import java.util.Optional; + import org.slf4j.LoggerFactory; import org.slf4j.Logger; @@ -112,6 +114,9 @@ public class Share { // transformer also records its Binding and hence its Visitor private Object transformerVisitor; + // Enables function now() to return the same value during processing + private Optional savNowValue = Optional.empty(); + public Object getTransformerVisitor() { return transformerVisitor; } @@ -120,6 +125,14 @@ public void setTransformerVisitor(Object transformerVisitor) { this.transformerVisitor = transformerVisitor; } + public Optional getSavNowValue() { + return savNowValue; + } + + public void setNowValue(IDatatype nowValue) { + this.savNowValue = Optional.ofNullable(nowValue); + } + } public static Binding create() { @@ -662,6 +675,14 @@ void shareGlobalVariable(Binding b) { setGlobalVariableNames(b.getGlobalVariableNames()); setGlobalVariableValues(b.getGlobalVariableValues()); } + + public Optional getNowValue() { + return getShare().getSavNowValue(); + } + + public void setNowValue(IDatatype nowValue) { + getShare().setNowValue(nowValue); + } public HashMap getGlobalVariableNames() { return globalVariable; From 079afcbdadfb07b242d0c129a2834a57fdd7f418 Mon Sep 17 00:00:00 2001 From: corby Date: Wed, 24 Jan 2024 17:25:06 +0100 Subject: [PATCH 004/146] Correct bug with init() which was overloaded by error renamed it as initlocal() --- .../inria/corese/core/storage/CoreseGraphDataManager.java | 6 +++--- .../java/fr/inria/corese/core/storage/DataManagerJava.java | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/corese-core/src/main/java/fr/inria/corese/core/storage/CoreseGraphDataManager.java b/corese-core/src/main/java/fr/inria/corese/core/storage/CoreseGraphDataManager.java index b7e90136b..e7828b1e0 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/storage/CoreseGraphDataManager.java +++ b/corese-core/src/main/java/fr/inria/corese/core/storage/CoreseGraphDataManager.java @@ -35,7 +35,7 @@ public class CoreseGraphDataManager implements DataManager { */ protected CoreseGraphDataManager() { setGraph(new Graph()); - init(); + initlocal(); } /** @@ -46,10 +46,10 @@ protected CoreseGraphDataManager() { */ protected CoreseGraphDataManager(Graph g) { setGraph(g); - init(); + initlocal(); } - void init() { + void initlocal() { emptyNodeList = new ArrayList<>(0); emptyEdgeList = new ArrayList<>(0); } diff --git a/corese-core/src/main/java/fr/inria/corese/core/storage/DataManagerJava.java b/corese-core/src/main/java/fr/inria/corese/core/storage/DataManagerJava.java index 
e87a5ef9e..b7d6d2410 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/storage/DataManagerJava.java +++ b/corese-core/src/main/java/fr/inria/corese/core/storage/DataManagerJava.java @@ -128,7 +128,7 @@ String clean(String str) { return str.replace("%20", " "); } - @Override + //@Override void init() { if (isLdscript()) { initldscript(); From 5e9bd88d5ec3ea802ba43b5cf5673d92522cd7d3 Mon Sep 17 00:00:00 2001 From: corby Date: Wed, 24 Jan 2024 17:25:26 +0100 Subject: [PATCH 005/146] Leverage isURI --- .../sparql/datatype/extension/CoreseJSON.java | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/sparql/src/main/java/fr/inria/corese/sparql/datatype/extension/CoreseJSON.java b/sparql/src/main/java/fr/inria/corese/sparql/datatype/extension/CoreseJSON.java index 7f2e2ee26..17c0fc2f9 100644 --- a/sparql/src/main/java/fr/inria/corese/sparql/datatype/extension/CoreseJSON.java +++ b/sparql/src/main/java/fr/inria/corese/sparql/datatype/extension/CoreseJSON.java @@ -7,6 +7,8 @@ import fr.inria.corese.sparql.datatype.CoreseDatatype; import fr.inria.corese.sparql.datatype.DatatypeMap; import fr.inria.corese.sparql.triple.parser.NSManager; +import java.net.URI; +import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; @@ -315,7 +317,7 @@ IDatatype cast(Object obj) { return new CoreseJSON((JSONObject) obj); } else if (obj instanceof String) { String str = (String) obj; - if (NSManager.isURI(str)) { + if (isURI(str)) { return DatatypeMap.newResource(str); } else { @@ -325,6 +327,15 @@ IDatatype cast(Object obj) { return DatatypeMap.castObject(obj); } + boolean isURI(String str) { + try { + URI uri = new URI(str); + return uri.isAbsolute(); + } catch (URISyntaxException ex) { + return false; + } + } + IDatatype cast(JSONArray ar) { ArrayList list = new ArrayList<>(); for (Object obj : ar) { From 6fb263fcd99d1841c5a72d5e4d1afe7afa7ab362 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Tue, 30 Jan 2024 09:37:45 +0100 Subject: [PATCH 006/146] Add support for printing blank nodes in NTriples/Quads Format class --- .../fr/inria/corese/core/print/NTriplesFormat.java | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/NTriplesFormat.java b/corese-core/src/main/java/fr/inria/corese/core/print/NTriplesFormat.java index 6355f0784..5010a93bd 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/NTriplesFormat.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/NTriplesFormat.java @@ -94,8 +94,10 @@ protected String printNode(Node node) { return printURI(node); } else if (node.getDatatypeValue().isLiteral()) { return printDatatype(node); + } else if (node.isBlank()) { + return printBlank(node); } else { - throw new IllegalArgumentException("Node " + node + " is not a URI or a literal"); + throw new IllegalArgumentException("Node " + node + " is not a URI, Literal, or blank node."); } } @@ -135,6 +137,16 @@ private String printDatatype(Node node) { } } + /** + * Converts a blank node to a string. + * + * @param node the blank node to be formatted + * @return a string representation of the blank node + */ + private String printBlank(Node node) { + return node.getLabel(); + } + /** * Escapes special characters in a string. 
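     * (In the N-Triples grammar this at least covers backslash, double quote,
     * line feed and carriage return, written as \\, \", \n and \r; the exact
     * set handled here is defined by the method body, not shown in this hunk.)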
* From 799b8d8299d892ad815d075ad07c9d22e2576e86 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Tue, 30 Jan 2024 09:38:07 +0100 Subject: [PATCH 007/146] Fix formatting issue in NQuadsFormat double space --- .../src/main/java/fr/inria/corese/core/print/NQuadsFormat.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/NQuadsFormat.java b/corese-core/src/main/java/fr/inria/corese/core/print/NQuadsFormat.java index 3ceea8177..1fb450bfd 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/NQuadsFormat.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/NQuadsFormat.java @@ -37,9 +37,10 @@ public String toString() { if (edge.getGraph().getValue().stringValue() != ExpType.DEFAULT_GRAPH) { sb.append(printNode(edge.getGraph())); + sb.append(" "); } - sb.append(" .\n"); + sb.append(".\n"); } return sb.toString(); From 28e41461e1c0ef5260b215331c12c1bfa200f50a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Tue, 30 Jan 2024 11:40:15 +0100 Subject: [PATCH 008/146] Refactor edge iteration to fix Edge Iterable --- .../inria/corese/core/print/JSOND3Format.java | 47 ++++++++++--------- .../inria/corese/core/print/NQuadsFormat.java | 8 +++- .../corese/core/print/NTriplesFormat.java | 8 +++- 3 files changed, 38 insertions(+), 25 deletions(-) diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/JSOND3Format.java b/corese-core/src/main/java/fr/inria/corese/core/print/JSOND3Format.java index b6703ae10..651d0a377 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/JSOND3Format.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/JSOND3Format.java @@ -1,15 +1,15 @@ package fr.inria.corese.core.print; -import fr.inria.corese.sparql.api.IDatatype; -import fr.inria.corese.sparql.triple.parser.NSManager; -import fr.inria.corese.sparql.triple.parser.ASTQuery; -import fr.inria.corese.kgram.api.core.Node; -import fr.inria.corese.kgram.core.Mappings; -import fr.inria.corese.kgram.core.Query; -import fr.inria.corese.core.Graph; import java.util.HashMap; import java.util.Map; + +import fr.inria.corese.core.Graph; import fr.inria.corese.kgram.api.core.Edge; +import fr.inria.corese.kgram.api.core.Node; +import fr.inria.corese.kgram.core.Mappings; +import fr.inria.corese.kgram.core.Query; +import fr.inria.corese.sparql.api.IDatatype; +import fr.inria.corese.sparql.triple.parser.NSManager; public class JSOND3Format extends RDFFormat { @@ -86,17 +86,17 @@ public StringBuilder getStringBuilder() { return sb; } -// if (isGraph) { -// graphNodes(); -// } else { -// nodes(); -// } + // if (isGraph) { + // graphNodes(); + // } else { + // nodes(); + // } StringBuilder bb = new StringBuilder(); -// header(bb); + // header(bb); bb.append(OOBJ); bb.append(NL); -// bb.append(TAB); + // bb.append(TAB); bb.append(" \"nodes\" : [ "); bb.append(NL); d3Nodes(); @@ -104,7 +104,7 @@ public StringBuilder getStringBuilder() { bb.append("] ,"); bb.append(NL); -// bb.append(TAB); + // bb.append(TAB); bb.append(" \"edges\" : [ "); bb.append(NL); d3Edges(); @@ -117,7 +117,7 @@ public StringBuilder getStringBuilder() { } void d3Nodes() { - + for (Node node : graph.getRBNodes()) { int group = 1; if (node.isBlank()) { @@ -125,12 +125,12 @@ void d3Nodes() { } else if (node.toString().contains("/sparql")) { group = 2; } - + sdisplay(TAB); sdisplay(OOBJ); sdisplay("\"name\" : "); sdisplay(DQUOTE); - + sdisplay(JSONFormat.addJSONEscapes(node.toString())); 
sdisplay(DQUOTE); sdisplay(V); @@ -144,12 +144,12 @@ void d3Nodes() { } for (Node node : graph.getLiteralNodes()) { -// for (Entity e : graph.getRBNodes()) { + // for (Entity e : graph.getRBNodes()) { sdisplay(TAB); sdisplay(OOBJ); sdisplay("\"name\" : "); sdisplay(DQUOTE); - + sdisplay(JSONFormat.addJSONEscapes(node.toString())); sdisplay(DQUOTE); sdisplay(V); @@ -171,7 +171,9 @@ void d3Edges() { for (Edge e : graph.getEdges()) { - Edge edge = e; + // Create a new clean iterable (because corse iterable does not have a perfectly + // defined behavior for optimization reasons) + Edge edge = this.graph.getEdgeFactory().copy(e); sdisplay(TAB); sdisplay(OOBJ); @@ -194,8 +196,7 @@ void d3Edges() { sb.deleteCharAt(sb.lastIndexOf(V)); } } - - + void nodes() { for (Node node : getNodes()) { print(null, node); diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/NQuadsFormat.java b/corese-core/src/main/java/fr/inria/corese/core/print/NQuadsFormat.java index 1fb450bfd..174ff09bb 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/NQuadsFormat.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/NQuadsFormat.java @@ -1,6 +1,7 @@ package fr.inria.corese.core.print; import fr.inria.corese.core.Graph; +import fr.inria.corese.kgram.api.core.Edge; import fr.inria.corese.kgram.api.core.ExpType; import fr.inria.corese.kgram.core.Mappings; @@ -27,7 +28,12 @@ public static NQuadsFormat create(Mappings map) { public String toString() { StringBuilder sb = new StringBuilder(); - for (var edge : graph.getEdges()) { + for (Edge e : graph.getEdges()) { + + // Create a new clean iterable (because corse iterable does not have a perfectly + // defined behavior for optimization reasons) + Edge edge = this.graph.getEdgeFactory().copy(e); + sb.append(printNode(edge.getNode(0))) .append(" ") .append(printNode(edge.getEdgeNode())) diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/NTriplesFormat.java b/corese-core/src/main/java/fr/inria/corese/core/print/NTriplesFormat.java index 5010a93bd..6ec529d81 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/NTriplesFormat.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/NTriplesFormat.java @@ -6,6 +6,7 @@ import java.net.URISyntaxException; import fr.inria.corese.core.Graph; +import fr.inria.corese.kgram.api.core.Edge; import fr.inria.corese.kgram.api.core.Node; import fr.inria.corese.kgram.core.Mappings; import fr.inria.corese.sparql.triple.parser.NSManager; @@ -60,7 +61,12 @@ public static NTriplesFormat create(Mappings map) { public String toString() { StringBuilder sb = new StringBuilder(); - for (var edge : graph.getEdges()) { + for (Edge e : graph.getEdges()) { + + // Create a new clean iterable (because corse iterable does not have a perfectly + // defined behavior for optimization reasons) + Edge edge = this.graph.getEdgeFactory().copy(e); + sb.append(printNode(edge.getNode(0))) .append(" ") .append(printNode(edge.getEdgeNode())) From 29c5fc61ee127db1cc5d48d591ddaed639bbdee1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Fri, 2 Feb 2024 10:49:47 +0100 Subject: [PATCH 009/146] Update OPTIONS variable in corese-server.sh fix #169 --- corese-server/build-docker/corese/corese-server.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/corese-server/build-docker/corese/corese-server.sh b/corese-server/build-docker/corese/corese-server.sh index a60a8498d..5c045370f 100644 --- a/corese-server/build-docker/corese/corese-server.sh +++ 
b/corese-server/build-docker/corese/corese-server.sh @@ -77,4 +77,4 @@ java \ -lp \ -pp file://$PROFILE \ -init $PROPERTIES \ - "$OPTIONS" + $OPTIONS From 5f4ee0ce5d64bc1b4f5ce0be70c2f53a3c856c3f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Thu, 8 Feb 2024 12:05:33 +0100 Subject: [PATCH 010/146] Refactor sparql_service_to_dataframe function to use SPARQLWrapper.get_sparql_dataframe --- .../Corese-server with Python.md | 32 +++++-------------- 1 file changed, 8 insertions(+), 24 deletions(-) diff --git a/docs/corese-python/Corese-server with Python.md b/docs/corese-python/Corese-server with Python.md index 652d3fb7b..1e1115b80 100644 --- a/docs/corese-python/Corese-server with Python.md +++ b/docs/corese-python/Corese-server with Python.md @@ -65,34 +65,18 @@ Done ### 2.3. Execute a select query ```python -import json +from SPARQLWrapper import get_sparql_dataframe -import pandas as pd -from SPARQLWrapper import JSON, SPARQLWrapper - -def sparql_service_to_dataframe(service, query): +def sparql_service_to_dataframe(endpoint, query): """ - Helper function to convert SPARQL results into a Pandas DataFrame. - - Credit to Ted Lawless https://lawlesst.github.io/notebook/sparql-dataframe.html + Query the given endpoint with the given query and return the result as a pandas DataFrame. + :param endpoint: The SPARQL endpoint to query + :param query: The SPARQL query + :return: A pandas DataFrame containing the query result """ - sparql = SPARQLWrapper(service) - sparql.setQuery(query) - sparql.setReturnFormat(JSON) - result = sparql.query() - - processed_results = json.load(result.response) - cols = processed_results['head']['vars'] - - out = [] - for row in processed_results['results']['bindings']: - item = [] - for c in cols: - item.append(row.get(c, {}).get('value')) - out.append(item) - - return pd.DataFrame(out, columns=cols) + df = get_sparql_dataframe(endpoint, query) + return df query = ''' From 0abd7cde8a97ff8178e4d4cd0ed33b1cd761ff6f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Thu, 8 Feb 2024 19:06:58 +0100 Subject: [PATCH 011/146] Update table of contents in Getting Started With Corese-server.md --- .../Getting Started With Corese-server.md | 42 +++++++++++++++---- 1 file changed, 35 insertions(+), 7 deletions(-) diff --git a/docs/getting started/Getting Started With Corese-server.md b/docs/getting started/Getting Started With Corese-server.md index 22ae69021..6cfe6c91a 100644 --- a/docs/getting started/Getting Started With Corese-server.md +++ b/docs/getting started/Getting Started With Corese-server.md @@ -6,11 +6,19 @@ This tutorial shows how to use the basic features of the Corese-server framework 1. [1. Installation](#1-installation) 2. [2. Load data](#2-load-data) 1. [2.1. Command line](#21-command-line) - 2. [2.2. Profile file](#22-profile-file) - 3. [3. Create multiple endpoints](#3-create-multiple-endpoints) - 1. [3.1. Multiple endpoints with different data](#31-multiple-endpoints-with-different-data) - 4. [4. Restrict access to external endpoints](#4-restrict-access-to-external-endpoints) - 5. [5. To go deeper](#5-to-go-deeper) + 3. [3. Profile file](#3-profile-file) + 1. [3.1 Create multiple endpoints](#31-create-multiple-endpoints) + 1. [3.1.1 Multiple endpoints with different data](#311-multiple-endpoints-with-different-data) + 2. [3.2 Restrict access to external endpoints](#32-restrict-access-to-external-endpoints) + 4. [4. Property configuration file](#4-property-configuration-file) + 1. [4.1. 
Blank node format](#41-blank-node-format) + 2. [4.2. Loading in the default graph](#42-loading-in-the-default-graph) + 1. [4.3. RDF\* (RDF Star)](#43-rdf-rdf-star) + 3. [4.4. OWL utilities](#44-owl-utilities) + 4. [4.5. SPARQL engine behavior](#45-sparql-engine-behavior) + 5. [4.6. SPARQL federation behavior](#46-sparql-federation-behavior) + 6. [4.7. SPARQL LOAD parameters](#47-sparql-load-parameters) + 5. [6. To go deeper](#6-to-go-deeper) ## 1. Installation @@ -157,22 +165,28 @@ An example of properties file is available on the [Corese-Command GitHub reposit Here we list only some of the most commonly used properties. ### 4.1. Blank node format + ```properties BLANK_NODE = _:b ``` + `BLANK_NODE` specifies the format of blank nodes. The default value is `_:b`. ### 4.2. Loading in the default graph + ```properties LOAD_IN_DEFAULT_GRAPH = true ``` + By default, the data is loaded into the default graph. If `LOAD_IN_DEFAULT_GRAPH` is set to `false`, the data is loaded into a named graph whose name is the path of the file. Note that internally, the default graph of the Corese server is named `http://ns.inria.fr/corese/kgram/default`, or `kg:default`. #### 4.3. RDF* (RDF Star) + ```properties RDF_STAR = false ``` + Corese implements a prototype extension for the RDF* specification. `RDF_STAR` enables this extension. ### 4.4. OWL utilities @@ -180,23 +194,29 @@ Corese implements a prototype extension for the RDF* specification. `RDF_STAR` e ```properties DISABLE_OWL_AUTO_IMPORT = true ``` + By default, when a triple with the predicate `owl:imports` is loaded, the Corese-server automatically loads the ontology specified in the object of the triple. If `DISABLE_OWL_AUTO_IMPORT` is set to `true`, the Corese-server does not load the ontology specified in the object of the triple. ### 4.5. SPARQL engine behavior + ```properties SPARQL_COMPLIANT = false ``` + `SPARQL_COMPLIANT` specifies the behavior of the SPARQL engine. If `SPARQL_COMPLIANT` is set to `true`, the SPARQL engine is compliant with the W3C test cases. In practice, this means that the SPARQL engine will consider that two literals are different if they have the same value but different types (E.g: `1` and `"1"^^xsd:integer`). ```properties REENTRANT_QUERY = false ``` + `REENRANT_QUERY` enables the update during a query. This option was implemented in cooperation with the [SPARQL micro-service project](https://github.com/frmichel/sparql-micro-service). ### 4.6. SPARQL federation behavior + ```properties SERVICE_BINDING = values ``` + When binding values between clauses from different endpoints, the Corese-server uses the `SERVICE_BINDING` property to specify the method to use. The default value is `values`. The other possible value is `filter`. For example, with the following data in the local endpoint: @@ -206,6 +226,7 @@ For example, with the following data in the local endpoint: ex:John :name "John" . ``` + if the following query is executed: ```sparql @@ -233,6 +254,7 @@ SELECT * { ```properties SERVICE_SLICE = 20 ``` + `SERVICE_SLICE` specifies the number of bindings to send to a remote endpoint. The default value is `20`. ```properties @@ -250,35 +272,41 @@ SELECT * { LIMIT 1000 } ``` + Corese will try to obtain the next 1000 results by sending the same query with the `OFFSET` clause. ```properties SERVICE_TIMEOUT = 2000 ``` + `SERVICE_TIMEOUT` specifies the timeout in milliseconds for a remote endpoint. The default value is `10000`. ### 4.7. 
SPARQL LOAD parameters + ```properties LOAD_LIMIT = 10 + ``` + `LOAD_LIMIT` specifies the maximum number of triples to load from a file. This feature is not enabled by default. ```properties LOAD_WITH_PARAMETER = true ``` + `LOAD_WITH_PARAMETER` enables the use of the `LOAD` clause with a parameter. This feature is not enabled by default. ```properties LOAD_FORMAT = text/turtle;q=1.0, application/rdf+xml;q=0.9, application/ld+json;q=0.7; application/json;q=0.6 ``` + ```properties LOAD_FORMAT = application/rdf+xml ``` + If `LOAD_WITH_PARAMETER` is enabled, `LOAD_FORMAT` can be used to specify which mime type should be resquest as format for the loaded data. ## 6. To go deeper - [Technical documentation](https://files.inria.fr/corese/doc/server.html) - [Storage](https://github.com/Wimmics/corese/blob/master/docs/storage/Configuring%20and%20Connecting%20to%20Different%20Storage%20Systems%20in%20Corese.md#configuring-and-connecting-to-different-storage-systems-in-corese) - - From 50a3b63a8e760f717d1796957f2e4a3cb3fa2aef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Tue, 13 Feb 2024 11:15:38 +0100 Subject: [PATCH 012/146] Add support for Canonical RDF 1.0 format --- .../core/print/CanonicalRdf10Format.java | 58 ++ .../corese/core/print/NTriplesFormat.java | 2 +- .../core/print/rdfc10/CanonicalIssuer.java | 103 ++++ .../core/print/rdfc10/CanonicalRdf10.java | 577 ++++++++++++++++++ .../print/rdfc10/CanonicalizationState.java | 193 ++++++ .../print/rdfc10/CanonicalizedDataset.java | 164 +++++ .../core/print/rdfc10/HashingUtility.java | 45 ++ .../corese/core/print/rdfc10/ListMap.java | 106 ++++ corese-core/src/main/java/module-info.java | 2 + 9 files changed, 1249 insertions(+), 1 deletion(-) create mode 100644 corese-core/src/main/java/fr/inria/corese/core/print/CanonicalRdf10Format.java create mode 100644 corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalIssuer.java create mode 100644 corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalRdf10.java create mode 100644 corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalizationState.java create mode 100644 corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalizedDataset.java create mode 100644 corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/HashingUtility.java create mode 100644 corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/ListMap.java diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/CanonicalRdf10Format.java b/corese-core/src/main/java/fr/inria/corese/core/print/CanonicalRdf10Format.java new file mode 100644 index 000000000..ce231864e --- /dev/null +++ b/corese-core/src/main/java/fr/inria/corese/core/print/CanonicalRdf10Format.java @@ -0,0 +1,58 @@ +package fr.inria.corese.core.print; + +import fr.inria.corese.core.Graph; +import fr.inria.corese.core.print.rdfc10.CanonicalRdf10; +import fr.inria.corese.core.print.rdfc10.CanonicalizedDataset; +import fr.inria.corese.kgram.api.core.Node; +import fr.inria.corese.kgram.core.Mappings; + +/** + * This class provides functionality to convert a Graph object to a string in + * Canonical RDF 1.0 format. 
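 * <p>
 * Usage sketch (based on the factories declared below):
 * {@code CanonicalRdf10Format.create(graph).toString()} canonicalizes the
 * graph's blank node labels per RDFC-1.0 and returns the quads as sorted
 * canonical N-Quads.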
+ */ +public class CanonicalRdf10Format extends NQuadsFormat { + + private CanonicalizedDataset canonicalizedDataset; + + public CanonicalRdf10Format(Graph graph) { + super(graph); + this.canonicalizedDataset = CanonicalRdf10.create(graph).canonicalRdf10(); + } + + public static CanonicalRdf10Format create(Graph graph) { + return new CanonicalRdf10Format(graph); + } + + public static CanonicalRdf10Format create(Mappings map) { + return new CanonicalRdf10Format((Graph) map.getGraph()); + } + + /** + * Converts the graph to a string in Canonical RDF 1.0 format. + * + * @return a string representation of the graph in Canonical RDF 1.0 format + */ + @Override + public String toString() { + String nquads = super.toString(); + + // Sort in codepoint order by line + String[] lines = nquads.split("\n"); + java.util.Arrays.sort(lines); + + // Concatenate lines + StringBuilder sb = new StringBuilder(); + for (String line : lines) { + sb.append(line).append("\n"); + } + + return sb.toString(); + } + + @Override + protected String printBlank(Node node) { + String identifier = this.canonicalizedDataset.getIdentifierForBlankNode(node); + return "_:" + this.canonicalizedDataset.getIssuedIdentifier(identifier); + } + +} diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/NTriplesFormat.java b/corese-core/src/main/java/fr/inria/corese/core/print/NTriplesFormat.java index 6ec529d81..1df8f13a9 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/NTriplesFormat.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/NTriplesFormat.java @@ -149,7 +149,7 @@ private String printDatatype(Node node) { * @param node the blank node to be formatted * @return a string representation of the blank node */ - private String printBlank(Node node) { + protected String printBlank(Node node) { return node.getLabel(); } diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalIssuer.java b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalIssuer.java new file mode 100644 index 000000000..adc010219 --- /dev/null +++ b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalIssuer.java @@ -0,0 +1,103 @@ +package fr.inria.corese.core.print.rdfc10; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +/** + * This class manages the issuance of canonical identifiers for blank nodes. + */ +public class CanonicalIssuer { + + private final String IDPREFIX; + private int idCounter; + // Maps blank node identifiers to their canonical identifiers + private final Map issuedIdentifierMap; + + ///////////////// + // Constructor // + ///////////////// + + /** + * Constructs a new CanonicalIssuer instance. + * + * @param idPrefix The prefix to be used for identifiers issued by this + */ + public CanonicalIssuer(String idPrefix) { + this.IDPREFIX = idPrefix; + this.idCounter = 0; + this.issuedIdentifierMap = new HashMap<>(); + } + + /** + * Constructs a new CanonicalIssuer instance as a copy of another. + * + * @param ci The CanonicalIssuer to copy. + */ + public CanonicalIssuer(CanonicalIssuer ci) { + this.IDPREFIX = ci.IDPREFIX; + this.idCounter = ci.idCounter; + this.issuedIdentifierMap = new HashMap<>(ci.issuedIdentifierMap); + } + + ///////////// + // Methods // + ///////////// + + /** + * Issues a new canonical identifier for a blank node or returns an existing one + * if already issued. + * + * @return The canonical identifier for the blank node. 
+ */ + public String issueCanonicalIdentifier(String blankNodeId) { + if (this.issuedIdentifierMap.containsKey(blankNodeId)) { + return this.issuedIdentifierMap.get(blankNodeId); + } + String issuedIdentifier = this.IDPREFIX + this.idCounter; + this.idCounter++; + this.issuedIdentifierMap.put(blankNodeId, issuedIdentifier); + return issuedIdentifier; + } + + /** + * Retrieves the canonical identifier for a blank node. + * + * @param blankNodeId The identifier of the blank node. + * @return The canonical identifier for the blank node. + */ + public String getCanonicalIdentifier(String blankNodeId) { + return this.issuedIdentifierMap.get(blankNodeId); + } + + /** + * Retrieves a set of all issued blank node identifiers. + * + * @return A set of all issued blank node identifiers. + */ + public Set getBlankNodeIdentifiers() { + return Collections.unmodifiableSet(this.issuedIdentifierMap.keySet()); + } + + /** + * Tests whether a blank node has a canonical identifier. + * + * @param blankNodeId The identifier of the blank node. + * @return True if the blank node has a canonical identifier + * false otherwise. + */ + public boolean hasCanonicalIdentifier(String blankNodeId) { + return this.issuedIdentifierMap.containsKey(blankNodeId); + } + + /** + * Retrieves the issued identifier map. + * + * @return The issued identifier map. + */ + public Map getIssuedIdentifierMap() { + return Collections.unmodifiableMap(this.issuedIdentifierMap); + } + +} diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalRdf10.java b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalRdf10.java new file mode 100644 index 000000000..6ea6a6a64 --- /dev/null +++ b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalRdf10.java @@ -0,0 +1,577 @@ +package fr.inria.corese.core.print.rdfc10; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; + +import org.apache.commons.lang3.tuple.Pair; + +import fr.inria.corese.core.EdgeFactory; +import fr.inria.corese.core.Graph; +import fr.inria.corese.kgram.api.core.Edge; +import fr.inria.corese.kgram.api.core.ExpType; +import fr.inria.corese.kgram.api.core.Node; +import fr.inria.corese.kgram.core.Mappings; + +/** + * The {@code CanonicalRdf10Format} class extends {@code RDFFormat} to provide + * RDF canonicalization in alignment with the RDF 1.0 specification. This class + * manages the process of transforming RDF graphs into a canonical form. + * + * @see RDF Dataset + * Canonicalization + * + */ +public class CanonicalRdf10 { + + private CanonicalizationState canonicalizationState; + private CanonicalizedDataset canonicalizedDataset; + + // + private EdgeFactory edgeFactory = Graph.create().getEdgeFactory(); + + ////////////////// + // Constructors // + ////////////////// + + /** + * Constructs a new {@code CanonicalRdf10Format} with the specified RDF graph. + * Initializes the canonicalization state and dataset for the graph. + * + * @param graph the RDF graph to be canonicalized + */ + private CanonicalRdf10(Graph graph) { + this.canonicalizationState = new CanonicalizationState(); + this.canonicalizedDataset = new CanonicalizedDataset(graph); + canonicalRdf10(); + } + + /** + * Constructs a new {@code CanonicalRdf10Format} with the specified RDF graph + * and a map of blank nodes to identifiers. Initializes the canonicalization + * state and dataset for the graph. 
+ * + * @param graph the RDF graph to be canonicalized + * @param blankNodesToIdentifiers a map of blank nodes to their identifiers + */ + private CanonicalRdf10(Graph graph, Map blankNodesToIdentifiers) { + this.canonicalizationState = new CanonicalizationState(); + this.canonicalizedDataset = new CanonicalizedDataset(graph, blankNodesToIdentifiers); + canonicalRdf10(); + } + + ///////////////////// + // Factory methods // + ///////////////////// + + /** + * Creates a new {@code CanonicalRdf10Format} instance for the given graph. + * + * @param graph the RDF graph to be canonicalized + * @return a new instance of {@code CanonicalRdf10Format} + */ + public static CanonicalRdf10 create(Graph graph) { + return new CanonicalRdf10(graph); + } + + /** + * Creates a new {@code CanonicalRdf10Format} instance for the graph associated + * with the given mappings. + * + * @param map the mappings containing the RDF graph to be canonicalized + * @return a new instance of {@code CanonicalRdf10Format} + */ + public static CanonicalRdf10 create(Mappings map) { + return new CanonicalRdf10((Graph) map.getGraph()); + } + + /** + * Creates a new {@code CanonicalRdf10Format} instance for the given graph and + * map of blank nodes to identifiers. + * + * @param graph the RDF graph to be canonicalized + * @param blankNodesToIdentifiers a map of blank nodes to their identifiers + * @return a new instance of {@code CanonicalRdf10Format} + */ + public static CanonicalRdf10 create(Graph graph, Map blankNodesToIdentifiers) { + return new CanonicalRdf10(graph, blankNodesToIdentifiers); + } + + /** + * Creates a new {@code CanonicalRdf10Format} instance for the graph associated + * with the given mappings and map of blank nodes to identifiers. + * + * @param map the mappings containing the RDF graph to be + * canonicalized + * @param blankNodesToIdentifiers a map of blank nodes to their identifiers + * @return a new instance of {@code CanonicalRdf10Format} + */ + public static CanonicalRdf10 create(Mappings map, Map blankNodesToIdentifiers) { + return new CanonicalRdf10((Graph) map.getGraph(), blankNodesToIdentifiers); + } + + //////////////////// + // Main algorithm // + //////////////////// + + /** + * Performs the canonicalization of an RDF 1.0 dataset. 
+ * + * @see CanonicalizationAlgorithm + */ + public CanonicalizedDataset canonicalRdf10() { + // Build blank nodes to identifiers if not already done + // Build blank nodes identifiers to quads + // 4.4.3) Step 1, 2 + this.extractQuadsForBlankNodes(); + + // Build first degree hash for each blank node + // 4.4.3) Step 3 + for (String blankNodeIdentifier : this.canonicalizedDataset.getBlankNodeIdentifiers()) { + // 4.4.3) Step 3.1 + String hash = this.hashFirstDegreeQuads(blankNodeIdentifier); + // 4.4.3) Step 3.2 + this.canonicalizationState.associateHashWithBlankNode(hash, blankNodeIdentifier); + } + + // Generate canonical identifiers for blank nodes with a unique first degree + // hash + // 4.4.3) Step 4 + for (String hash : this.canonicalizationState.getHashesSorted()) { + // 4.4.3) Step 4.1 + if (this.canonicalizationState.getBlankNodeForHash(hash).size() > 1) { + continue; + } + + // 4.4.3) Step 4.2 + String blankNodeIdentifier = this.canonicalizationState.getBlankNodeForHash(hash).get(0); + // 4.5.2) Step 1 2, 3, 4, 5 + this.canonicalizationState.issueCanonicalBlankNodeIdFor(blankNodeIdentifier); + + // 4.4.3) Step 4.3 + this.canonicalizationState.removeHash(hash); // Can be removed inside the loop because + // this.canonicalizationState.getHashesSorted() is a copy of + // the original list + } + + // Generate canonical identifiers for blank nodes with multiple first degree + // hashes + // 4.4.3) Step 5 + for (String hash : this.canonicalizationState.getHashesSorted()) { + // 4.4.3) Step 5.1 + List> hashPathList = new ArrayList<>(); + + // 4.4.3) Step 5.2 + for (String blankNodeIdentifier : this.canonicalizationState.getBlankNodeForHash(hash)) { + + // 4.4.3) Step 5.2.1 + if (this.canonicalizationState.hasCanonicalIdentifier(blankNodeIdentifier)) { + continue; + } + + // 4.4.3) Step 5.2.2 + CanonicalIssuer tempIssuer = new CanonicalIssuer("b"); + + // 4.4.3) Step 5.2.3 + tempIssuer.issueCanonicalIdentifier(blankNodeIdentifier); + + // 4.4.3) Step 5.2.4 + Pair result = this.hashNdegreeQuads(tempIssuer, blankNodeIdentifier); + hashPathList.add(result); + } + + // 4.4.3) Step 5.3 + + // sort the list by the hash + hashPathList.sort((p1, p2) -> p1.getLeft().compareTo(p2.getLeft())); + + for (Pair result : hashPathList) { + CanonicalIssuer issuer = result.getRight(); + + // 4.4.3) Step 5.3.1 + for (String existingIdentifier : issuer.getBlankNodeIdentifiers()) { + this.canonicalizationState.issueCanonicalBlankNodeIdFor(existingIdentifier); + } + } + } + + // 4.4.3) Step 6 + // Add the issued identifiers map from the canonical issuer to the canonicalized + // dataset. + this.canonicalizedDataset.setIssuedIdentifierMap(this.canonicalizationState.getIssuedIdentifierMap()); + + return this.canonicalizedDataset; + } + + //////////////////// + // Initialization // + //////////////////// + + /** + * Extracts the quads for blank nodes from the RDF graph and adds them to the + * canonicalization state (BlankNodeIdentifier -> List). + * Also adds the blank nodes to identifiers to the canonicalized dataset if not + * already done in the constructor of the class (BlankNode -> + * BlankNodeIdentifier). 
+ */ + private void extractQuadsForBlankNodes() { + Iterable edges = this.canonicalizedDataset.getDataset().getEdges(); + + for (Edge e : edges) { + + // Create a new clean iterable (because corse iterable does not have a perfectly + // defined behavior for optimization reasons) + Edge edge = this.edgeFactory.copy(e); + + Node subject = edge.getSubjectNode(); + Node object = edge.getObjectNode(); + Node graph = edge.getGraph(); + processAndMapBlankNode(subject, edge); + processAndMapBlankNode(object, edge); + processAndMapBlankNode(graph, edge); + } + } + + /** + * Processes a given blank node by mapping to an identifier if not already done + * and adding the associated quad to the canonicalization state. If the node is + * not a blank node, the method does nothing. + * + * @param node the node to be processed and mapped + * @param edge the edge associated with the node + */ + private void processAndMapBlankNode(Node node, Edge edge) { + if (node.isBlank()) { + // Add blank node to identifiers if not already done + this.canonicalizedDataset.associateBlankNodeWithIdentifier(node); + + // Add quad to blank node identifier + // 4.4.3) Step 2.1 + this.canonicalizationState + .associateBlankNodeWithQuad(this.canonicalizedDataset.getIdentifierForBlankNode(node), edge); + } + } + + ////////////////////////// + // HashFirstDegreeQuads // + ////////////////////////// + + /** + * Hashes the first degree quads for a given blank node identifier. + * + * @param blankNodeIdentifier the identifier of the blank node + * @return the hash of the first degree quads for the given blank node + * + * @see Hashing the + * First Degree Quads + */ + private String hashFirstDegreeQuads(String blankNodeIdentifier) { + // 4.6.3) Step 1 + List nquads = new ArrayList<>(); + + // 4.6.3) Step 2, 3 + for (Edge quad : this.canonicalizationState.getQuadsForBlankNode(blankNodeIdentifier)) { + nquads.add(serializeQuad(quad, blankNodeIdentifier)); + } + + // 4.6.3) Step 4 + nquads.sort(String::compareTo); + return HashingUtility.sha256(String.join("\n", nquads) + "\n"); + } + + /** + * Serializes a quad in N-Quads format. The method replaces the blank node + * identifier of the reference blank node with "_:a" and all other blank node + * identifiers with "_:z". + * + * @param quad the quad to be serialized + * @param referenceBlankNodeIdentifier the identifier of the blank node to be + * referenced + * @return the serialized quad + */ + private String serializeQuad(Edge quad, String referenceBlankNodeIdentifier) { + Node subject = quad.getSubjectNode(); + Node predicate = quad.getEdgeNode(); + Node object = quad.getObjectNode(); + Node graph = quad.getGraph(); + + boolean isDefaultGraph = graph.getLabel().equals(ExpType.DEFAULT_GRAPH); + + String subjectString = getNodeString(subject, referenceBlankNodeIdentifier); + String predicateString = predicate.toString(); // Predicates cannot be blank nodes + String objectString = getNodeString(object, referenceBlankNodeIdentifier); + String graphString = isDefaultGraph ? "" : getNodeString(graph, referenceBlankNodeIdentifier); + + return subjectString + " " + predicateString + " " + objectString + (isDefaultGraph ? "" : " " + graphString) + + " ."; + } + + /** + * Returns the string representation of a node. If the node is a blank node, the + * method returns "_:a" if the node is the reference blank node identifier and + * "_:z" otherwise. 
+ * + * @param node the node to be serialized + * @param referenceBlankNodeIdentifier the identifier of the blank node to be + * referenced + * @return the string representation of the node + */ + private String getNodeString(Node node, String referenceBlankNodeIdentifier) { + if (node.isBlank()) { + return this.canonicalizedDataset.getIdentifierForBlankNode(node).equals(referenceBlankNodeIdentifier) + ? "_:a" + : "_:z"; + } else { + return node.toString(); + } + + } + + /////////////////////// + // HashN-DegreeQuads // + /////////////////////// + + /** + * Hashes the N-degree quads for a given blank node identifier. + * + * @param issuer the canonical issuer + * @param blankNodeId the identifier of the blank node + * @return a pair containing the hash of the N-degree quads for the given blank + * node and the canonical issuer + * + * @see Hashing + * N-degree Quads + * + */ + private Pair hashNdegreeQuads(CanonicalIssuer issuer, String blankNodeId) { + // Reference issuer because issuer is modified in the method body + // in step 4.8.3) Step 5.6 + CanonicalIssuer refIssuer = issuer; + + // 4.8.3) Step 1 + // Use a tree map to ensure that the hashes are sorted + Map relatedHashToRelatedBNIdMap = new TreeMap<>(); + + // 4.8.3) Step 2 + List quads = this.canonicalizationState.getQuadsForBlankNode(blankNodeId); + + // 4.8.3) Step 3 + for (Edge quad : quads) { + processQuadEntry(quad, refIssuer, blankNodeId, relatedHashToRelatedBNIdMap, "s", quad.getSubjectNode()); + processQuadEntry(quad, refIssuer, blankNodeId, relatedHashToRelatedBNIdMap, "o", quad.getObjectNode()); + processQuadEntry(quad, refIssuer, blankNodeId, relatedHashToRelatedBNIdMap, "g", quad.getGraph()); + } + + // 4.8.3) Step 4 + StringBuilder data = new StringBuilder(); + + // 4.8.3) Step 5 + // Hash are sorted by the tree map + for (String hash : relatedHashToRelatedBNIdMap.keySet()) { + + // 4.8.3) Step 5.1 + data.append(hash); + + // 4.8.3) Step 5.2 + String chosenPath = ""; + + // 4.8.3) Step 5.3 + CanonicalIssuer chosenIssuer = null; + + // 4.8.3) Step 5.4 + for (List permutation : this.permute(new ArrayList<>(relatedHashToRelatedBNIdMap.values()))) { + + // 4.8.3) Step 5.4.1 + CanonicalIssuer issuerCopy = new CanonicalIssuer(refIssuer); + + // 4.8.3) Step 5.4.2 + String path = ""; + + // 4.8.3) Step 5.4.3 + List recursionList = new ArrayList<>(); + + // 4.8.3) Step 5.4.4 + for (String relatedBNId : permutation) { + + // 4.8.3) Step 5.4.4.1 + if (this.canonicalizationState.hasCanonicalIdentifier(relatedBNId)) { + path += "_:" + this.canonicalizationState.getCanonicalIdentifierFor(relatedBNId); + } + // 4.8.3) Step 5.4.4.2 + else { + // 4.8.3) Step 5.4.4.2.1 + if (!issuerCopy.hasCanonicalIdentifier(relatedBNId)) { + recursionList.add(relatedBNId); + } + // 4.8.3) Step 5.4.4.2.2 + path += "_:" + issuerCopy.issueCanonicalIdentifier(relatedBNId); + } + + // 4.8.3) Step 5.4.4.3 + if (!chosenPath.isEmpty() && path.length() >= chosenPath.length() + && path.compareTo(chosenPath) > 0) { + break; + } + } + + // 4.8.3) Step 5.4.5 + for (String relatedBNId : recursionList) { + // 4.8.3) Step 5.4.5.1 + Pair result = this.hashNdegreeQuads(issuerCopy, relatedBNId); + + // 4.8.3) Step 5.4.5.2 + path += "_:" + issuerCopy.issueCanonicalIdentifier(relatedBNId); + + // 4.8.3) Step 5.4.5.3 + path += "<" + result + ">"; + + // 4.8.3) Step 5.4.5.4 + issuerCopy = result.getRight(); + + // 4.8.3) Step 5.4.5.5 + if (!chosenPath.isEmpty() && path.length() >= chosenPath.length() + && path.compareTo(chosenPath) > 0) { + break; + } + } + + // 4.8.3) 
Step 5.4.6 + if (chosenPath.isEmpty() || path.compareTo(chosenPath) < 0) { + chosenPath = path; + chosenIssuer = issuerCopy; + } + } + + // 4.8.3) Step 5.5 + data.append(chosenPath); + + // 4.8.3) Step 5.6 + refIssuer = chosenIssuer; + } + + // 4.8.3) Step 6 + return Pair.of(HashingUtility.sha256(data.toString()), refIssuer); + } + + /** + * Generates all possible permutations of a given list. + * + * @param original The original list to be permuted. + * @param The type of elements in the list. + * @return A list of lists, where each inner list represents a permutation of + * the original list. + */ + private List> permute(List original) { + if (original.isEmpty()) { + List> result = new ArrayList<>(); + result.add(new ArrayList<>()); + return result; + } + + T firstElement = original.remove(0); + List> returnValue = new ArrayList<>(); + List> permutations = permute(original); + + for (List smallerPermutated : permutations) { + for (int index = 0; index <= smallerPermutated.size(); index++) { + List temp = new ArrayList<>(smallerPermutated); + temp.add(index, firstElement); + returnValue.add(temp); + } + } + return returnValue; + } + + /** + * Processes a quad entry by generating a hash for the related blank node and + * updating the hash-to-blank-node map. + * + * @param quad The quad edge to process. + * @param issuer The canonical issuer. + * @param blankNodeId The identifier for the current blank node. + * @param relatedHashToRelatedBNIdMap The map that stores the hash-to-blank-node + * mappings. + * @param position The position of the quad entry. + * @param relatedBN The related blank node. + */ + private void processQuadEntry(Edge quad, CanonicalIssuer issuer, String blankNodeId, + Map relatedHashToRelatedBNIdMap, String position, Node relatedBN) { + String relatedBNId = this.canonicalizedDataset.getIdentifierForBlankNode(relatedBN); + + if (relatedBN.isBlank() && !relatedBNId.equals(blankNodeId)) { + // 4.8.3) Step 3.1.1 + String relatedHash = this.hashRelatedBlankNode(relatedBNId, quad, issuer, position); + + // 4.8.3) Step 3.1.2 + relatedHashToRelatedBNIdMap.put(relatedHash, relatedBNId); + } + } + + ////////////////////////// + // HashRelatedBlankNode // + ////////////////////////// + + /** + * Hashes a related blank node. + * + * @param relatedBNId the identifier of the related blank node + * @param quad the quad to be associated with the blank node + * @param issuer the canonical issuer + * @param position the position of the related blank node + * @return the related hash for the related blank node + * + * @see Hashing + * a Related Blank Node + * + */ + private String hashRelatedBlankNode(String relatedBNId, Edge quad, CanonicalIssuer issuer, + String position) { + // 4.7.3) Step 1 + StringBuilder input = new StringBuilder(); + input.append(position); + + // 4.7.3) Step 2 + // Append predicate value if position is not 'g' + if (!position.equals("g")) { + input.append(quad.getPredicateValue().toString()); + } + + // 4.7.3) Step 3 + // If there is a canonical identifier for relatedBNId, use it; otherwise, use + // the issuer's identifier. + if (this.canonicalizationState.hasCanonicalIdentifier(relatedBNId) + || issuer.hasCanonicalIdentifier(relatedBNId)) { + + input.append("_:" + (this.canonicalizationState.hasCanonicalIdentifier(relatedBNId) + ? 
this.canonicalizationState.getCanonicalIdentifierFor(relatedBNId) + : issuer.getCanonicalIdentifier(relatedBNId))); + } + // 4.7.3) Step 4 + // Append hash for blank node as fallback + else { + input.append(this.canonicalizationState.getHashForBlankNode(relatedBNId)); + } + + // 4.7.3) Step 5 + return HashingUtility.sha256(input.toString()); + } + + ///////////////////////// + // Overriding toString // + ///////////////////////// + + /** + * Returns a string representation of the RDF graph in canonical form. + * + * @return a string representation of the RDF graph in canonical form + */ + @Override + public String toString() { + return super.toString(); + } + +} diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalizationState.java b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalizationState.java new file mode 100644 index 000000000..1f052f9f3 --- /dev/null +++ b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalizationState.java @@ -0,0 +1,193 @@ +package fr.inria.corese.core.print.rdfc10; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +import fr.inria.corese.kgram.api.core.Edge; + +/** + * This class manages the state of canonicalization, particularly handling + * the associations between blank nodes and their corresponding quads, + * maintaining a mapping from hash values to blank nodes and maintaining a + * mapping from blank node identifiers to canonical blank node identifiers. + */ +public class CanonicalizationState { + + private final ListMap blankNodesToQuad = new ListMap<>(); + private final ListMap hashToBlankNode = new ListMap<>(); + private final CanonicalIssuer canonicalIssuer = new CanonicalIssuer("c14n"); + + ///////////////// + // Constructor // + ///////////////// + + /** + * Constructs a new CanonicalizationState instance. + */ + public CanonicalizationState() { + } + + /////////////////////////////////// + // Quad to Blank Node Management // + /////////////////////////////////// + + /** + * Maps a blank node identifier to a specific quad. + * + * @param blankNodeId The identifier of the blank node. + * @param quad The quad to be associated with the blank node. + */ + public void associateBlankNodeWithQuad(String blankNodeId, Edge quad) { + this.blankNodesToQuad.put(blankNodeId, quad); + } + + /** + * Retrieves the list of quads associated with a specific blank node. + * + * @param blankNodeId The identifier of the blank node. + * @return A list of quads associated with the blank node. + */ + public List getQuadsForBlankNode(String blankNodeId) { + return Collections.unmodifiableList(this.blankNodesToQuad.get(blankNodeId)); + } + + /////////////////////////////////// + // Hash to Blank Node Management // + /////////////////////////////////// + + /** + * Maps a hash value to a specific blank node identifier. + * + * @param hash The hash value. + * @param blankNodeId The identifier of the blank node. + */ + public void associateHashWithBlankNode(String hash, String blankNodeId) { + this.hashToBlankNode.put(hash, blankNodeId); + } + + /** + * Retrieves blanks nodes associated with a specific hash value. + * + * @param hash The hash value. + * @return A list of blank nodes associated with the hash value. + */ + public void removeHash(String hash) { + this.hashToBlankNode.remove(hash); + } + + /** + * Retrieves blanks nodes associated with a specific hash value. + * + * @param hash The hash value. 
+ * @return A list of blank nodes associated with the hash value. + */ + public List getBlankNodeForHash(String hash) { + return Collections.unmodifiableList(this.hashToBlankNode.get(hash)); + } + + /** + * Retrieves the hash value associated with a specific blank node identifier. + * + * @param blankNodeId The identifier of the blank node. + * @return The hash value associated with the blank node or null if no hash + * value is associated. + */ + public String getHashForBlankNode(String blankNodeId) { + for (String hash : hashToBlankNode.keySet()) { + if (hashToBlankNode.get(hash).contains(blankNodeId)) { + return hash; + } + } + return null; + } + + /** + * Retrieves a sorted list of hashes. + * + * @return A copy of the list of hashes sorted in code point order. + */ + public List getHashesSorted() { + // hash are sorted in code point order by the ListMap implementation + List sortedHashes = new ArrayList<>(hashToBlankNode.keySet()); + return Collections.unmodifiableList(sortedHashes); + } + + //////////////////////////////////////// + // Canonical Blank Node ID Management // + //////////////////////////////////////// + + /** + * Issues a canonical blank node identifier for a given blank node identifier. + * If a canonical blank node identifier has already been issued for the given + * blank node identifier, the previously issued identifier is returned. + * + * @param blankNodeId The blank node identifier. + * @return The canonical blank node identifier. + */ + public String issueCanonicalBlankNodeIdFor(String blankNodeId) { + return this.canonicalIssuer.issueCanonicalIdentifier(blankNodeId); + } + + /** + * Tests whether a canonical blank node identifier has been issued for a given + * blank node identifier. + * + * @param blankNodeId The blank node identifier. + * @return True if a canonical blank node identifier has been issued for the + * given + */ + public boolean hasCanonicalIdentifier(String blankNodeId) { + return this.canonicalIssuer.hasCanonicalIdentifier(blankNodeId); + } + + /** + * Retrieves the canonical blank node identifier for a given blank node + * identifier. + * + * @param blankNodeId The blank node identifier. + * @return The canonical blank node identifier. + */ + public String getCanonicalIdentifierFor(String blankNodeId) { + return this.canonicalIssuer.getCanonicalIdentifier(blankNodeId); + } + + /** + * Retrieves the issued identifier map. 
+ * + * @return A unmodifiable map of issued blank node identifiers to canonical + */ + public Map getIssuedIdentifierMap() { + return this.canonicalIssuer.getIssuedIdentifierMap(); + } + + /////////////// + // To String // + /////////////// + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + + sb.append("Blank Nodes to Quads Mapping: \n"); + this.blankNodesToQuad.forEach((key, value) -> sb.append(key).append(" -> ").append(value).append("\n")); + + sb.append("\n"); + + sb.append("Hash to Blank Node Mapping: \n"); + this.hashToBlankNode.forEach((key, value) -> sb.append(key).append(" -> ").append(value).append("\n")); + + sb.append("\n"); + + sb.append("Blank Node to Canonical Blank Node Mapping: \n"); + this.canonicalIssuer.getBlankNodeIdentifiers().forEach(identifier -> { + sb.append(identifier); + sb.append(" -> "); + sb.append(this.canonicalIssuer.issueCanonicalIdentifier(identifier)); + sb.append("\n"); + }); + + return sb.toString(); + } +} diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalizedDataset.java b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalizedDataset.java new file mode 100644 index 000000000..72e30b6f9 --- /dev/null +++ b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalizedDataset.java @@ -0,0 +1,164 @@ +package fr.inria.corese.core.print.rdfc10; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import fr.inria.corese.core.Graph; +import fr.inria.corese.kgram.api.core.Node; + +/** + * Represents a dataset that has undergone canonicalization. + * This class manages a graph and the mapping of blank nodes to their + * identifiers. + */ +public class CanonicalizedDataset { + + private final Graph dataset; + private Map blankNodesToIdentifiers = new HashMap<>(); + private final boolean blankNodesToIdentifiersInitialized; + private Map issuedIdentifierMap = new HashMap<>(); + + ///////////////// + // Constructor // + ///////////////// + + /** + * Constructs a CanonicalizedDataset with a given graph. + * Initializes the blank node to identifier mapping as uninitialized. + * + * @param graph The graph to be associated with this dataset. + */ + public CanonicalizedDataset(Graph graph) { + this.dataset = graph; + this.blankNodesToIdentifiersInitialized = false; + } + + /** + * Constructs a CanonicalizedDataset with a given graph and a pre-defined + * mapping of blank nodes to identifiers. + * + * @param graph The graph to be associated with this dataset. + * @param blankNodesToIdentifiers The pre-defined mapping of blank nodes to + * their identifiers. + */ + public CanonicalizedDataset(Graph graph, Map blankNodesToIdentifiers) { + this.dataset = graph; + this.blankNodesToIdentifiers = blankNodesToIdentifiers; + this.blankNodesToIdentifiersInitialized = true; + } + + //////////////////////// + // Dataset Management // + //////////////////////// + + /** + * Retrieves the dataset associated with this CanonicalizedDataset. + * + * @return The associated graph. + */ + public Graph getDataset() { + return dataset; + } + + /////////////////////////////////////////////////// + // Blank Nodes to Identifiers Mapping Management // + /////////////////////////////////////////////////// + + /** + * Adds a blank node and its identifier to the mapping. + * Only adds the blank node identifier if the mapping has not been initialized. + * + * @param blankNode The blank node to be added. 
+ * @throws IllegalArgumentException if the node is not a blank node. + */ + public void associateBlankNodeWithIdentifier(Node blankNode) { + if (!blankNode.isBlank()) { + throw new IllegalArgumentException("Node is not blank"); + } + + if (this.blankNodesToIdentifiersInitialized) { + return; + } + + String identifier = blankNode.getLabel(); + this.blankNodesToIdentifiers.put(blankNode, identifier); + } + + /** + * Retrieves the identifier associated with a given blank node. + * + * @param blankNode The blank node. + * @return The identifier associated with the blank node. + */ + public String getIdentifierForBlankNode(Node blankNode) { + return blankNodesToIdentifiers.get(blankNode); + } + + /** + * Retrieves the mapping of blank nodes to identifiers. + * + * @return The mapping of blank nodes to identifiers. + */ + public Collection getBlankNodeIdentifiers() { + return Collections.unmodifiableCollection(blankNodesToIdentifiers.values()); + } + + ////////////////////////////////////// + // Issued Identifier Map Management // + ////////////////////////////////////// + + /** + * Sets the issued identifier map. + * + * @param issuedIdentifierMap The issued identifier map. + */ + public void setIssuedIdentifierMap(Map issuedIdentifierMap) { + this.issuedIdentifierMap = issuedIdentifierMap; + } + + /** + * Retrieves the issued identifier map. + * + * @return The issued identifier map. + */ + public String getIssuedIdentifier(String blankNodeId) { + return issuedIdentifierMap.get(blankNodeId); + } + + /////////////// + // To String // + /////////////// + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + + sb.append("Dataset: \n"); + sb.append(dataset.size()); + sb.append(" triples\n"); + + sb.append("\n"); + + sb.append("Blank Nodes to Identifiers Mapping: \n"); + blankNodesToIdentifiers.forEach((blankNode, identifier) -> { + sb.append(blankNode); + sb.append(" -> "); + sb.append(identifier); + sb.append("\n"); + }); + + sb.append("\n"); + + sb.append("Issued Identifier Map: \n"); + issuedIdentifierMap.forEach((blankNodeId, issuedIdentifier) -> { + sb.append(blankNodeId); + sb.append(" -> "); + sb.append(issuedIdentifier); + sb.append("\n"); + }); + + return sb.toString(); + } +} diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/HashingUtility.java b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/HashingUtility.java new file mode 100644 index 000000000..074403d11 --- /dev/null +++ b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/HashingUtility.java @@ -0,0 +1,45 @@ +package fr.inria.corese.core.print.rdfc10; + +import java.nio.charset.StandardCharsets; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; + +/** + * Utility class for hashing functionalities. + */ +public class HashingUtility { + + /** + * Generates a SHA-256 hash of the input string. + * + * @param input the input string to hash + * @return the hashed string in hexadecimal format + */ + public static String sha256(String input) { + try { + MessageDigest digest = MessageDigest.getInstance("SHA-256"); + byte[] encodedhash = digest.digest(input.getBytes(StandardCharsets.UTF_8)); + return toHexString(encodedhash); + } catch (NoSuchAlgorithmException e) { + throw new RuntimeException("SHA-256 algorithm not found", e); + } + } + + /** + * Converts a byte array to a hexadecimal string. 
+ * + * @param hash the byte array to convert + * @return the hexadecimal string + */ + private static String toHexString(byte[] hash) { + StringBuilder hexString = new StringBuilder(2 * hash.length); + for (byte b : hash) { + String hex = Integer.toHexString(0xff & b); + if (hex.length() == 1) { + hexString.append('0'); + } + hexString.append(hex); + } + return hexString.toString(); + } +} diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/ListMap.java b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/ListMap.java new file mode 100644 index 000000000..aaba2b9d3 --- /dev/null +++ b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/ListMap.java @@ -0,0 +1,106 @@ +package fr.inria.corese.core.print.rdfc10; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.TreeMap; + +/** + * A map structure where each key is associated with a list of values. + * This class facilitates the storage of multiple values per key. + * + * @param the type of keys maintained by this map + * @param the type of mapped values + */ +public class ListMap implements Map> { + private final Map> map = new TreeMap<>(); + + @Override + public void clear() { + map.clear(); + } + + @Override + public boolean containsKey(Object key) { + return map.containsKey(key); + } + + @Override + public boolean containsValue(Object value) { + return map.containsValue(value); + } + + @Override + public Set>> entrySet() { + return map.entrySet(); + } + + @Override + public List get(Object key) { + return map.get(key); + } + + @Override + public boolean isEmpty() { + return map.isEmpty(); + } + + /** + * Returns a set view of the keys contained in this map. + * The set is ordered according to the natural ordering of its elements, + * which is determined by the internal TreeMap used in the implementation. + * + * @return a set view of the keys contained in this map, ordered according to + * the natural ordering of its elements + */ + @Override + public Set keySet() { + return map.keySet(); + } + + @Override + public List put(K key, List value) { + return map.put(key, value); + } + + @Override + public void putAll(Map> m) { + map.putAll(m); + } + + @Override + public List remove(Object key) { + return map.remove(key); + } + + @Override + public int size() { + return map.size(); + } + + @Override + public Collection> values() { + return map.values(); + } + + /** + * Adds a value to the list associated with a specific key. 
+ * + * @param key the key with which the specified value is to be associated + * @param value the value to be associated with the specified key + */ + public void put(K key, V value) { + map.computeIfAbsent(key, k -> new ArrayList<>()).add(value); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder("{\n"); + map.forEach((key, value) -> sb.append("\t").append(key).append(" -> ").append(value).append("\n")); + sb.append("}"); + + return sb.toString(); + } +} diff --git a/corese-core/src/main/java/module-info.java b/corese-core/src/main/java/module-info.java index f2b33b0f9..0da70623e 100644 --- a/corese-core/src/main/java/module-info.java +++ b/corese-core/src/main/java/module-info.java @@ -15,6 +15,7 @@ requires semargl.rdfa; requires jdk.management; requires org.json; + requires org.apache.commons.lang3; exports fr.inria.corese.core.load; exports fr.inria.corese.core.load.result; @@ -26,6 +27,7 @@ exports fr.inria.corese.core.util; exports fr.inria.corese.core.index; exports fr.inria.corese.core.print; + exports fr.inria.corese.core.print.rdfc10; exports fr.inria.corese.core.api; exports fr.inria.corese.core.edge; exports fr.inria.corese.core.logic; From 6f074696e59ca18e2e7695c31c56e127870b001c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Tue, 13 Feb 2024 14:16:27 +0100 Subject: [PATCH 013/146] Fix method name in CanonicalizationState and ListMap classes --- .../inria/corese/core/print/rdfc10/CanonicalizationState.java | 4 ++-- .../main/java/fr/inria/corese/core/print/rdfc10/ListMap.java | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalizationState.java b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalizationState.java index 1f052f9f3..afd04b097 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalizationState.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalizationState.java @@ -40,7 +40,7 @@ public CanonicalizationState() { * @param quad The quad to be associated with the blank node. */ public void associateBlankNodeWithQuad(String blankNodeId, Edge quad) { - this.blankNodesToQuad.put(blankNodeId, quad); + this.blankNodesToQuad.add(blankNodeId, quad); } /** @@ -64,7 +64,7 @@ public List getQuadsForBlankNode(String blankNodeId) { * @param blankNodeId The identifier of the blank node. 
*/ public void associateHashWithBlankNode(String hash, String blankNodeId) { - this.hashToBlankNode.put(hash, blankNodeId); + this.hashToBlankNode.add(hash, blankNodeId); } /** diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/ListMap.java b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/ListMap.java index aaba2b9d3..57c75391c 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/ListMap.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/ListMap.java @@ -91,7 +91,7 @@ public Collection> values() { * @param key the key with which the specified value is to be associated * @param value the value to be associated with the specified key */ - public void put(K key, V value) { + public void add(K key, V value) { map.computeIfAbsent(key, k -> new ArrayList<>()).add(value); } From 7366eb28c669f308d6512670e5bc62f46839a962 Mon Sep 17 00:00:00 2001 From: Pierre Maillot Date: Tue, 13 Feb 2024 16:58:44 +0100 Subject: [PATCH 014/146] Removing println message --- .../main/java/fr/inria/corese/core/api/DataBrokerConstruct.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/corese-core/src/main/java/fr/inria/corese/core/api/DataBrokerConstruct.java b/corese-core/src/main/java/fr/inria/corese/core/api/DataBrokerConstruct.java index 06d9f3939..d6b354f32 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/api/DataBrokerConstruct.java +++ b/corese-core/src/main/java/fr/inria/corese/core/api/DataBrokerConstruct.java @@ -23,7 +23,7 @@ public interface DataBrokerConstruct extends DataBroker { default void startRuleEngine() { - System.out.println("DataBrokerConstruct startRuleEngine"); + // System.out.println("DataBrokerConstruct startRuleEngine"); } default void endRuleEngine() { From cea87b242430bf249095d22ca267d8ff02c5bd95 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Tue, 13 Feb 2024 17:40:38 +0100 Subject: [PATCH 015/146] Update Corese-server documentation --- .../Getting Started With Corese-server.md | 37 +++++++++++-------- 1 file changed, 22 insertions(+), 15 deletions(-) diff --git a/docs/getting started/Getting Started With Corese-server.md b/docs/getting started/Getting Started With Corese-server.md index 6cfe6c91a..a98167c50 100644 --- a/docs/getting started/Getting Started With Corese-server.md +++ b/docs/getting started/Getting Started With Corese-server.md @@ -18,11 +18,11 @@ This tutorial shows how to use the basic features of the Corese-server framework 4. [4.5. SPARQL engine behavior](#45-sparql-engine-behavior) 5. [4.6. SPARQL federation behavior](#46-sparql-federation-behavior) 6. [4.7. SPARQL LOAD parameters](#47-sparql-load-parameters) - 5. [6. To go deeper](#6-to-go-deeper) + 5. [5. To go deeper](#5-to-go-deeper) ## 1. Installation -Installations instructions are available on the [Corese-Command GitHub repository](https://github.com/Wimmics/corese). +Installation instructions are available on the [Corese-Command GitHub repository](https://github.com/Wimmics/corese). ## 2. Load data @@ -71,7 +71,7 @@ st:user a st:Server; ``` The keyword `st:user` designates the default endpoint available in . -In this example, we add on the default endpoint the workflow named `<#loadBeatles>` which loads the file "beatles.tll". +In this example, we add on the default endpoint the workflow named `<#loadBeatles>` which loads the file "beatles.ttl". There can be several load in a workflow body. To load Corese-server with a profile, use the options `-lp -pp "profileFile"`. 
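As a quick illustration of that command line, starting the server with a profile might look like the sketch below. The `-lp -pp` options are the ones described above; the jar file name and the profile file name are placeholders that depend on your installation, not values taken from this guide.

```shell
# Minimal sketch (assumed jar and profile file names):
# start Corese-server and load the profile passed with -pp
java -jar corese-server.jar -lp -pp "myprofile.ttl"
```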
@@ -167,7 +167,7 @@ Here we list only some of the most commonly used properties. ### 4.1. Blank node format ```properties -BLANK_NODE = _:b +BLANK_NODE = _:b ``` `BLANK_NODE` specifies the format of blank nodes. The default value is `_:b`. @@ -175,7 +175,7 @@ BLANK_NODE = _:b ### 4.2. Loading in the default graph ```properties -LOAD_IN_DEFAULT_GRAPH = true +LOAD_IN_DEFAULT_GRAPH = true ``` By default, the data is loaded into the default graph. If `LOAD_IN_DEFAULT_GRAPH` is set to `false`, the data is loaded into a named graph whose name is the path of the file. @@ -184,10 +184,10 @@ Note that internally, the default graph of the Corese server is named `http://ns #### 4.3. RDF* (RDF Star) ```properties -RDF_STAR = false +RDF_STAR = false ``` -Corese implements a prototype extension for the RDF* specification. `RDF_STAR` enables this extension. +Corese implements a prototype extension for the RDF* specification. RDF_STAR enables this extension. ### 4.4. OWL utilities @@ -200,21 +200,21 @@ By default, when a triple with the predicate `owl:imports` is loaded, the Corese ### 4.5. SPARQL engine behavior ```properties -SPARQL_COMPLIANT = false +SPARQL_COMPLIANT = false ``` `SPARQL_COMPLIANT` specifies the behavior of the SPARQL engine. If `SPARQL_COMPLIANT` is set to `true`, the SPARQL engine is compliant with the W3C test cases. In practice, this means that the SPARQL engine will consider that two literals are different if they have the same value but different types (E.g: `1` and `"1"^^xsd:integer`). ```properties -REENTRANT_QUERY = false +REENTRANT_QUERY = false ``` -`REENRANT_QUERY` enables the update during a query. This option was implemented in cooperation with the [SPARQL micro-service project](https://github.com/frmichel/sparql-micro-service). +`REENRANT_QUERY` enables the update during a query. This option was implemented in cooperation with the [SPARQL micro-service project](https://github.com/frmichel/sparql-micro-service). It is equivalent to using `-re` argument. ### 4.6. SPARQL federation behavior ```properties -SERVICE_BINDING = values +SERVICE_BINDING = values ``` When binding values between clauses from different endpoints, the Corese-server uses the `SERVICE_BINDING` property to specify the method to use. The default value is `values`. The other possible value is `filter`. @@ -251,14 +251,18 @@ SELECT * { } ``` +This is equivalent to add `@binding values` in the query. If `SERVICE_BINDING` is defined in the properties file and `@binding` is also defined in the query, then the value of `@binding` in the query is used. + ```properties -SERVICE_SLICE = 20 +SERVICE_SLICE = 20 ``` `SERVICE_SLICE` specifies the number of bindings to send to a remote endpoint. The default value is `20`. +This is equivalent to add `@slice 20` in the query. If `SERVICE_SLICE` is defined in the properties file and `@slice` is also defined in the query, then the value of `@slice` in the query is used. + ```properties -SERVICE_LIMIT = 1000 +SERVICE_LIMIT = 1000 ``` `SERVICE_LIMIT` specifies the maximum number of results to return from a remote endpoint. The default value is `1000`. In the previous example, the query sent to the remote endpoint should actually be: @@ -273,6 +277,8 @@ SELECT * { } ``` +This is equivalent to add `@limit 1000` in the query. If `SERVICE_LIMIT` is defined in the properties file and `@limit` is also defined in the query, then the value of `@limit` in the query is used. + Corese will try to obtain the next 1000 results by sending the same query with the `OFFSET` clause. 
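As a sketch of that paging behaviour, a hypothetical follow-up query sent to the remote endpoint could look like the one below; the endpoint URL is a placeholder and the exact query shape issued by Corese may differ.

```sparql
# Hypothetical second page: the same query resent with an OFFSET clause
SELECT * {
    SERVICE <http://example.org/sparql> {
        ?s ?p ?o
    }
}
LIMIT 1000
OFFSET 1000
```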
```properties @@ -281,11 +287,12 @@ SERVICE_TIMEOUT = 2000 `SERVICE_TIMEOUT` specifies the timeout in milliseconds for a remote endpoint. The default value is `10000`. +This is equivalent to add `@timeout 2000` in the query. If `SERVICE_TIMEOUT` is defined in the properties file and `@timeout` is also defined in the query, then the value of `@timeout` in the query is used. + ### 4.7. SPARQL LOAD parameters ```properties LOAD_LIMIT = 10 - ``` `LOAD_LIMIT` specifies the maximum number of triples to load from a file. This feature is not enabled by default. @@ -306,7 +313,7 @@ LOAD_FORMAT = application/rdf+xml If `LOAD_WITH_PARAMETER` is enabled, `LOAD_FORMAT` can be used to specify which mime type should be resquest as format for the loaded data. -## 6. To go deeper +## 5. To go deeper - [Technical documentation](https://files.inria.fr/corese/doc/server.html) - [Storage](https://github.com/Wimmics/corese/blob/master/docs/storage/Configuring%20and%20Connecting%20to%20Different%20Storage%20Systems%20in%20Corese.md#configuring-and-connecting-to-different-storage-systems-in-corese) From e174416067f490204654949a4aabef747b8f4bfc Mon Sep 17 00:00:00 2001 From: corby Date: Wed, 14 Feb 2024 09:43:44 +0100 Subject: [PATCH 016/146] Leverage xt:read() access level --- .../fr/inria/corese/sparql/triple/parser/Access.java | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Access.java b/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Access.java index 1b3ba2601..78b3fd107 100644 --- a/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Access.java +++ b/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Access.java @@ -451,7 +451,7 @@ void initServer() { deny(READ_WRITE); deny(WRITE); deny(SUPER_WRITE); - deny(READ_FILE); + //deny(READ_FILE); deny(LOAD_FILE); deny(JAVA_FUNCTION); // user query on protected server have USER access level @@ -464,7 +464,7 @@ void initServer() { // draft test for st:logger set(LDSCRIPT_SPARQL, RESTRICTED); set(DEFINE_FUNCTION, RESTRICTED); - set(READ, RESTRICTED); + //set(READ, RESTRICTED); } /** @@ -475,12 +475,14 @@ void initServer() { */ void init() { deny(LINKED_FUNCTION); - // xt:read st:format cannot read the file system + // xt:read cannot read the file system // use case: server mode - deny(READ_FILE); + //deny(READ_FILE); set(LDSCRIPT, PUBLIC); // authorize server for query + transform when transform is authorized set(LINKED_TRANSFORMATION, PUBLIC); + // read authorized source is allowed + set(READ, PUBLIC); } /** From 248b31fe5b0f00096f95ef6387a7a607fddabb1d Mon Sep 17 00:00:00 2001 From: corby Date: Wed, 14 Feb 2024 09:43:54 +0100 Subject: [PATCH 017/146] Leverage xt:read() access level --- .../triple/function/proxy/GraphSpecificFunction.java | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/sparql/src/main/java/fr/inria/corese/sparql/triple/function/proxy/GraphSpecificFunction.java b/sparql/src/main/java/fr/inria/corese/sparql/triple/function/proxy/GraphSpecificFunction.java index 28efdb575..12cbadc5a 100644 --- a/sparql/src/main/java/fr/inria/corese/sparql/triple/function/proxy/GraphSpecificFunction.java +++ b/sparql/src/main/java/fr/inria/corese/sparql/triple/function/proxy/GraphSpecificFunction.java @@ -234,10 +234,18 @@ public IDatatype io(Computer eval, Binding b, Environment env, Producer p, IData } case READ: - check(Feature.READ, b, path, READ_MESS); if (isFile(path)) { - check(Feature.READ_FILE, b, path, READ_MESS); + // 
do not accept file path when accept list is empty + Access.check(Feature.READ, b.getAccessLevel(), path, READ_MESS, false); + } else { + // may accept url path when accept list is empty + check(Feature.READ, b, path, READ_MESS); } + +// check(Feature.READ, b, path, READ_MESS); +// if (isFile(path)) { +// check(Feature.READ_FILE, b, path, READ_MESS); +// } IDatatype res = proc.read(dt); return res; From 07376b6ce6393a1bc649d749b66b4fb57f3f7b4e Mon Sep 17 00:00:00 2001 From: corby Date: Wed, 14 Feb 2024 10:45:52 +0100 Subject: [PATCH 018/146] DefaultResultWhenEmptyAccept Overload default behaviour when namespace accept list is empty --- .../corese/sparql/triple/parser/Access.java | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Access.java b/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Access.java index 78b3fd107..9e965485e 100644 --- a/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Access.java +++ b/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Access.java @@ -28,7 +28,8 @@ public class Access { // throw exception for coalesce // false -> sparql semantics, coalesce trap error public static boolean COALESCE_EXCEPTION = false; - + // by default, access is denied when accept list is empty + private static boolean DefaultResultWhenEmptyAccept = false; // true -> skip access control public static boolean SKIP = false; @@ -62,6 +63,7 @@ public enum Level { public static Level USER = PUBLIC; // deny access to feature public static Level DENY = DENIED; + private Level(int n) { value = n; @@ -313,8 +315,12 @@ public static List selectNamespace(Feature feature, Level level, List Date: Wed, 14 Feb 2024 10:46:24 +0100 Subject: [PATCH 019/146] Overload default behaviour when namespace accept list is empty --- .../inria/corese/core/storage/DataManagerJava.java | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/corese-core/src/main/java/fr/inria/corese/core/storage/DataManagerJava.java b/corese-core/src/main/java/fr/inria/corese/core/storage/DataManagerJava.java index b7d6d2410..39e87b69c 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/storage/DataManagerJava.java +++ b/corese-core/src/main/java/fr/inria/corese/core/storage/DataManagerJava.java @@ -149,9 +149,10 @@ void initgraph() { Load ld = Load.create(getGraph()); ld.setDataManager(this); // temporary authorize xt:read file to read e.g. 
json document - Level read = Access.setValue(Feature.READ, Level.DEFAULT); - Level readFile = Access.setValue(Feature.READ_FILE, Level.DEFAULT); - + //Level read = Access.setValue(Feature.READ, Level.DEFAULT); + // Level readFile = Access.setValue(Feature.READ_FILE, Level.DEFAULT); + // authorize xt:read() because accept list is empty during this initialization + Access.setDefaultResultWhenEmptyAccept(true); try { if (getLoad()!=null) { for (String name : getLoad()) { @@ -186,8 +187,9 @@ void initgraph() { logger.error(ex.getMessage()); } finally { - Access.set(Feature.READ, read); - Access.set(Feature.READ_FILE, readFile); + //Access.set(Feature.READ, read); + //Access.set(Feature.READ_FILE, readFile); + Access.setDefaultResultWhenEmptyAccept(false); } } From 9a14706b579fc95e979bac21a40b1389f94cc34f Mon Sep 17 00:00:00 2001 From: corby Date: Thu, 15 Feb 2024 14:31:28 +0100 Subject: [PATCH 020/146] Leverage bnode as graph identifier --- .../sparql/triple/javacc1/SparqlCorese.java | 231 +++++++++--------- .../sparql/triple/javacc1/sparql_corese.jj | 5 +- 2 files changed, 122 insertions(+), 114 deletions(-) diff --git a/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/SparqlCorese.java b/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/SparqlCorese.java index 6ff244ef7..7ee2485d0 100644 --- a/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/SparqlCorese.java +++ b/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/SparqlCorese.java @@ -510,9 +510,11 @@ final public Exp LoadPattern() throws ParseException { case Q_IRIref: case QNAME_NS: case QNAME: + case BLANK_NODE_LABEL: case VAR1: case VAR2: case GRAPH: + case ANON: case VAR3: switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { case GRAPH: @@ -3895,7 +3897,8 @@ final public void GraphGraphPattern(Exp stack) throws ParseException { final public void BasicGraphGraphPattern(Exp stack) throws ParseException { Atom src; Exp e; Source st; boolean state = false, leaf = false, rec = false; - src = VarOrIRIref(stack); + //src = VarOrIRIref(stack) + src = VarOrBlankNodeOrIRIref(stack); handler.graphPattern(src); e = GroupGraphPattern(); st = Source.create(src, e); @@ -8536,7 +8539,7 @@ final public Constant NumericLiteral() throws ParseException { break; case DECIMAL: t = jj_consume_token(DECIMAL); - datatype = RDFS.qxsdDecimal; + datatype = RDFS.qxsdDecimal; break; case DOUBLE: t = jj_consume_token(DOUBLE); @@ -8698,11 +8701,13 @@ final public Variable BlankNode(Exp stack) throws ParseException { t = jj_consume_token(BLANK_NODE_LABEL); // check if the blank node is already in the table v = astq.newBlankNode(stack, t.image); + handler.declareBlankNode(t); break; case ANON: t = jj_consume_token(ANON); // create a new blank node v = astq.newBlankNode(); + handler.declareBlankNode(t); break; default: jj_la1[272] = jj_gen; @@ -8856,11 +8861,6 @@ final private boolean jj_2_19(int xla) { finally { jj_save(18, xla); } } - final private boolean jj_3R_108() { - if (jj_3R_142()) return true; - return false; - } - final private boolean jj_3R_107() { if (jj_3R_141()) return true; return false; @@ -8925,18 +8925,13 @@ final private boolean jj_3R_121() { return false; } - final private boolean jj_3R_182() { - if (jj_scan_token(SELECT)) return true; - return false; - } - final private boolean jj_3R_185() { if (jj_scan_token(NOT)) return true; return false; } - final private boolean jj_3_8() { - if (jj_3R_73()) return true; + final private boolean jj_3R_182() { + if (jj_scan_token(SELECT)) return true; return false; } @@ -8945,6 +8940,11 
@@ final private boolean jj_3R_86() { return false; } + final private boolean jj_3_8() { + if (jj_3R_73()) return true; + return false; + } + final private boolean jj_3R_88() { if (jj_3R_122()) return true; return false; @@ -9512,18 +9512,13 @@ final private boolean jj_3R_264() { return false; } - final private boolean jj_3R_177() { - if (jj_scan_token(GROUP)) return true; - return false; - } - final private boolean jj_3R_256() { if (jj_scan_token(MINUS)) return true; return false; } - final private boolean jj_3R_154() { - if (jj_3R_178()) return true; + final private boolean jj_3R_177() { + if (jj_scan_token(GROUP)) return true; return false; } @@ -9532,8 +9527,8 @@ final private boolean jj_3R_255() { return false; } - final private boolean jj_3R_153() { - if (jj_3R_177()) return true; + final private boolean jj_3R_154() { + if (jj_3R_178()) return true; return false; } @@ -9542,8 +9537,8 @@ final private boolean jj_3R_254() { return false; } - final private boolean jj_3R_152() { - if (jj_3R_176()) return true; + final private boolean jj_3R_153() { + if (jj_3R_177()) return true; return false; } @@ -9552,6 +9547,11 @@ final private boolean jj_3R_245() { return false; } + final private boolean jj_3R_152() { + if (jj_3R_176()) return true; + return false; + } + final private boolean jj_3R_259() { Token xsp; xsp = jj_scanpos; @@ -9655,13 +9655,13 @@ final private boolean jj_3R_258() { return false; } - final private boolean jj_3R_249() { - if (jj_scan_token(VALUES)) return true; + final private boolean jj_3R_220() { + if (jj_3R_236()) return true; return false; } - final private boolean jj_3R_220() { - if (jj_3R_236()) return true; + final private boolean jj_3R_249() { + if (jj_scan_token(VALUES)) return true; return false; } @@ -9675,13 +9675,13 @@ final private boolean jj_3R_251() { return false; } - final private boolean jj_3R_246() { - if (jj_scan_token(SCOPE)) return true; + final private boolean jj_3_18() { + if (jj_3R_83()) return true; return false; } - final private boolean jj_3_18() { - if (jj_3R_83()) return true; + final private boolean jj_3R_246() { + if (jj_scan_token(SCOPE)) return true; return false; } @@ -9731,14 +9731,6 @@ final private boolean jj_3R_94() { return false; } - final private boolean jj_3R_66() { - Token xsp; - xsp = jj_scanpos; - if (jj_3R_94()) jj_scanpos = xsp; - if (jj_3R_95()) return true; - return false; - } - final private boolean jj_3R_227() { Token xsp; xsp = jj_scanpos; @@ -9749,8 +9741,11 @@ final private boolean jj_3R_227() { return false; } - final private boolean jj_3R_243() { - if (jj_scan_token(MINUSP)) return true; + final private boolean jj_3R_66() { + Token xsp; + xsp = jj_scanpos; + if (jj_3R_94()) jj_scanpos = xsp; + if (jj_3R_95()) return true; return false; } @@ -9759,6 +9754,11 @@ final private boolean jj_3R_184() { return false; } + final private boolean jj_3R_243() { + if (jj_scan_token(MINUSP)) return true; + return false; + } + final private boolean jj_3R_93() { if (jj_scan_token(NAMED)) return true; return false; @@ -10169,18 +10169,13 @@ final private boolean jj_3R_223() { return false; } - final private boolean jj_3R_235() { - if (jj_3R_249()) return true; - return false; - } - final private boolean jj_3R_113() { if (jj_3R_67()) return true; return false; } - final private boolean jj_3R_234() { - if (jj_3R_248()) return true; + final private boolean jj_3R_235() { + if (jj_3R_249()) return true; return false; } @@ -10202,8 +10197,8 @@ final private boolean jj_3R_208() { return false; } - final private boolean jj_3R_233() { - if 
(jj_3R_247()) return true; + final private boolean jj_3R_234() { + if (jj_3R_248()) return true; return false; } @@ -10217,6 +10212,11 @@ final private boolean jj_3R_76() { return false; } + final private boolean jj_3R_233() { + if (jj_3R_247()) return true; + return false; + } + final private boolean jj_3R_232() { if (jj_3R_246()) return true; return false; @@ -10252,6 +10252,16 @@ final private boolean jj_3R_229() { return false; } + final private boolean jj_3R_212() { + Token xsp; + xsp = jj_scanpos; + if (jj_3_17()) { + jj_scanpos = xsp; + if (jj_3R_226()) return true; + } + return false; + } + final private boolean jj_3R_228() { if (jj_3R_242()) return true; return false; @@ -10288,16 +10298,6 @@ final private boolean jj_3R_219() { return false; } - final private boolean jj_3R_212() { - Token xsp; - xsp = jj_scanpos; - if (jj_3_17()) { - jj_scanpos = xsp; - if (jj_3R_226()) return true; - } - return false; - } - final private boolean jj_3R_148() { if (jj_3R_173()) return true; return false; @@ -10308,13 +10308,13 @@ final private boolean jj_3R_276() { return false; } - final private boolean jj_3R_124() { - if (jj_scan_token(WHERE)) return true; + final private boolean jj_3R_275() { + if (jj_3R_278()) return true; return false; } - final private boolean jj_3R_275() { - if (jj_3R_278()) return true; + final private boolean jj_3R_124() { + if (jj_scan_token(WHERE)) return true; return false; } @@ -10368,23 +10368,18 @@ final private boolean jj_3R_269() { return false; } - final private boolean jj_3R_123() { - if (jj_scan_token(DATA)) return true; - return false; - } - final private boolean jj_3R_268() { if (jj_3R_100()) return true; return false; } - final private boolean jj_3R_267() { - if (jj_3R_139()) return true; + final private boolean jj_3R_123() { + if (jj_scan_token(DATA)) return true; return false; } - final private boolean jj_3R_64() { - if (jj_3R_91()) return true; + final private boolean jj_3R_267() { + if (jj_3R_139()) return true; return false; } @@ -10394,6 +10389,11 @@ final private boolean jj_3R_78() { return false; } + final private boolean jj_3R_64() { + if (jj_3R_91()) return true; + return false; + } + final private boolean jj_3R_63() { if (jj_3R_67()) return true; return false; @@ -10486,11 +10486,6 @@ final private boolean jj_3_13() { return false; } - final private boolean jj_3_5() { - if (jj_3R_66()) return true; - return false; - } - final private boolean jj_3R_169() { if (jj_scan_token(LOOP)) return true; return false; @@ -10501,6 +10496,11 @@ final private boolean jj_3R_151() { return false; } + final private boolean jj_3_5() { + if (jj_3R_66()) return true; + return false; + } + final private boolean jj_3R_204() { if (jj_3R_219()) return true; return false; @@ -10511,14 +10511,14 @@ final private boolean jj_3_4() { return false; } - final private boolean jj_3R_89() { - if (jj_scan_token(INSERT)) return true; - if (jj_scan_token(DATA)) return true; + final private boolean jj_3R_77() { + if (jj_3R_78()) return true; return false; } - final private boolean jj_3R_77() { - if (jj_3R_78()) return true; + final private boolean jj_3R_89() { + if (jj_scan_token(INSERT)) return true; + if (jj_scan_token(DATA)) return true; return false; } @@ -10537,13 +10537,13 @@ final private boolean jj_3_3() { return false; } - final private boolean jj_3R_202() { - if (jj_3R_61()) return true; + final private boolean jj_3R_175() { + if (jj_3R_99()) return true; return false; } - final private boolean jj_3R_175() { - if (jj_3R_99()) return true; + final private boolean jj_3R_202() { 
+ if (jj_3R_61()) return true; return false; } @@ -10633,13 +10633,18 @@ final private boolean jj_3R_118() { return false; } + final private boolean jj_3R_116() { + if (jj_scan_token(DISTINCT)) return true; + return false; + } + final private boolean jj_3_2() { if (jj_3R_62()) return true; return false; } - final private boolean jj_3R_116() { - if (jj_scan_token(DISTINCT)) return true; + final private boolean jj_3R_158() { + if (jj_scan_token(TUPLE)) return true; return false; } @@ -10655,11 +10660,6 @@ final private boolean jj_3R_95() { return false; } - final private boolean jj_3R_158() { - if (jj_scan_token(TUPLE)) return true; - return false; - } - final private boolean jj_3R_122() { Token xsp; xsp = jj_scanpos; @@ -10682,13 +10682,13 @@ final private boolean jj_3R_140() { return false; } - final private boolean jj_3R_165() { - if (jj_scan_token(DESC)) return true; + final private boolean jj_3R_201() { + if (jj_3R_157()) return true; return false; } - final private boolean jj_3R_201() { - if (jj_3R_157()) return true; + final private boolean jj_3R_165() { + if (jj_scan_token(DESC)) return true; return false; } @@ -10710,22 +10710,22 @@ final private boolean jj_3R_83() { return false; } - final private boolean jj_3R_138() { + final private boolean jj_3R_181() { Token xsp; xsp = jj_scanpos; - if (jj_scan_token(38)) { + if (jj_3R_200()) { jj_scanpos = xsp; - if (jj_3R_165()) return true; + if (jj_3R_201()) return true; } return false; } - final private boolean jj_3R_181() { + final private boolean jj_3R_138() { Token xsp; xsp = jj_scanpos; - if (jj_3R_200()) { + if (jj_scan_token(38)) { jj_scanpos = xsp; - if (jj_3R_201()) return true; + if (jj_3R_165()) return true; } return false; } @@ -10746,17 +10746,17 @@ final private boolean jj_3R_180() { return false; } - final private boolean jj_3_9() { - if (jj_3R_74()) return true; - return false; - } - final private boolean jj_3R_146() { if (jj_scan_token(QUERY)) return true; if (jj_scan_token(LPAREN)) return true; return false; } + final private boolean jj_3_9() { + if (jj_3R_74()) return true; + return false; + } + final private boolean jj_3R_104() { if (jj_3R_101()) return true; return false; @@ -10832,6 +10832,11 @@ final private boolean jj_3R_109() { return false; } + final private boolean jj_3R_108() { + if (jj_3R_142()) return true; + return false; + } + public SparqlCoreseTokenManager token_source; JavaCharStream jj_input_stream; public Token token, jj_nt; @@ -10863,7 +10868,7 @@ final private boolean jj_3R_109() { jj_la1_8(); } private static void jj_la1_0() { - jj_la1_0 = new int[] 
{0x3f000000,0x0,0x3f000000,0x3f060000,0x60000,0x0,0x404000,0x808000,0xf00,0xf00,0xf00,0xc3ff00,0x0,0x0,0xc0c000,0x3700,0xc0c000,0xc0c000,0x404000,0x0,0x808000,0x0,0x0,0xc0c000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x700,0x0,0x700,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x700,0x0,0x33f00,0x0,0x33f00,0x0,0x700,0x0,0x0,0x0,0x3f00,0x0,0x3f00,0x0,0x40000000,0x3f00,0x0,0x3f00,0x0,0x3f00,0x0,0x0,0x0,0x3f00,0x0,0x8000000,0x3000,0x1000000,0x0,0x0,0x0,0x3700,0x3700,0x0,0x0,0x0,0x40000000,0x0,0x40000000,0x3000,0x0,0x3000,0x0,0x3000,0x0,0x3000,0x700,0x0,0x0,0x0,0x0,0x0,0x0,0x3000,0x0,0x700,0x0,0x700,0x0,0x3000,0x700,0x700,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x3700,0x0,0x0,0x3700,0x0,0x1000000,0x33f00,0x33f00,0x0,0x33f00,0x0,0x0,0x0,0x33f00,0x0,0x0,0x0,0x3000,0x0,0x3700,0x0,0x700,0x33f00,0x0,0x33f00,0x0,0x33f00,0x30000,0x3f00,0x0,0x33f00,0x0,0x0,0x0,0x0,0x33f00,0x203700,0x3700,0x0,0x700,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x700,0x0,0x700,0x0,0x0,0x0,0x30000,0x20000,0x30000,0x30000,0x33f00,0x33f00,0x30000,0x3f00,0x3f00,0x3700,0x3000,0x0,0x700,0xf00,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x3f00,0x0,0x3f00,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xd000000,0x0,0x0,0x0,0x0,0x0,0x3000,0x0,0x3000,0xd063f00,0x1063000,0xd000000,0x700,0x700,0x700,0x0,0x0,0x0,0x700,0x3700,0xd063f00,0x1063700,0x700,0x0,0x700,0x0,0x700,0x3700,0x0,0x0,0x0,0x3000,0x0,0x3000,0x0,0x0,0x0,0x3000,0x3000,0x0,0x3000,0x60000,0x60000,0x700,0x700,0x0,0x3f00,0x3f00,0x0,0x3f00,0x0,0x60000,0x40000000,0x0,0x3f00,0x3f00,0x0,0x3f00,0x700,0x0,0x40000,0x40000,0x0,0x0,0x700,0x700,0x600,0x800,}; + jj_la1_0 = new int[] {0x3f000000,0x0,0x3f000000,0x3f060000,0x60000,0x0,0x404000,0x808000,0xf00,0xf00,0xf00,0xc3ff00,0x0,0x0,0xc0c000,0x3f00,0xc0c000,0xc0c000,0x404000,0x0,0x808000,0x0,0x0,0xc0c000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x700,0x0,0x700,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x700,0x0,0x33f00,0x0,0x33f00,0x0,0x700,0x0,0x0,0x0,0x3f00,0x0,0x3f00,0x0,0x40000000,0x3f00,0x0,0x3f00,0x0,0x3f00,0x0,0x0,0x0,0x3f00,0x0,0x8000000,0x3000,0x1000000,0x0,0x0,0x0,0x3700,0x3700,0x0,0x0,0x0,0x40000000,0x0,0x40000000,0x3000,0x0,0x3000,0x0,0x3000,0x0,0x3000,0x700,0x0,0x0,0x0,0x0,0x0,0x0,0x3000,0x0,0x700,0x0,0x700,0x0,0x3000,0x700,0x700,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x3700,0x0,0x0,0x3700,0x0,0x1000000,0x33f00,0x33f00,0x0,0x33f00,0x0,0x0,0x0,0x33f00,0x0,0x0,0x0,0x3000,0x0,0x3700,0x0,0x700,0x33f00,0x0,0x33f00,0x0,0x33f00,0x30000,0x3f00,0x0,0x33f00,0x0,0x0,0x0,0x0,0x33f00,0x203700,0x3700,0x0,0x700,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x700,0x0,0x700,0x0,0x0,0x0,0x30000,0x20000,0x30000,0x30000,0x33f00,0x33f00,0x30000,0x3f00,0x3f00,0x3700,0x3000,0x0,0x700,0xf00,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x3f00,0x0,0x3f00,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xd000000,0x0,0x0,0x0,0x0,0x0,0x3000,0x0,0x3000,0xd063f00,0x1063000,0xd000000,0x700,0x700,0x700,0x0,0x0,0x0,0x700,0x3700,0xd063f00,0x1063700,0x700,0x0,0x700,0x0,0x700,0x3700,0x0,0x0,0x0,0x3000,0x0,0x3000,0x0,0x0,0x0,0x3000,0x3000,0x0,0x3000,0x60000,0x60000,0x700,0x700,0x0,0x3f00,0x3f00,0x0,0x3f00,0x0,0x60000,0x40000000,0x0,0x3f00,0x3f00,0x0,0x3f00,0x700,0x0,0x40000,0x40000,0x0,0x0,0x700,0x700,0x600,0x800,}; } private static void jj_la1_1() { jj_la1_1 = new int[] 
{0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x800,0x0,0x800,0x0,0x800,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x900,0x0,0x0,0x0,0x800,0x800,0x800,0x800,0x0,0x0,0x400,0x0,0x0,0x0,0x0,0x0,0x100,0x0,0x0,0x800,0x0,0x200,0x0,0x0,0x0,0x0,0x9ff50000,0x0,0x9ff50000,0x0,0x0,0x9ff50000,0x0,0x9ff50000,0x0,0x9ff50000,0x0,0x0,0x0,0x9ff50000,0x200,0x0,0x0,0x0,0x200,0x200,0x600,0x0,0x0,0x200,0x0,0x0,0x1,0x0,0x1,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x100,0x400,0x20,0x8,0x6,0x0,0x0,0x0,0x0,0x0,0x80310000,0x0,0x80310000,0x0,0x0,0x0,0x4,0x2,0x6,0x8,0x0,0x0,0x0,0x8,0x0,0x9ff50000,0xc0,0xc0,0x80310000,0xc0,0x0,0x8002b800,0x8002b800,0x0,0x0,0x0,0x0,0x0,0x0,0xa000,0x80021800,0x0,0x0,0x0,0x0,0x4000,0x9ff50000,0x800,0x0,0x800,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x10000,0x10000,0x10000,0x0,0x0,0x0,0x0,0x9ff50000,0x0,0x9ff50000,0x200000,0x0,0x0,0x0,0x100000,0x0,0x80310000,0x10000,0x1fc40000,0x0,0x0,0x10000000,0x40000000,0x0,0xc000000,0x0,0x0,0x0,0x9ff51000,0x0,0x1000,0x0,0x0,0x0,0xc00000,0x0,0x3000000,0x0,0x0,0x9ff51000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x9ff50000,0x9ff50000,0x0,0x9ff50000,0x0,0x0,0x0,0x0,0x9ff50000,0x9ff50000,0x0,0x9ff50000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,}; @@ -10881,7 +10886,7 @@ private static void jj_la1_5() { jj_la1_5 = new int[] {0x0,0x40000000,0x40000000,0x40000000,0x40000000,0x40000000,0x0,0x0,0x6,0x6,0x6,0xc00006,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x10,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x800,0xc00006,0x0,0xc00006,0x0,0x0,0x0,0x0,0x0,0xff8043e7,0x0,0xff8043e7,0x3e0,0x0,0xff8043e7,0x0,0xff8043e7,0x380,0xff8043e7,0x0,0x0,0x0,0xff8043e7,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x800,0x8,0x0,0x20000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x420,0x0,0x800,0x10,0x0,0x0,0x2006,0x3f804001,0x2006,0x3f804001,0x0,0x2006,0x0,0x0,0x0,0x0,0x0,0x400,0x20,0x400,0x420,0x10000,0xff804001,0x0,0x0,0x3f804001,0x0,0x0,0xc01806,0xc01806,0x0,0xc00006,0x0,0x0,0x0,0xc00006,0x0,0x1800,0x0,0x0,0x0,0x0,0x0,0xff804001,0xc00006,0x0,0xc00006,0x0,0xc00006,0xc00000,0x6,0xc00000,0x6,0x0,0x0,0x0,0x0,0x6,0x20000000,0x20000000,0x200000,0x20000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x6,0x6,0x0,0x6,0x0,0x0,0x0,0x200000,0x6,0x6,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xff804007,0x0,0xff804007,0x0,0xf000000,0x4001,0x30000000,0x800000,0x0,0x3f804001,0x0,0xc0000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xff804007,0x0,0x0,0x6,0x6,0x6,0x0,0x0,0x0,0x0,0x0,0xff804007,0x0,0x0,0xc0000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x6,0x6,0x0,0xff8043e7,0xff8043e7,0x0,0xff8043e7,0x0,0x40000000,0x0,0x0,0xff804007,0xff804007,0x0,0xff804007,0x6,0x0,0x0,0x0,0x6,0x0,0x0,0x0,0x0,0x0,}; } private static void jj_la1_6() { - jj_la1_6 = new int[] 
{0x0,0x4000002,0x4000002,0x4000002,0x4000002,0x4000002,0x0,0x0,0x413c3800,0x413c3800,0x413c3800,0x513c3800,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x80000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x513c3800,0x0,0x513c3800,0x0,0x0,0x0,0x1000000,0x10000000,0x413c3801,0x80000000,0x413c3801,0x0,0x0,0x413c3801,0x80000000,0x413c3801,0x0,0x413c3801,0x1000000,0x13c0000,0x1000000,0x413c3801,0x0,0x0,0x1000000,0x0,0x0,0x0,0x4000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x1000000,0x1000000,0x1000000,0x1000000,0x1000000,0x0,0x1000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x1000000,0x3c3800,0x0,0x3c3800,0x0,0x1000000,0x3c3800,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x1000001,0x0,0x0,0x0,0x1000000,0x0,0x553c3800,0x553c3800,0x0,0x513c3800,0x0,0x0,0x0,0x513c3800,0x0,0x0,0x0,0x1000000,0x0,0x0,0x0,0x1000001,0x513c3800,0x0,0x513c3800,0x0,0x513c3800,0x11000000,0x413c3800,0x0,0x513c3800,0x0,0x0,0x0,0x0,0x513c3800,0x1000020,0x1000020,0x0,0x1000020,0x0,0x0,0x0,0x800,0x0,0x800,0x4000000,0x4000000,0x1000020,0x0,0x1000000,0x14000800,0x38,0x3c0,0x11000000,0x0,0x0,0x0,0x513c3800,0x513c3800,0x11000000,0x413c3800,0x40000000,0x0,0x0,0x0,0x13c3800,0x413c3800,0x0,0x0,0x0,0x3c,0x3c,0x0,0x0,0x0,0x0,0x413c3801,0x0,0x413c3801,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x1,0x0,0x0,0x4000000,0x0,0x2000000,0x0,0x0,0x0,0x1000000,0x413c3801,0x1000000,0x0,0x13c3800,0x13c3800,0x3c3800,0x0,0x0,0x0,0x0,0x1000000,0x413c3801,0x1000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x1000000,0x0,0x0,0x0,0x1000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x3c3800,0x3c3800,0x0,0x413c3801,0x413c3801,0x80000000,0x493c3801,0x2,0x0,0x0,0x0,0x413c3801,0x413c3801,0x80000000,0x413c3801,0x3c3800,0x3800,0x0,0x0,0x0,0x3c0000,0x0,0x0,0x0,0x40000000,}; + jj_la1_6 = new int[] {0x0,0x4000002,0x4000002,0x4000002,0x4000002,0x4000002,0x0,0x0,0x413c3800,0x413c3800,0x413c3800,0x513c3800,0x0,0x0,0x0,0x40000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x80000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x513c3800,0x0,0x513c3800,0x0,0x0,0x0,0x1000000,0x10000000,0x413c3801,0x80000000,0x413c3801,0x0,0x0,0x413c3801,0x80000000,0x413c3801,0x0,0x413c3801,0x1000000,0x13c0000,0x1000000,0x413c3801,0x0,0x0,0x1000000,0x0,0x0,0x0,0x4000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x1000000,0x1000000,0x1000000,0x1000000,0x1000000,0x0,0x1000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x1000000,0x3c3800,0x0,0x3c3800,0x0,0x1000000,0x3c3800,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x1000001,0x0,0x0,0x0,0x1000000,0x0,0x553c3800,0x553c3800,0x0,0x513c3800,0x0,0x0,0x0,0x513c3800,0x0,0x0,0x0,0x1000000,0x0,0x0,0x0,0x1000001,0x513c3800,0x0,0x513c3800,0x0,0x513c3800,0x11000000,0x413c3800,0x0,0x513c3800,0x0,0x0,0x0,0x0,0x513c3800,0x1000020,0x1000020,0x0,0x1000020,0x0,0x0,0x0,0x800,0x0,0x800,0x4000000,0x4000000,0x1000020,0x0,0x1000000,0x14000800,0x38,0x3c0,0x11000000,0x0,0x0,0x0,0x513c3800,0x513c3800,0x11000000,0x413c3800,0x40000000,0x0,0x0,0x0,0x13c3800,0x413c3800,0x0,0x0,0x0,0x3c,0x3c,0x0,0x0,0x0,0x0,0x413c3801,0x0,0x413c3801,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x1,0x0,0x0,0x4000000,0x0,0x2000000,0x0,0x0,0x0,0x1000000,0x413c3801,0x1000000,0x0,0x13c3800,0x13c3800,0x3c3800,0x0,0x0,0x0,0x0,0x1000000,0x413c3801,0x1000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x1000000,0x0,0x0,0x0,0x1000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x3c3800,0x3c3800,0x0,0x413c3801,0x413c3801,0x80000000,0x493c3801,0x2,0x0,0x0,0x0,0x413c3801,0x413c3801,0x80000000,0x413c3801,0x3c3800,0x3800,0x0,0x0,0x0,0x3c0000,0x0,0x0,0x0,0x40000000,}; } private static void jj_la1_7() { jj_la1_7 = new int[] 
{0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x10,0x0,0x10,0xc00010,0x2,0x0,0x0,0x800000,0x0,0x0,0x0,0x2,0x0,0x2,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xc00010,0x0,0xc00010,0x0,0x0,0x1,0x0,0x0,0xc00010,0x0,0xc00010,0x0,0x0,0xc00010,0x0,0xc00010,0x0,0xc00010,0x0,0x0,0x0,0xc00010,0x0,0x0,0x820000,0x0,0x0,0x0,0x0,0x800000,0x820000,0x0,0x0,0x0,0x0,0x0,0x0,0x820000,0x0,0x800000,0x0,0x820000,0x1,0x800000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x800000,0x0,0x10,0x0,0x10,0x0,0x800000,0x10,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x800000,0x0,0x0,0x800000,0x0,0x0,0xc00010,0xc00010,0x2,0xc00010,0x2,0x0,0x2,0xc00010,0x0,0x0,0x1,0x800000,0x0,0x800000,0x0,0x0,0xc00010,0x2,0xc00010,0x2,0xc00010,0x400010,0x800010,0x0,0xc00000,0x100000,0x100000,0x2,0x1,0xc00010,0x800400,0x800400,0x0,0x400,0x102000,0x102000,0x40000,0x0,0x1,0x1,0xa8000,0xa8000,0x400,0xa8000,0x0,0x0,0xbcc,0x0,0x400000,0x400000,0x400000,0x400000,0xc00010,0xc00010,0x400000,0x800000,0x800000,0x800000,0x800000,0x0,0x0,0x0,0x2000,0x4000,0x0,0xbcc,0xbcc,0x18000,0x18000,0x60000,0x60000,0x818410,0x0,0x818410,0x0,0x0,0x0,0x0,0x0,0x1,0x0,0x0,0x0,0x0,0x1,0x0,0x0,0x1,0x0,0x800001,0x1,0x800000,0xc18410,0x800000,0x400000,0x0,0x0,0x0,0x0,0x1,0x0,0x0,0x800000,0xc18410,0x800000,0x0,0x0,0x0,0x1,0x0,0x800000,0x1,0x0,0x1,0x800000,0x1,0x900002,0x100000,0x2,0x2,0x900002,0x800001,0x1,0x800000,0x0,0x0,0x800000,0x800000,0x20000,0x818410,0x818410,0x0,0x818410,0x0,0x0,0x0,0x1,0x838410,0x838410,0x0,0x800010,0x0,0x0,0x200000,0x200000,0x0,0x0,0x0,0x0,0x0,0x0,}; diff --git a/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/sparql_corese.jj b/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/sparql_corese.jj index eff4f192d..588deb186 100644 --- a/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/sparql_corese.jj +++ b/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/sparql_corese.jj @@ -1371,7 +1371,8 @@ void BasicGraphGraphPattern(Exp stack) : { Atom src; Exp e; Source st; boolean state = false, leaf = false, rec = false;} { - src = VarOrIRIref(stack) + //src = VarOrIRIref(stack) + src = VarOrBlankNodeOrIRIref(stack) { handler.graphPattern(src); } e = GroupGraphPattern() { @@ -2853,12 +2854,14 @@ Variable BlankNode(Exp stack): { Token t; Variable v; } { // check if the blank node is already in the table v = astq.newBlankNode(stack, t.image); + handler.declareBlankNode(t); } | t = { // create a new blank node v = astq.newBlankNode(); + handler.declareBlankNode(t); } ) { From 91c3f806ea4805f2b91087a4b365f94fe351bdfc Mon Sep 17 00:00:00 2001 From: corby Date: Thu, 15 Feb 2024 14:31:37 +0100 Subject: [PATCH 021/146] Leverage bnode as graph identifier --- .../sparql/triple/parser/ParserHandler.java | 20 +++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/ParserHandler.java b/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/ParserHandler.java index 50df79075..b5df5fb44 100644 --- a/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/ParserHandler.java +++ b/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/ParserHandler.java @@ -5,11 +5,8 @@ import fr.inria.corese.sparql.triple.javacc1.ParseException; import fr.inria.corese.sparql.triple.javacc1.SparqlCorese; import fr.inria.corese.sparql.triple.javacc1.Token; -import java.net.URI; -import java.net.URISyntaxException; import java.util.ArrayList; import java.util.List; -import java.util.logging.Level; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; @@ -31,6 +28,7 @@ public class ParserHandler { int countWhere = 0; private boolean function = false; boolean turtleLoader = false; + private Token bnode; // Broker to target Graph in Load context // Turtle Loader create Edge(g s p o) directly in the graph @@ -189,11 +187,17 @@ Triple createTriple(ASTQuery ast, Atom p, List list, boolean matchArity, b } } - + public void declareBlankNode(Token id) { + setBnode(id); + } + public void graphPattern(Atom g) { if (isTurtleLoader()) { getCreate().graph(g.getConstant()); } + else if (g.isBlankOrBlankNode()) { + throw new Error("bnode as graph name: " + getBnode() + " Line: " + getBnode().beginLine); + } } public void endGraphPattern(Atom g) { @@ -487,5 +491,13 @@ public Creator getCreate() { public void setCreate(Creator create) { this.create = create; } + + public Token getBnode() { + return bnode; + } + + public void setBnode(Token bnode) { + this.bnode = bnode; + } } From e7a705f18019c2a82a3f8f2006a1e71455ac6561 Mon Sep 17 00:00:00 2001 From: corby Date: Thu, 15 Feb 2024 14:32:15 +0100 Subject: [PATCH 022/146] Leverage bnode as graph identifier --- .../main/java/fr/inria/corese/core/load/CreateImpl.java | 7 ++++++- .../main/java/fr/inria/corese/core/load/CreateTriple.java | 6 +++++- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/corese-core/src/main/java/fr/inria/corese/core/load/CreateImpl.java b/corese-core/src/main/java/fr/inria/corese/core/load/CreateImpl.java index 6d73e69fe..968b4d5b7 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/load/CreateImpl.java +++ b/corese-core/src/main/java/fr/inria/corese/core/load/CreateImpl.java @@ -78,7 +78,12 @@ public static CreateImpl create(Graph g, Load ld) { @Override public void graph(Atom src) { stack.add(source); - source = addGraph(src); + if (src.isBlankOrBlankNode()) { + source = addGraph(getID(src.getLabel()), true); + } + else { + source = addGraph(src); + } } @Override diff --git a/corese-core/src/main/java/fr/inria/corese/core/load/CreateTriple.java b/corese-core/src/main/java/fr/inria/corese/core/load/CreateTriple.java index 8a21132b7..8489faf5d 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/load/CreateTriple.java +++ b/corese-core/src/main/java/fr/inria/corese/core/load/CreateTriple.java @@ -124,8 +124,12 @@ Node addGraph(String src) { return graph.addGraph(src); } + Node addGraph(String src, boolean bnode) { + return graph.addGraph(src, bnode); + } + Node addGraph(Atom src) { - return graph.addGraph(src.getLabel(), src.isBlank()); + return graph.addGraph(src.getLabel(), src.isBlankOrBlankNode()); } Node addDefaultGraphNode() { From 2f3537a7d366315b7bd7492eaeb6664639d63cd3 Mon Sep 17 00:00:00 2001 From: corby Date: Fri, 16 Feb 2024 10:40:59 +0100 Subject: [PATCH 023/146] Leverage access right of loadQuery to authorized namespace --- .../inria/corese/server/webservice/Profile.java | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/corese-server/src/main/java/fr/inria/corese/server/webservice/Profile.java b/corese-server/src/main/java/fr/inria/corese/server/webservice/Profile.java index ba2fe585b..31763a0ff 100644 --- a/corese-server/src/main/java/fr/inria/corese/server/webservice/Profile.java +++ b/corese-server/src/main/java/fr/inria/corese/server/webservice/Profile.java @@ -20,7 +20,9 @@ import fr.inria.corese.core.transform.ContextBuilder; import fr.inria.corese.core.util.Parameter; import fr.inria.corese.kgram.api.core.Edge; +import fr.inria.corese.sparql.exceptions.SafetyException; import 
fr.inria.corese.sparql.triple.function.term.TermEval; +import static fr.inria.corese.sparql.triple.function.term.TermEval.READ_MESS; import fr.inria.corese.sparql.triple.parser.Access; import fr.inria.corese.sparql.triple.parser.Access.Feature; import fr.inria.corese.sparql.triple.parser.Access.Level; @@ -385,8 +387,18 @@ String read(String path) throws IOException, LoadException { } String loadQuery(String path) throws IOException, LoadException { - if (isProtected && !path.startsWith(getServer())) { - throw new IOException(path); +// if (isProtected && !path.startsWith(getServer())) { +// throw new IOException(path); +// } + if (path.startsWith(getServer())) { + // OK + } else { + try { + // do not accept (file) path when accept list is empty + Access.check(Feature.READ, Access.getQueryAccessLevel(true), path, READ_MESS, false); + } catch (SafetyException ex) { + throw new IOException(path); + } } return read(path); } From 08ce0386aa31303a1f69c978fbaecb75c770184b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 29 Feb 2024 01:21:17 +0000 Subject: [PATCH 024/146] Bump rack in /corese-unit-test/src/test/resources/data/rdf-star-main Bumps [rack](https://github.com/rack/rack) from 2.2.6.4 to 2.2.8.1. - [Release notes](https://github.com/rack/rack/releases) - [Changelog](https://github.com/rack/rack/blob/main/CHANGELOG.md) - [Commits](https://github.com/rack/rack/compare/v2.2.6.4...v2.2.8.1) --- updated-dependencies: - dependency-name: rack dependency-type: indirect ... Signed-off-by: dependabot[bot] --- .../src/test/resources/data/rdf-star-main/Gemfile.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/corese-unit-test/src/test/resources/data/rdf-star-main/Gemfile.lock b/corese-unit-test/src/test/resources/data/rdf-star-main/Gemfile.lock index 53732640d..8d7a39d23 100644 --- a/corese-unit-test/src/test/resources/data/rdf-star-main/Gemfile.lock +++ b/corese-unit-test/src/test/resources/data/rdf-star-main/Gemfile.lock @@ -44,7 +44,7 @@ GEM multi_json (1.15.0) net-http-persistent (4.0.1) connection_pool (~> 2.2) - rack (2.2.6.4) + rack (2.2.8.1) rake (13.0.6) rdf (3.1.15) hamster (~> 3.0) From 5e7b059667dd57abc279f5012ad4cbeba6b11cd4 Mon Sep 17 00:00:00 2001 From: corby Date: Fri, 1 Mar 2024 09:14:27 +0100 Subject: [PATCH 025/146] Take into account character = to (not) rewrite as prefix --- .../inria/corese/sparql/triple/parser/NSManager.java | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/NSManager.java b/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/NSManager.java index 4669cc651..cba05190b 100755 --- a/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/NSManager.java +++ b/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/NSManager.java @@ -147,7 +147,7 @@ public class NSManager extends ASTObject { static final String NL = System.getProperty("line.separator"); static final char[] END_CHAR = { '#', '/', '?' }; // , ':'}; // may end an URI ... 
static final String[] PB_CHAR_NAME = { ".", "\u2013", ":", "#", "(", ")", "'", "\"", ",", ";", "[", "]", "{", "}", - "?", "&" }; + "?", "&", "=" }; static final String[] PB_CHAR_URI = { "(", ")", "'", "\"", ",", ";", "[", "]", "{", "}", "?", "&" }; static final String pchar = ":"; int count = 0; @@ -519,6 +519,11 @@ public String toPrefix(String nsname, boolean skip, boolean xml) { return nsname; } + String name = extractLocalName(nsname, namespace); + if (containsForbiddenCharacters(name)) { + return nsname; + } + String prefix = getPrefix(namespace); if (prefix == null) { if (skip) { @@ -527,10 +532,6 @@ public String toPrefix(String nsname, boolean skip, boolean xml) { prefix = defineDefaultNamespace(namespace); } - String name = extractLocalName(nsname, namespace); - if (containsForbiddenCharacters(name)) { - return nsname; - } String result = assembleResult(prefix, name, xml); record(namespace); From 59bda3be4b589a43da5fef8d22d7ae3ac5d2101b Mon Sep 17 00:00:00 2001 From: corby Date: Fri, 1 Mar 2024 15:11:19 +0100 Subject: [PATCH 026/146] Leverage URI pprint with argument skipUndefPrefix --- .../fr/inria/corese/sparql/api/IDatatype.java | 1 + .../corese/sparql/datatype/CoreseDatatype.java | 17 ++++++++++++----- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/sparql/src/main/java/fr/inria/corese/sparql/api/IDatatype.java b/sparql/src/main/java/fr/inria/corese/sparql/api/IDatatype.java index f34fd783b..8e6560b53 100644 --- a/sparql/src/main/java/fr/inria/corese/sparql/api/IDatatype.java +++ b/sparql/src/main/java/fr/inria/corese/sparql/api/IDatatype.java @@ -388,6 +388,7 @@ default boolean isTripleWithEdge() { String toSparql(boolean prefix, boolean xsd, NSManager nsm); + String toSparql(boolean prefix, boolean xsd, boolean skipUndefPrefix, NSManager nsm); default String trace() { return String.format("trace: %s code: %s datatype: %s label: %s", diff --git a/sparql/src/main/java/fr/inria/corese/sparql/datatype/CoreseDatatype.java b/sparql/src/main/java/fr/inria/corese/sparql/datatype/CoreseDatatype.java index e9f476e77..0cf513ed3 100755 --- a/sparql/src/main/java/fr/inria/corese/sparql/datatype/CoreseDatatype.java +++ b/sparql/src/main/java/fr/inria/corese/sparql/datatype/CoreseDatatype.java @@ -146,20 +146,27 @@ public String toSparql(boolean prefix, boolean xsd) { return toSparql(prefix, xsd, nsm()); } + @Override + public String toSparql(boolean prefixDatatype, boolean displayDatatype, NSManager nsm) { + return toSparql(prefixDatatype, displayDatatype, true, nsm); + } + /** * Overloaded by CoreseExtension + * prefixDatatype: display XSD Datatype with xsd: (default is true) + * displayDatatype: display datatype for boolean and integer (default is false) */ @Override - public String toSparql(boolean prefix, boolean xsd, NSManager nsm) { + public String toSparql(boolean prefixDatatype, boolean displayDatatype, boolean skipUndefPrefix, NSManager nsm) { String value = getLabel(); if (isPointer() && getPointerObject() != null){ value = getPointerObject().getDatatypeLabel(); } - if (getCode() == INTEGER && !xsd && getDatatypeURI().equals(XSD.xsdinteger) + if (getCode() == INTEGER && !displayDatatype && getDatatypeURI().equals(XSD.xsdinteger) && (! 
(value.startsWith("0") && value.length() > 1))) { // display integer value as is (without datatype) } - else if (getCode() == BOOLEAN && !xsd && + else if (getCode() == BOOLEAN && !displayDatatype && (getLabel().equals(CoreseBoolean.STRUE) || getLabel().equals(CoreseBoolean.SFALSE))) { } else if (getCode() == STRING || (getCode() == LITERAL && !hasLang())) { @@ -167,7 +174,7 @@ else if (getCode() == STRING || (getCode() == LITERAL && !hasLang())) { } else if (getDatatype() != null && !getDatatype().getLabel().equals(RDFS.rdflangString)) { String datatype = getDatatype().getLabel(); - if (prefix && (datatype.startsWith(RDF.XSD)) + if (prefixDatatype && (datatype.startsWith(RDF.XSD)) || datatype.startsWith(RDF.RDF) || datatype.startsWith(NSManager.DT)) { datatype = nsm.toPrefix(datatype); @@ -185,7 +192,7 @@ else if (isLiteral()) { value = protect(value); } else if (isURI()) { if (DISPLAY_AS_PREFIX) { - String str = nsm.toPrefix(value, true); + String str = nsm.toPrefix(value, skipUndefPrefix); if (str == value) { value = String.format("<%s>", value); } else { From 69e4b41be95fce04c202666c4b915dfb1ee93d34 Mon Sep 17 00:00:00 2001 From: corby Date: Fri, 1 Mar 2024 15:12:56 +0100 Subject: [PATCH 027/146] pprint URI using IDatatype pprint hence take into account DISPLAY_URI_AS_PREFIX --- .../inria/corese/core/print/TripleFormat.java | 24 +++++++++++++------ 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/TripleFormat.java b/corese-core/src/main/java/fr/inria/corese/core/print/TripleFormat.java index cbd6e4d1c..011beba6f 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/TripleFormat.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/TripleFormat.java @@ -1,6 +1,7 @@ package fr.inria.corese.core.print; import fr.inria.corese.core.Graph; +import fr.inria.corese.core.logic.RDF; import fr.inria.corese.kgram.api.core.Edge; import fr.inria.corese.kgram.api.core.Node; import fr.inria.corese.kgram.core.Mappings; @@ -311,14 +312,22 @@ void subject(Edge ent) { } } +// void predicate(Node node) { +// String pred = nsm.toPrefix(node.getLabel(), !addPrefix); +// if (pred.equals(RDF_TYPE)) { +// sdisplay("a"); +// } else if (pred.equals(node.getLabel())) { // Si l'URI n'est pas abrégée +// uri(node.getLabel()); // Utiliser la méthode uri pour ajouter des chevrons si nécessaire +// } else { // Si l'URI est abrégée +// sdisplay(pred); +// } +// } + void predicate(Node node) { - String pred = nsm.toPrefix(node.getLabel(), !addPrefix); - if (pred.equals(RDF_TYPE)) { + if (node.getLabel().equals(RDF.TYPE)) { sdisplay("a"); - } else if (pred.equals(node.getLabel())) { // Si l'URI n'est pas abrégée - uri(node.getLabel()); // Utiliser la méthode uri pour ajouter des chevrons si nécessaire - } else { // Si l'URI est abrégée - sdisplay(pred); + } else { + node(node); } } @@ -336,7 +345,8 @@ void node(Node node, boolean rec) { } else if (dt.isBlank()) { sdisplay(dt.getLabel()); } else { - uri(dt.getLabel()); + //uri(dt.getLabel()); + sdisplay(dt.toSparql(true, false, false, nsm)); } } From b590deb38e57674ec24aa78350dfc257c82e863f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Mon, 4 Mar 2024 17:03:55 +0100 Subject: [PATCH 028/146] Add test generator for W3C tests, and tests for RDFC10. - Add a generator for W3C tests. - Add tests for RDFC10. - Fix some bugs in RDFC10 algorithm. - Add support for hash sha-384. 
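For illustration, the SHA-384 support added here can be exercised through the new factory methods on CanonicalRdf10Format (a minimal sketch under stated assumptions: the class name Rdfc10Sha384Example and the input file "data.nq" are placeholders, not part of this patch):

    import fr.inria.corese.core.Graph;
    import fr.inria.corese.core.load.Load;
    import fr.inria.corese.core.load.LoadException;
    import fr.inria.corese.core.print.CanonicalRdf10Format;
    import fr.inria.corese.core.print.rdfc10.HashingUtility.HashAlgorithm;

    public class Rdfc10Sha384Example {                 // illustrative class name
        public static void main(String[] args) throws LoadException {
            // Load some RDF data into a graph (file name is only an example)
            Graph graph = Graph.create();
            Load ld = Load.create(graph);
            ld.parse("data.nq");

            // Canonicalize with the default hash (SHA-256) ...
            String sha256 = CanonicalRdf10Format.create(graph).toString();

            // ... or with the newly supported SHA-384
            String sha384 = CanonicalRdf10Format.create(graph, HashAlgorithm.SHA_384).toString();

            System.out.println(sha384);
        }
    }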
--- .../core/print/CanonicalRdf10Format.java | 30 + .../corese/core/print/NTriplesFormat.java | 5 +- .../core/print/rdfc10/CanonicalIssuer.java | 14 +- .../core/print/rdfc10/CanonicalRdf10.java | 181 +- .../print/rdfc10/CanonicalizationState.java | 7 +- .../print/rdfc10/CanonicalizedDataset.java | 37 +- .../core/print/rdfc10/HashingUtility.java | 43 +- corese-unit-test/pom.xml | 26 +- .../fr/inria/corese/engine/QGVisitor.java | 88 - .../JUnitTestFileGenerator.java | 176 ++ .../inria/corese/w3cTestsGenerator/Main.java | 43 + .../w3cTestsGenerator/W3cTestsGenerator.java | 144 ++ .../w3cTestsGenerator/w3cTests/IW3cTest.java | 23 + .../w3cTests/factory/W3cTestFactory.java | 148 ++ .../implementations/RDFC10EvalTest.java | 94 + .../implementations/RDFC10MapTest.java | 92 + .../RDFC10NegativeEvalTest.java | 68 + .../src/main/java/module-info.java | 3 +- .../src/main/resources/log4j2.xml | 14 +- .../fr/inria/corese/engine/TestQuery1.java | 62 - .../w3c/canonicalRdf/canonicalRdfTest.java | 1909 +++++++++++++++++ .../src/test/resources/log4j2.xml | 18 +- 22 files changed, 2969 insertions(+), 256 deletions(-) delete mode 100644 corese-unit-test/src/main/java/fr/inria/corese/engine/QGVisitor.java create mode 100644 corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/JUnitTestFileGenerator.java create mode 100644 corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/Main.java create mode 100644 corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/W3cTestsGenerator.java create mode 100644 corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/IW3cTest.java create mode 100644 corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/factory/W3cTestFactory.java create mode 100644 corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/implementations/RDFC10EvalTest.java create mode 100644 corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/implementations/RDFC10MapTest.java create mode 100644 corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/implementations/RDFC10NegativeEvalTest.java create mode 100644 corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf/canonicalRdfTest.java diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/CanonicalRdf10Format.java b/corese-core/src/main/java/fr/inria/corese/core/print/CanonicalRdf10Format.java index ce231864e..d7c5a7662 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/CanonicalRdf10Format.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/CanonicalRdf10Format.java @@ -1,8 +1,11 @@ package fr.inria.corese.core.print; +import java.util.Map; + import fr.inria.corese.core.Graph; import fr.inria.corese.core.print.rdfc10.CanonicalRdf10; import fr.inria.corese.core.print.rdfc10.CanonicalizedDataset; +import fr.inria.corese.core.print.rdfc10.HashingUtility.HashAlgorithm; import fr.inria.corese.kgram.api.core.Node; import fr.inria.corese.kgram.core.Mappings; @@ -19,14 +22,27 @@ public CanonicalRdf10Format(Graph graph) { this.canonicalizedDataset = CanonicalRdf10.create(graph).canonicalRdf10(); } + public CanonicalRdf10Format(Graph graph, HashAlgorithm hashAlgorithm) { + super(graph); + this.canonicalizedDataset = CanonicalRdf10.create(graph, hashAlgorithm).canonicalRdf10(); + } + public static CanonicalRdf10Format create(Graph graph) { return new CanonicalRdf10Format(graph); } + public static CanonicalRdf10Format create(Graph graph, HashAlgorithm hashAlgorithm) { + return new 
CanonicalRdf10Format(graph, hashAlgorithm); + } + public static CanonicalRdf10Format create(Mappings map) { return new CanonicalRdf10Format((Graph) map.getGraph()); } + public static CanonicalRdf10Format create(Mappings map, HashAlgorithm hashAlgorithm) { + return new CanonicalRdf10Format((Graph) map.getGraph(), hashAlgorithm); + } + /** * Converts the graph to a string in Canonical RDF 1.0 format. * @@ -36,6 +52,11 @@ public static CanonicalRdf10Format create(Mappings map) { public String toString() { String nquads = super.toString(); + // Check if nquads is empty and return early if it is + if (nquads.isEmpty()) { + return ""; + } + // Sort in codepoint order by line String[] lines = nquads.split("\n"); java.util.Arrays.sort(lines); @@ -55,4 +76,13 @@ protected String printBlank(Node node) { return "_:" + this.canonicalizedDataset.getIssuedIdentifier(identifier); } + /** + * Retrieves the mapping of blank nodes to their identifiers. + * + * @return a map of blank nodes to their identifiers + */ + public Map getIssuedIdentifiersMap() { + return this.canonicalizedDataset.getIssuedIdentifiersMap(); + } + } diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/NTriplesFormat.java b/corese-core/src/main/java/fr/inria/corese/core/print/NTriplesFormat.java index 1df8f13a9..51a2f5e3f 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/NTriplesFormat.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/NTriplesFormat.java @@ -9,6 +9,7 @@ import fr.inria.corese.kgram.api.core.Edge; import fr.inria.corese.kgram.api.core.Node; import fr.inria.corese.kgram.core.Mappings; +import fr.inria.corese.sparql.datatype.RDF; import fr.inria.corese.sparql.triple.parser.NSManager; /** @@ -95,7 +96,7 @@ public void write(OutputStream out) throws IOException { * @param node the node to be formatted * @return a string representation of the node */ - protected String printNode(Node node) { + public String printNode(Node node) { if (node.getDatatypeValue().isURI()) { return printURI(node); } else if (node.getDatatypeValue().isLiteral()) { @@ -136,7 +137,7 @@ private String printDatatype(Node node) { if (language != null && !language.isEmpty()) { return "\"" + label + "\"@" + language; - } else if (datatype != null && !datatype.isEmpty()) { + } else if (datatype != null && !datatype.isEmpty() && !datatype.equals(RDF.xsdstring)) { return "\"" + label + "\"^^<" + datatype + ">"; } else { return "\"" + label + "\""; diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalIssuer.java b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalIssuer.java index adc010219..7bd6d3a15 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalIssuer.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalIssuer.java @@ -1,7 +1,7 @@ package fr.inria.corese.core.print.rdfc10; import java.util.Collections; -import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.Map; import java.util.Set; @@ -13,7 +13,8 @@ public class CanonicalIssuer { private final String IDPREFIX; private int idCounter; // Maps blank node identifiers to their canonical identifiers - private final Map issuedIdentifierMap; + // Use LinkedHashMap to preserve insertion order + private final LinkedHashMap issuedIdentifierMap; ///////////////// // Constructor // @@ -27,7 +28,7 @@ public class CanonicalIssuer { public CanonicalIssuer(String idPrefix) { this.IDPREFIX = idPrefix; this.idCounter = 0; - this.issuedIdentifierMap = 
new HashMap<>(); + this.issuedIdentifierMap = new LinkedHashMap<>(); } /** @@ -38,7 +39,7 @@ public CanonicalIssuer(String idPrefix) { public CanonicalIssuer(CanonicalIssuer ci) { this.IDPREFIX = ci.IDPREFIX; this.idCounter = ci.idCounter; - this.issuedIdentifierMap = new HashMap<>(ci.issuedIdentifierMap); + this.issuedIdentifierMap = new LinkedHashMap<>(ci.issuedIdentifierMap); } ///////////// @@ -100,4 +101,9 @@ public Map getIssuedIdentifierMap() { return Collections.unmodifiableMap(this.issuedIdentifierMap); } + @Override + public String toString() { + return this.issuedIdentifierMap.toString(); + } + } diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalRdf10.java b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalRdf10.java index 6ea6a6a64..e3745bf5e 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalRdf10.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalRdf10.java @@ -2,13 +2,13 @@ import java.util.ArrayList; import java.util.List; -import java.util.Map; -import java.util.TreeMap; import org.apache.commons.lang3.tuple.Pair; import fr.inria.corese.core.EdgeFactory; import fr.inria.corese.core.Graph; +import fr.inria.corese.core.print.NTriplesFormat; +import fr.inria.corese.core.print.rdfc10.HashingUtility.HashAlgorithm; import fr.inria.corese.kgram.api.core.Edge; import fr.inria.corese.kgram.api.core.ExpType; import fr.inria.corese.kgram.api.core.Node; @@ -28,8 +28,13 @@ public class CanonicalRdf10 { private CanonicalizationState canonicalizationState; private CanonicalizedDataset canonicalizedDataset; - // private EdgeFactory edgeFactory = Graph.create().getEdgeFactory(); + private NTriplesFormat ntriplesFormat = NTriplesFormat.create(Graph.create()); + + private HashAlgorithm hashAlgorithm = HashAlgorithm.SHA_256; + + private int depthFactor = 5; + private int permutationLimit = 50000; ////////////////// // Constructors // @@ -44,21 +49,6 @@ public class CanonicalRdf10 { private CanonicalRdf10(Graph graph) { this.canonicalizationState = new CanonicalizationState(); this.canonicalizedDataset = new CanonicalizedDataset(graph); - canonicalRdf10(); - } - - /** - * Constructs a new {@code CanonicalRdf10Format} with the specified RDF graph - * and a map of blank nodes to identifiers. Initializes the canonicalization - * state and dataset for the graph. - * - * @param graph the RDF graph to be canonicalized - * @param blankNodesToIdentifiers a map of blank nodes to their identifiers - */ - private CanonicalRdf10(Graph graph, Map blankNodesToIdentifiers) { - this.canonicalizationState = new CanonicalizationState(); - this.canonicalizedDataset = new CanonicalizedDataset(graph, blankNodesToIdentifiers); - canonicalRdf10(); } ///////////////////// @@ -87,28 +77,92 @@ public static CanonicalRdf10 create(Mappings map) { } /** - * Creates a new {@code CanonicalRdf10Format} instance for the given graph and - * map of blank nodes to identifiers. + * Creates a new {@code CanonicalRdf10Format} instance for the given graph with + * a hash algorithm. 
* - * @param graph the RDF graph to be canonicalized - * @param blankNodesToIdentifiers a map of blank nodes to their identifiers + * @param graph the RDF graph to be canonicalized + * @param hashAlgorithm the hash algorithm to be used for the canonicalization * @return a new instance of {@code CanonicalRdf10Format} */ - public static CanonicalRdf10 create(Graph graph, Map blankNodesToIdentifiers) { - return new CanonicalRdf10(graph, blankNodesToIdentifiers); + public static CanonicalRdf10 create(Graph graph, HashAlgorithm hashAlgorithm) { + CanonicalRdf10 canonicalRdf10 = new CanonicalRdf10(graph); + canonicalRdf10.setHashAlgorithm(hashAlgorithm); + return canonicalRdf10; } /** * Creates a new {@code CanonicalRdf10Format} instance for the graph associated - * with the given mappings and map of blank nodes to identifiers. + * with the given mappings with a hash algorithm. * - * @param map the mappings containing the RDF graph to be - * canonicalized - * @param blankNodesToIdentifiers a map of blank nodes to their identifiers + * @param map the mappings containing the RDF graph to be + * canonicalized + * @param hashAlgorithm the hash algorithm to be used for the canonicalization * @return a new instance of {@code CanonicalRdf10Format} */ - public static CanonicalRdf10 create(Mappings map, Map blankNodesToIdentifiers) { - return new CanonicalRdf10((Graph) map.getGraph(), blankNodesToIdentifiers); + public static CanonicalRdf10 create(Mappings map, HashAlgorithm hashAlgorithm) { + CanonicalRdf10 canonicalRdf10 = new CanonicalRdf10((Graph) map.getGraph()); + canonicalRdf10.setHashAlgorithm(hashAlgorithm); + return canonicalRdf10; + } + + /////////////// + // Accessors // + /////////////// + + /** + * Returns the depth factor for the canonicalization algorithm. + * + * @return the depth factor for the canonicalization algorithm + */ + public int getDepthFactor() { + return depthFactor; + } + + /** + * Sets the depth factor for the canonicalization algorithm. + * + * @param depthFactor the depth factor for the canonicalization algorithm + */ + public void setDepthFactor(int depthFactor) { + this.depthFactor = depthFactor; + } + + /** + * Returns the permutation limit for the canonicalization algorithm. + * + * @return the permutation limit for the canonicalization algorithm + */ + public int getPermutationLimit() { + return permutationLimit; + } + + /** + * Sets the permutation limit for the canonicalization algorithm. + * + * @param permutationLimit the permutation limit for the canonicalization + * algorithm + */ + public void setPermutationLimit(int permutationLimit) { + this.permutationLimit = permutationLimit; + } + + /** + * Returns the hash algorithm used for the canonicalization algorithm. + * + * @return the hash algorithm used for the canonicalization algorithm + */ + public HashAlgorithm getHashAlgorithm() { + return hashAlgorithm; + } + + /** + * Sets the hash algorithm used for the canonicalization algorithm. 
+ * + * @param hashAlgorithm the hash algorithm used for the canonicalization + * algorithm + */ + public void setHashAlgorithm(HashAlgorithm hashAlgorithm) { + this.hashAlgorithm = hashAlgorithm; } //////////////////// @@ -156,8 +210,7 @@ public CanonicalizedDataset canonicalRdf10() { // the original list } - // Generate canonical identifiers for blank nodes with multiple first degree - // hashes + // Build N-degree hash for each blank node with multiple first degree hash // 4.4.3) Step 5 for (String hash : this.canonicalizationState.getHashesSorted()) { // 4.4.3) Step 5.1 @@ -178,7 +231,7 @@ public CanonicalizedDataset canonicalRdf10() { tempIssuer.issueCanonicalIdentifier(blankNodeIdentifier); // 4.4.3) Step 5.2.4 - Pair result = this.hashNdegreeQuads(tempIssuer, blankNodeIdentifier); + Pair result = this.hashNdegreeQuads(tempIssuer, blankNodeIdentifier, 0); hashPathList.add(result); } @@ -186,7 +239,6 @@ public CanonicalizedDataset canonicalRdf10() { // sort the list by the hash hashPathList.sort((p1, p2) -> p1.getLeft().compareTo(p2.getLeft())); - for (Pair result : hashPathList) { CanonicalIssuer issuer = result.getRight(); @@ -278,7 +330,7 @@ private String hashFirstDegreeQuads(String blankNodeIdentifier) { // 4.6.3) Step 4 nquads.sort(String::compareTo); - return HashingUtility.sha256(String.join("\n", nquads) + "\n"); + return HashingUtility.hash(String.join("\n", nquads) + "\n", this.hashAlgorithm); } /** @@ -300,7 +352,7 @@ private String serializeQuad(Edge quad, String referenceBlankNodeIdentifier) { boolean isDefaultGraph = graph.getLabel().equals(ExpType.DEFAULT_GRAPH); String subjectString = getNodeString(subject, referenceBlankNodeIdentifier); - String predicateString = predicate.toString(); // Predicates cannot be blank nodes + String predicateString = getNodeString(predicate, referenceBlankNodeIdentifier); String objectString = getNodeString(object, referenceBlankNodeIdentifier); String graphString = isDefaultGraph ? "" : getNodeString(graph, referenceBlankNodeIdentifier); @@ -324,7 +376,30 @@ private String getNodeString(Node node, String referenceBlankNodeIdentifier) { ? "_:a" : "_:z"; } else { - return node.toString(); + return this.ntriplesFormat.printNode(node); + } + } + + /////////////// + // Exception // + /////////////// + + /** + * Thrown to indicate that an error occurred during the canonicalization of an + * RDF dataset. + */ + public static class CanonicalizationException extends RuntimeException { + + private static final long serialVersionUID = 1L; + + /** + * Constructs a new {@code CanonicalizationException} with the specified + * detail message. 
+ * + * @param message the detail message + */ + public CanonicalizationException(String message) { + super(message); } } @@ -346,14 +421,19 @@ private String getNodeString(Node node, String referenceBlankNodeIdentifier) { * N-degree Quads * */ - private Pair hashNdegreeQuads(CanonicalIssuer issuer, String blankNodeId) { - // Reference issuer because issuer is modified in the method body + private Pair hashNdegreeQuads(CanonicalIssuer issuer, String blankNodeId, int depth) { + + // Check if depth factor is reached + if (depth >= this.depthFactor * this.canonicalizedDataset.getBlankNodeIdentifiers().size()) { + throw new CanonicalizationException("Depth factor reached, too many recursions"); + } + // in step 4.8.3) Step 5.6 CanonicalIssuer refIssuer = issuer; // 4.8.3) Step 1 // Use a tree map to ensure that the hashes are sorted - Map relatedHashToRelatedBNIdMap = new TreeMap<>(); + ListMap relatedHashToRelatedBNIdMap = new ListMap<>(); // 4.8.3) Step 2 List quads = this.canonicalizationState.getQuadsForBlankNode(blankNodeId); @@ -382,7 +462,14 @@ private Pair hashNdegreeQuads(CanonicalIssuer issuer, S CanonicalIssuer chosenIssuer = null; // 4.8.3) Step 5.4 - for (List permutation : this.permute(new ArrayList<>(relatedHashToRelatedBNIdMap.values()))) { + List> permutations = this.permute(relatedHashToRelatedBNIdMap.get(hash)); + + // Check if the permutation limit is reached + if (permutations.size() > this.permutationLimit) { + throw new CanonicalizationException("Permutation limit reached, too many permutations"); + } + + for (List permutation : permutations) { // 4.8.3) Step 5.4.1 CanonicalIssuer issuerCopy = new CanonicalIssuer(refIssuer); @@ -420,13 +507,13 @@ private Pair hashNdegreeQuads(CanonicalIssuer issuer, S // 4.8.3) Step 5.4.5 for (String relatedBNId : recursionList) { // 4.8.3) Step 5.4.5.1 - Pair result = this.hashNdegreeQuads(issuerCopy, relatedBNId); + Pair result = this.hashNdegreeQuads(issuerCopy, relatedBNId, depth + 1); // 4.8.3) Step 5.4.5.2 path += "_:" + issuerCopy.issueCanonicalIdentifier(relatedBNId); // 4.8.3) Step 5.4.5.3 - path += "<" + result + ">"; + path += "<" + result.getLeft() + ">"; // 4.8.3) Step 5.4.5.4 issuerCopy = result.getRight(); @@ -453,7 +540,7 @@ private Pair hashNdegreeQuads(CanonicalIssuer issuer, S } // 4.8.3) Step 6 - return Pair.of(HashingUtility.sha256(data.toString()), refIssuer); + return Pair.of(HashingUtility.hash(data.toString(), this.hashAlgorithm), refIssuer); } /** @@ -498,7 +585,7 @@ private List> permute(List original) { * @param relatedBN The related blank node. 
*/ private void processQuadEntry(Edge quad, CanonicalIssuer issuer, String blankNodeId, - Map relatedHashToRelatedBNIdMap, String position, Node relatedBN) { + ListMap relatedHashToRelatedBNIdMap, String position, Node relatedBN) { String relatedBNId = this.canonicalizedDataset.getIdentifierForBlankNode(relatedBN); if (relatedBN.isBlank() && !relatedBNId.equals(blankNodeId)) { @@ -506,7 +593,7 @@ private void processQuadEntry(Edge quad, CanonicalIssuer issuer, String blankNod String relatedHash = this.hashRelatedBlankNode(relatedBNId, quad, issuer, position); // 4.8.3) Step 3.1.2 - relatedHashToRelatedBNIdMap.put(relatedHash, relatedBNId); + relatedHashToRelatedBNIdMap.add(relatedHash, relatedBNId); } } @@ -557,7 +644,7 @@ private String hashRelatedBlankNode(String relatedBNId, Edge quad, CanonicalIssu } // 4.7.3) Step 5 - return HashingUtility.sha256(input.toString()); + return HashingUtility.hash(input.toString(), this.hashAlgorithm); } ///////////////////////// diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalizationState.java b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalizationState.java index afd04b097..4d83a4fcf 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalizationState.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalizationState.java @@ -78,13 +78,16 @@ public void removeHash(String hash) { } /** - * Retrieves blanks nodes associated with a specific hash value. + * Retrieves sorted list of blank nodes identifiers associated with a specific + * hash value. * * @param hash The hash value. * @return A list of blank nodes associated with the hash value. */ public List getBlankNodeForHash(String hash) { - return Collections.unmodifiableList(this.hashToBlankNode.get(hash)); + List list = this.hashToBlankNode.get(hash); + Collections.sort(list); + return Collections.unmodifiableList(list); } /** diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalizedDataset.java b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalizedDataset.java index 72e30b6f9..faacfc46c 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalizedDataset.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalizedDataset.java @@ -2,7 +2,7 @@ import java.util.Collection; import java.util.Collections; -import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.Map; import fr.inria.corese.core.Graph; @@ -16,9 +16,8 @@ public class CanonicalizedDataset { private final Graph dataset; - private Map blankNodesToIdentifiers = new HashMap<>(); - private final boolean blankNodesToIdentifiersInitialized; - private Map issuedIdentifierMap = new HashMap<>(); + private Map blankNodesToIdentifiers = new LinkedHashMap<>(); + private Map issuedIdentifierMap = new LinkedHashMap<>(); ///////////////// // Constructor // @@ -32,21 +31,6 @@ public class CanonicalizedDataset { */ public CanonicalizedDataset(Graph graph) { this.dataset = graph; - this.blankNodesToIdentifiersInitialized = false; - } - - /** - * Constructs a CanonicalizedDataset with a given graph and a pre-defined - * mapping of blank nodes to identifiers. - * - * @param graph The graph to be associated with this dataset. - * @param blankNodesToIdentifiers The pre-defined mapping of blank nodes to - * their identifiers. 
- */ - public CanonicalizedDataset(Graph graph, Map blankNodesToIdentifiers) { - this.dataset = graph; - this.blankNodesToIdentifiers = blankNodesToIdentifiers; - this.blankNodesToIdentifiersInitialized = true; } //////////////////////// @@ -78,11 +62,7 @@ public void associateBlankNodeWithIdentifier(Node blankNode) { throw new IllegalArgumentException("Node is not blank"); } - if (this.blankNodesToIdentifiersInitialized) { - return; - } - - String identifier = blankNode.getLabel(); + String identifier = blankNode.getLabel().replace("_:", ""); this.blankNodesToIdentifiers.put(blankNode, identifier); } @@ -127,6 +107,15 @@ public String getIssuedIdentifier(String blankNodeId) { return issuedIdentifierMap.get(blankNodeId); } + /** + * Retrieves the issued identifier map. + * + * @return The issued identifier map. + */ + public Map getIssuedIdentifiersMap() { + return Collections.unmodifiableMap(issuedIdentifierMap); + } + /////////////// // To String // /////////////// diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/HashingUtility.java b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/HashingUtility.java index 074403d11..07cb9add0 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/HashingUtility.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/HashingUtility.java @@ -10,18 +10,49 @@ public class HashingUtility { /** - * Generates a SHA-256 hash of the input string. + * Represents the hash algorithm to use. + */ + public static enum HashAlgorithm { + /** + * Represents the SHA-256 hash algorithm. + */ + SHA_256("SHA-256"), + + /** + * Represents the SHA-384 hash algorithm. + */ + SHA_384("SHA-384"); + + private final String algorithm; + + private HashAlgorithm(String algorithm) { + this.algorithm = algorithm.replace("-", ""); + } + + /** + * Gets the algorithm name. + * + * @return the algorithm name + */ + public String getAlgorithm() { + return algorithm; + } + } + + /** + * Hashes a string using the specified algorithm. 
* - * @param input the input string to hash - * @return the hashed string in hexadecimal format + * @param input the string to hash + * @param algorithm the algorithm to use + * @return the hash of the input string */ - public static String sha256(String input) { + public static String hash(String input, HashAlgorithm algorithm) { try { - MessageDigest digest = MessageDigest.getInstance("SHA-256"); + MessageDigest digest = MessageDigest.getInstance(algorithm.getAlgorithm()); byte[] encodedhash = digest.digest(input.getBytes(StandardCharsets.UTF_8)); return toHexString(encodedhash); } catch (NoSuchAlgorithmException e) { - throw new RuntimeException("SHA-256 algorithm not found", e); + throw new RuntimeException(algorithm.getAlgorithm() + " algorithm not found", e); } } diff --git a/corese-unit-test/pom.xml b/corese-unit-test/pom.xml index 0acb8d993..e848ce4f1 100644 --- a/corese-unit-test/pom.xml +++ b/corese-unit-test/pom.xml @@ -1,5 +1,7 @@ - + 4.0.0 @@ -67,6 +69,19 @@ 4.8.0 + + org.apache.logging.log4j + log4j-slf4j18-impl + + + org.apache.logging.log4j + log4j-api + + + org.apache.logging.log4j + log4j-core + + org.apache.jena jena-arq @@ -79,12 +94,14 @@ - + maven-clean-plugin 3.1.0 - + maven-resources-plugin 3.0.2 @@ -113,7 +130,8 @@ maven-deploy-plugin 2.8.2 - + maven-site-plugin 3.7.1 diff --git a/corese-unit-test/src/main/java/fr/inria/corese/engine/QGVisitor.java b/corese-unit-test/src/main/java/fr/inria/corese/engine/QGVisitor.java deleted file mode 100644 index 87ed7ca96..000000000 --- a/corese-unit-test/src/main/java/fr/inria/corese/engine/QGVisitor.java +++ /dev/null @@ -1,88 +0,0 @@ -package fr.inria.corese.engine; - -import java.util.HashMap; - -import fr.inria.corese.core.Graph; -import fr.inria.corese.core.api.QueryGraphVisitor; -import fr.inria.corese.core.edge.EdgeImpl; -import fr.inria.corese.sparql.triple.parser.ASTQuery; -import fr.inria.corese.compiler.parser.NodeImpl; -import fr.inria.corese.kgram.api.core.Edge; -import fr.inria.corese.kgram.api.core.Node; -import fr.inria.corese.kgram.core.Query; - -/** - * Example of Query Graph Visitor that replace blank nodes by variables - * and select * - * - */ -public class QGVisitor implements QueryGraphVisitor { - - static final String VAR = "?_kg_var_"; - int count = 0; - - Table table; - - class Table extends HashMap { - - } - - QGVisitor(){ - table = new Table(); - } - - - - - public ASTQuery visit(ASTQuery ast) { - ast.setSelectAll(true); - return ast; - } - - - public Edge visit(Edge ent) { - - if (! (ent.getEdge() instanceof EdgeImpl)){ - return ent; - } - - EdgeImpl edge = (EdgeImpl) ent.getEdge(); - - for (int i = 0; i tests; + + public JUnitTestFileGenerator(String testName, URI manifestUri, List tests) { + this.testName = testName; + this.manifestUri = manifestUri; + this.tests = tests; + } + + /** + * Generates a JUnit test file for the W3C test suite. + * + * @param testsPath The path to the directory where the test file should be + * generated. 
+ */ + public void generate(Path testsPath) { + + // Initialize directories + Path testDirectory = this.createDirectory(testsPath.resolve(testName)); + + // Generate file test + String fileName = testName + "Test.java"; + Path testFile = this.generateTestFile(testDirectory, fileName); + + // Write test file + try { + Files.write(testFile, this.generateTestFileContent(testFile.toString(), fileName).getBytes()); + logger.info("Wrote test file: " + testFile); + } catch (IOException e) { + logger.error("Failed to write test file: " + testFile, e); + } + + } + + /** + * Creates a directory at the specified path if it does not already exist. + * + * @param directoryPath The path to the directory to create. + * @param directoryType The type of directory to create. + */ + private Path createDirectory(Path directoryPath) { + if (!Files.exists(directoryPath)) { + try { + Files.createDirectories(directoryPath); + logger.info("Created directory: " + directoryPath); + } catch (IOException e) { + logger.error("Failed to create directory: " + directoryPath, e); + } + } + return directoryPath; + } + + /** + * Generates a test file at the specified path if it does not already exist. + * + * @param testDirectory The directory where the test file should be generated. + * @param fileName The name of the test file. + * @return The path to the test file. + */ + private Path generateTestFile(Path testDirectory, String fileName) { + Path filePath = testDirectory.resolve(fileName); + + if (Files.exists(filePath)) { + return filePath; + } + + try { + Files.createFile(filePath); + logger.info("Created test file: " + filePath); + } catch (IOException e) { + logger.error("Failed to create test file: " + filePath, e); + } + return filePath; + } + + /** + * Generates the content of the test file. + * + * @param path The path of the file. + * @param fileName The name of the file. + * @return The content of the test file. 
+ */ + private String generateTestFileContent(String path, String fileName) { + StringBuilder content = new StringBuilder(); + + // Package + content.append(this.getPackage(path, fileName)); + content.append("\n"); + content.append("\n"); + + // Imports + Set imports = new HashSet<>(); + for (IW3cTest test : tests) { + imports.addAll(test.getImports()); + } + imports.stream().sorted().forEach(imp -> content.append("import ").append(imp).append(";\n")); + content.append("\n"); + + // Class comment + DateTimeFormatter timeFormatter = DateTimeFormatter.ofPattern("HH:mm:ss"); + ZonedDateTime nowWithZone = ZonedDateTime.now(); + + content.append("/**\n"); + content.append(" * Auto-generated JUnit test file for the W3C test suite: "); + content.append(manifestUri); + content.append("\n"); + content.append(" * This file was automatically generated by JUnitTestFileGenerator.java.\n"); + content.append(" * Generation date: "); + content.append(LocalDate.now()); + content.append(", Time: "); + content.append(nowWithZone.format(timeFormatter)); + content.append(" "); + content.append(nowWithZone.getZone()); + content.append("\n"); + content.append(" */\n"); + + // Class declaration + content.append("public class "); + content.append(fileName.substring(0, fileName.indexOf("."))); + content.append(" {"); + content.append("\n"); + content.append("\n"); + + // Test methods + for (IW3cTest test : tests) { + content.append(test.generate()); + content.append("\n"); + } + + // End of class + content.append("}"); + + return content.toString(); + } + + /** + * Returns the package declaration based on the given path and file name. + * + * @param path The path of the file. + * @param fileName The name of the file. + * @return The package declaration. + */ + private String getPackage(String path, String fileName) { + String packagePath = path.substring(path.indexOf("java") + 5) + .replace("/", ".") + .replace("." + fileName, ""); + return "package " + packagePath + ";"; + } + +} diff --git a/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/Main.java b/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/Main.java new file mode 100644 index 000000000..87f527c6e --- /dev/null +++ b/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/Main.java @@ -0,0 +1,43 @@ +package fr.inria.corese.w3cTestsGenerator; + +import java.net.URI; +import java.nio.file.Path; +import java.nio.file.Paths; + +/** + * The Main class serves as the entry point for the application. + * It is responsible for initializing and executing the W3cTestsGenerator based + * on predefined paths. + */ +public class Main { + + // Define base directory using system's current directory + private static final Path BASE_PATH = Paths.get(System.getProperty("user.dir")); + + // Specify paths for tests, resources, and the manifest within the project + // structure + private static final Path TESTS_PATH_DIR = BASE_PATH + .resolve("corese-unit-test/src/test/java/fr/inria/corese/w3c"); + + /** + * Main method to execute the application. + * It creates and runs a W3cTestsGenerator with specified directories and + * manifest file. + * + * @param args Command line arguments (not used) + */ + public static void main(String[] args) { + generateW3cTests("canonicalRdf", "https://w3c.github.io/rdf-canon/tests/manifest.ttl"); + } + + /** + * Initializes and runs the W3cTestsGenerator for generating W3C tests. + * + * @param testName The name of the test suite to generate tests for. 
+ * @param manifestPath The path to the manifest file. + */ + private static void generateW3cTests(String testName, String manifestUri) { + W3cTestsGenerator generator = new W3cTestsGenerator(testName, URI.create(manifestUri), TESTS_PATH_DIR); + generator.generate(); + } +} diff --git a/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/W3cTestsGenerator.java b/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/W3cTestsGenerator.java new file mode 100644 index 000000000..0e605c820 --- /dev/null +++ b/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/W3cTestsGenerator.java @@ -0,0 +1,144 @@ +package fr.inria.corese.w3cTestsGenerator; + +import java.net.URI; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import fr.inria.corese.core.Graph; +import fr.inria.corese.core.load.Load; +import fr.inria.corese.core.query.QueryProcess; +import fr.inria.corese.kgram.core.Mapping; +import fr.inria.corese.kgram.core.Mappings; +import fr.inria.corese.w3cTestsGenerator.w3cTests.IW3cTest; +import fr.inria.corese.w3cTestsGenerator.w3cTests.factory.W3cTestFactory; +import fr.inria.corese.w3cTestsGenerator.w3cTests.factory.W3cTestFactory.TestCreationException; + +/** + * Generates JUnit test cases from W3C test manifest files. + */ +public class W3cTestsGenerator { + + private static final Logger logger = LogManager.getLogger(W3cTestsGenerator.class); + + private final URI manifestUri; + private final Path testsPath; + private final String testName; + + /** + * Constructs a new W3cTestsGenerator with the specified test name, manifest + * file path and test directory path. + * + * @param testName The name of the test. + * @param manifestUri The URI of the manifest file. + * @param testsPath The path to tests directory. + */ + public W3cTestsGenerator(String testName, URI manifestUri, Path testsPath) { + this.testName = testName; + this.manifestUri = manifestUri; + this.testsPath = testsPath; + } + + /** + * Generates JUnit test cases from the W3C test manifest file. + */ + public void generate() { + // Load manifest file + Graph graph = loadManifest(); + + // Generate list of test cases + List testCases = getListOfTestCases(graph); + + // Generate JUnit test file + JUnitTestFileGenerator generator = new JUnitTestFileGenerator(testName, manifestUri, testCases); + generator.generate(testsPath); + } + + //////////////////////// + // Load manifest file // + //////////////////////// + + /** + * Loads the W3C test manifest file into a graph. + * + * @return The graph containing the manifest file. + */ + private Graph loadManifest() { + logger.info("Loading manifest file: " + manifestUri); + Graph graph = Graph.create(); + graph.init(); + Load loader = Load.create(graph); + + try { + loader.parse(manifestUri.toString()); + } catch (Exception e) { + logger.error("Error loading manifest file: " + manifestUri, e); + System.exit(1); + } + + return graph; + } + + //////////////////////////// + // Get list of test cases // + //////////////////////////// + + /** + * Gets the list of test cases from the specified graph. + * + * @param graph The graph containing the test cases. + * @return The list of test cases. 
+ */ + private List getListOfTestCases(Graph graph) { + QueryProcess exec = QueryProcess.create(graph); + String query = buildTestCasesQuery(); + Mappings mappings; + + try { + mappings = exec.query(query); + } catch (Exception e) { + logger.error("Error executing query.", e); + return new ArrayList<>(); + } + + if (mappings == null) { + logger.warn("Query returned null mappings."); + return new ArrayList<>(); + } + + List testCases = new ArrayList<>(); + for (Mapping mapping : mappings) { + String test = mapping.getValue("?test").getLabel(); + String type = mapping.getValue("?type").getLabel(); + try { + testCases.add(W3cTestFactory.createW3cTest(test, type, exec)); + } catch (TestCreationException e) { + logger.error("Error creating test: " + test, e); + System.exit(1); + } + } + + logger.info("Loaded " + testCases.size() + " test cases."); + return testCases; + } + + /** + * Builds a query to retrieve the test cases from the manifest file. + * + * @return The query to retrieve the test cases. + */ + private String buildTestCasesQuery() { + return "PREFIX mf: \n" + + "PREFIX rdf: \n" + + "PREFIX rdfs: \n" + + "\n" + + "SELECT ?type ?test WHERE {\n" + + " ?manifest a mf:Manifest .\n" + + " ?manifest mf:entries/rdf:rest*/rdf:first ?test .\n" + + " ?test rdf:type ?type .\n" + + "} ORDER BY ?test"; + } +} diff --git a/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/IW3cTest.java b/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/IW3cTest.java new file mode 100644 index 000000000..03ec2cce8 --- /dev/null +++ b/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/IW3cTest.java @@ -0,0 +1,23 @@ +package fr.inria.corese.w3cTestsGenerator.w3cTests; + +import java.util.Set; + +/** + * Interface for W3C tests. + */ +public interface IW3cTest { + + /** + * Returns the set of imports required for the W3C test. + * + * @return the set of imports + */ + public Set getImports(); + + /** + * Generates the junit test for the W3C test. + * + * @return the junit test in string format + */ + public String generate(); +} diff --git a/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/factory/W3cTestFactory.java b/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/factory/W3cTestFactory.java new file mode 100644 index 000000000..fb2c59326 --- /dev/null +++ b/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/factory/W3cTestFactory.java @@ -0,0 +1,148 @@ +package fr.inria.corese.w3cTestsGenerator.w3cTests.factory; + +import java.net.URI; +import java.util.Map; +import java.util.Optional; + +import fr.inria.corese.core.print.rdfc10.HashingUtility.HashAlgorithm; +import fr.inria.corese.core.query.QueryProcess; +import fr.inria.corese.kgram.core.Mappings; +import fr.inria.corese.sparql.exceptions.EngineException; +import fr.inria.corese.w3cTestsGenerator.w3cTests.IW3cTest; +import fr.inria.corese.w3cTestsGenerator.w3cTests.implementations.RDFC10EvalTest; +import fr.inria.corese.w3cTestsGenerator.w3cTests.implementations.RDFC10MapTest; +import fr.inria.corese.w3cTestsGenerator.w3cTests.implementations.RDFC10NegativeEvalTest; + +/** + * Factory for creating W3C tests. + */ +public class W3cTestFactory { + + /** + * Map of test type URIs to test types. 
+ */ + private static final Map typeMap = Map.of( + "https://w3c.github.io/rdf-canon/tests/vocab#RDFC10EvalTest", TestType.RDFC10EvalTest, + "https://w3c.github.io/rdf-canon/tests/vocab#RDFC10MapTest", TestType.RDFC10MapTest, + "https://w3c.github.io/rdf-canon/tests/vocab#RDFC10NegativeEvalTest", TestType.RDFC10NegativeEvalTest); + + /** + * Enumeration of test types. + */ + public enum TestType { + RDFC10EvalTest, + RDFC10MapTest, + RDFC10NegativeEvalTest + } + + /** + * Creates a W3C test from the specified test name, type URI, and query process. + * + * @param test The name of the test. + * @param typeUri The URI of the test type. + * @param queryProcess The query process. + * @return The W3C test. + * @throws TestCreationException If an error occurs while creating the test. + */ + public static IW3cTest createW3cTest(String test, String typeUri, QueryProcess queryProcess) + throws TestCreationException { + String query = buildTestDetailQuery(test); + Mappings mappings = executeQuery(queryProcess, query) + .orElseThrow(() -> new TestCreationException("Failed to retrieve test details for: " + test)); + + TestType type = typeMap.get(typeUri); + if (type == null) { + throw new TestCreationException("Unsupported test type URI: " + typeUri); + } + + String name = mappings.getValue("?name").getLabel(); + String comment = mappings.getValue("?comment") != null ? mappings.getValue("?comment").getLabel() : ""; + + HashAlgorithm hashAlgorithm = null; + + if (mappings.getValue("?hashAlgorithm") != null) { + switch (mappings.getValue("?hashAlgorithm").getLabel()) { + case "SHA256": + hashAlgorithm = HashAlgorithm.SHA_256; + break; + case "SHA384": + hashAlgorithm = HashAlgorithm.SHA_384; + break; + default: + throw new TestCreationException( + "Unsupported hash algorithm: " + mappings.getValue("?hashAlgorithm").getLabel()); + } + } + + switch (type) { + case RDFC10EvalTest: + return new RDFC10EvalTest( + test, + name, + comment, + URI.create(mappings.getValue("?action").getLabel()), + URI.create(mappings.getValue("?result").getLabel()), + hashAlgorithm); + case RDFC10MapTest: + return new RDFC10MapTest( + test, + name, + comment, + URI.create(mappings.getValue("?action").getLabel()), + URI.create(mappings.getValue("?result").getLabel()), + hashAlgorithm); + case RDFC10NegativeEvalTest: + return new RDFC10NegativeEvalTest( + test, + name, + comment, + URI.create(mappings.getValue("?action").getLabel())); + default: + throw new TestCreationException("Unsupported test type: " + type); + } + } + + /** + * Builds a query to retrieve the test details from the manifest file. + * + * @return The query to retrieve the test details. + */ + private static String buildTestDetailQuery(String test) { + return "PREFIX mf: \n" + + "PREFIX rdfc: \n" + + "PREFIX rdfs: \n" + + "SELECT ?name ?comment ?action ?result WHERE {" + + " <" + test + "> mf:name ?name ;" + + " mf:action ?action ." + + " optional { <" + test + "> mf:result ?result } ." + + " optional { <" + test + "> rdfs:comment ?comment } ." + + " optional { <" + test + "> rdfc:hashAlgorithm ?hashAlgorithm } ." + + "}"; + } + + /** + * Executes the specified query using the specified query process. + * + * @param queryProcess The query process. + * @param query The query to execute. + * @return The mappings resulting from the query execution, or an empty optional + * if an error occurs. 
+ */ + private static Optional executeQuery(QueryProcess queryProcess, String query) { + try { + return Optional.ofNullable(queryProcess.query(query)); + } catch (EngineException e) { + e.printStackTrace(); + return Optional.empty(); + } + } + + /** + * Exception thrown when an error occurs while creating a test. + */ + public static class TestCreationException extends Exception { + public TestCreationException(String message) { + super(message); + } + } +} diff --git a/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/implementations/RDFC10EvalTest.java b/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/implementations/RDFC10EvalTest.java new file mode 100644 index 000000000..2fb1b335c --- /dev/null +++ b/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/implementations/RDFC10EvalTest.java @@ -0,0 +1,94 @@ +package fr.inria.corese.w3cTestsGenerator.w3cTests.implementations; + +import java.net.URI; +import java.util.Set; + +import fr.inria.corese.core.print.rdfc10.HashingUtility.HashAlgorithm; +import fr.inria.corese.w3cTestsGenerator.w3cTests.IW3cTest; + +/** + * Represents a test for the RDFC10EvalTest type. + */ +public class RDFC10EvalTest implements IW3cTest { + + private String test; + private String name; + private String comment; + + private URI actionFile; + + private URI resultFile; + + private HashAlgorithm hashAlgorithm; + + public RDFC10EvalTest(String testUri, String name, String comment, URI actionUri, URI resultUri, + HashAlgorithm hashAlgorithm) { + this.test = testUri.split("#")[1]; + this.name = name; + this.comment = comment; + this.actionFile = actionUri; + this.resultFile = resultUri; + this.hashAlgorithm = hashAlgorithm; + } + + @Override + public Set getImports() { + return Set.of("fr.inria.corese.core.Graph", + "fr.inria.corese.core.load.Load", + "fr.inria.corese.core.print.rdfc10.HashingUtility.HashAlgorithm", + "fr.inria.corese.core.print.CanonicalRdf10Format", + "static org.junit.Assert.assertEquals", + "java.io.IOException", + "java.net.URISyntaxException", + "org.junit.Test", + "fr.inria.corese.core.load.LoadException", + "java.net.URL", + "java.util.Scanner"); + } + + @Override + public String generate() { + StringBuilder sb = new StringBuilder(); + + // Header of the test + sb.append(" // ").append(this.name).append("\n"); + if (!this.comment.isEmpty()) { + sb.append(" // ").append(this.comment).append("\n"); + } + sb.append(" @Test\n"); + sb.append(" public void ").append(test); + sb.append("() throws IOException, LoadException, URISyntaxException {\n"); + + // Test body + sb.append(" // Create graph and load action file\n"); + sb.append(" Graph graph = Graph.create();\n"); + sb.append(" Load ld = Load.create(graph);\n"); + sb.append(" ld.parse(\"").append(actionFile).append("\");\n"); + sb.append("\n"); + sb.append(" // Create canonical RDF 1.0 format and convert graph to string\n"); + if (hashAlgorithm != null && hashAlgorithm != HashAlgorithm.SHA_256) { + sb.append(" CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph, HashAlgorithm.") + .append(hashAlgorithm).append(");\n"); + } else { + sb.append(" CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph);\n"); + } + sb.append(" String result = rdfc10.toString();\n"); + sb.append("\n"); + sb.append(" // Load expected result file\n"); + sb.append(" URL url = new URL(\"").append(resultFile).append("\");\n"); + sb.append(" Scanner scanner = new Scanner(url.openStream(), \"UTF-8\");\n"); + sb.append(" 
scanner.useDelimiter(\"\\\\A\");\n"); + sb.append(" String expected = scanner.hasNext() ? scanner.next() : \"\";\n"); + sb.append(" scanner.close();\n"); + sb.append("\n"); + + // Test assertion + sb.append(" assertEquals(expected, result);\n"); + + // Footer of the test + sb.append(" }\n"); + + return sb.toString(); + } + +} diff --git a/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/implementations/RDFC10MapTest.java b/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/implementations/RDFC10MapTest.java new file mode 100644 index 000000000..043ac93a3 --- /dev/null +++ b/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/implementations/RDFC10MapTest.java @@ -0,0 +1,92 @@ +package fr.inria.corese.w3cTestsGenerator.w3cTests.implementations; + +import java.net.URI; +import java.util.Set; + +import fr.inria.corese.core.print.rdfc10.HashingUtility.HashAlgorithm; +import fr.inria.corese.w3cTestsGenerator.w3cTests.IW3cTest; + +/** + * Represents a test for the RDFC10MapTest type. + */ +public class RDFC10MapTest implements IW3cTest { + + private String test; + private String name; + private String comment; + + private URI actionFile; + + private URI resultFile; + + private HashAlgorithm hashAlgorithm; + + public RDFC10MapTest(String testUri, String name, String comment, URI actionUri, URI resultUri, + HashAlgorithm hashAlgorithm) { + this.test = testUri.split("#")[1]; + this.name = name; + this.comment = comment; + this.actionFile = actionUri; + this.resultFile = resultUri; + this.hashAlgorithm = hashAlgorithm; + } + + @Override + public Set getImports() { + return Set.of("fr.inria.corese.core.Graph", + "fr.inria.corese.core.load.Load", + "fr.inria.corese.core.print.CanonicalRdf10Format", + "fr.inria.corese.core.print.rdfc10.HashingUtility.HashAlgorithm", + "java.net.URL", + "com.fasterxml.jackson.core.type.TypeReference", + "com.fasterxml.jackson.databind.ObjectMapper", + "fr.inria.corese.core.load.LoadException", + "java.io.IOException", + "org.junit.Test", + "static org.junit.Assert.assertEquals", + "java.util.Map"); + + } + + @Override + public String generate() { + StringBuilder sb = new StringBuilder(); + + // Header of the test + sb.append(" // ").append(this.name).append("\n"); + if (!this.comment.isEmpty()) { + sb.append(" // ").append(this.comment).append("\n"); + } + sb.append(" @Test\n"); + sb.append(" public void ").append(test); + sb.append("() throws LoadException, IOException {\n"); + + // Test body + sb.append(" // Create graph and load action file\n"); + sb.append(" Graph graph = Graph.create();\n"); + sb.append(" Load ld = Load.create(graph);\n"); + sb.append(" ld.setRenameBlankNode(false);\n"); + sb.append(" ld.parse(\"").append(actionFile).append("\");\n"); + sb.append("\n"); + sb.append(" // Create canonical RDF 1.0 format and get map of issued identifiers\n"); + if (hashAlgorithm != null && hashAlgorithm != HashAlgorithm.SHA_256) { + sb.append(" CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph, HashAlgorithm.") + .append(hashAlgorithm).append(");\n"); + } else { + sb.append(" CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph);\n"); + } + sb.append(" Map result = rdfc10.getIssuedIdentifiersMap();\n"); + sb.append("\n"); + sb.append(" // Load map from result json file\n"); + sb.append(" URL url = new URL(\"").append(resultFile).append("\");\n"); + sb.append( + " Map expect = new ObjectMapper().readValue(url, new TypeReference>(){});\n"); + sb.append("\n"); + sb.append(" // 
Compare the two maps\n"); + sb.append(" assertEquals(expect, result);\n"); + sb.append(" }\n"); + + return sb.toString(); + } + +} diff --git a/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/implementations/RDFC10NegativeEvalTest.java b/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/implementations/RDFC10NegativeEvalTest.java new file mode 100644 index 000000000..f24ce9eda --- /dev/null +++ b/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/implementations/RDFC10NegativeEvalTest.java @@ -0,0 +1,68 @@ +package fr.inria.corese.w3cTestsGenerator.w3cTests.implementations; + +import java.net.URI; +import java.util.Set; + +import fr.inria.corese.w3cTestsGenerator.w3cTests.IW3cTest; + +/** + * Represents a test for the RDFC10NegativeEvalTest type. + */ +public class RDFC10NegativeEvalTest implements IW3cTest { + + private String test; + private String name; + private String comment; + + private URI actionFile; + + public RDFC10NegativeEvalTest(String testUri, String name, String comment, URI actionUri) { + this.test = testUri.split("#")[1]; + this.name = name; + this.comment = comment; + this.actionFile = actionUri; + } + + @Override + public Set getImports() { + return Set.of( + "fr.inria.corese.core.print.rdfc10.CanonicalRdf10.CanonicalizationException", + "java.io.IOException", + "fr.inria.corese.core.load.LoadException", + "fr.inria.corese.core.Graph", + "fr.inria.corese.core.load.Load", + "fr.inria.corese.core.print.CanonicalRdf10Format", + "org.junit.Test"); + } + + @Override + public String generate() { + StringBuilder sb = new StringBuilder(); + + // Header of the test + sb.append(" // ").append(this.name).append("\n"); + if (!this.comment.isEmpty()) { + sb.append(" // ").append(this.comment).append("\n"); + } + sb.append(" @Test(expected = CanonicalizationException.class)\n"); + sb.append(" public void ").append(test); + sb.append("() throws IOException, LoadException {\n"); + + // Test body + sb.append(" // Create graph and load action file\n"); + sb.append(" Graph graph = Graph.create();\n"); + sb.append(" Load ld = Load.create(graph);\n"); + sb.append(" ld.parse(\"").append(actionFile).append("\");\n"); + sb.append("\n"); + sb.append(" // Attempt to create canonical RDF 1.0 format, expecting a failure\n"); + sb.append(" CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph);\n"); + sb.append(" // This line should trigger the CanonicalizationException\n"); + sb.append(" rdfc10.toString();\n"); + + // Footer of the test + sb.append(" }\n"); + + return sb.toString(); + } + +} diff --git a/corese-unit-test/src/main/java/module-info.java b/corese-unit-test/src/main/java/module-info.java index 821b5d80a..b83259263 100644 --- a/corese-unit-test/src/main/java/module-info.java +++ b/corese-unit-test/src/main/java/module-info.java @@ -13,6 +13,5 @@ requires transitive org.apache.jena.iri; requires com.google.common; requires org.slf4j; - - opens fr.inria.corese.engine; + requires org.apache.logging.log4j; } \ No newline at end of file diff --git a/corese-unit-test/src/main/resources/log4j2.xml b/corese-unit-test/src/main/resources/log4j2.xml index 7f2a9bcb4..a2bc1d655 100644 --- a/corese-unit-test/src/main/resources/log4j2.xml +++ b/corese-unit-test/src/main/resources/log4j2.xml @@ -1,17 +1,15 @@ - + - + - - + + - - + - + \ No newline at end of file diff --git a/corese-unit-test/src/test/java/fr/inria/corese/engine/TestQuery1.java b/corese-unit-test/src/test/java/fr/inria/corese/engine/TestQuery1.java index 
6c47c504a..7df5d5991 100644 --- a/corese-unit-test/src/test/java/fr/inria/corese/engine/TestQuery1.java +++ b/corese-unit-test/src/test/java/fr/inria/corese/engine/TestQuery1.java @@ -35,7 +35,6 @@ import fr.inria.corese.core.producer.DataFilter; import fr.inria.corese.core.producer.DataFilterFactory; import fr.inria.corese.core.query.QueryEngine; -import fr.inria.corese.core.query.QueryGraph; import fr.inria.corese.core.query.QueryProcess; import fr.inria.corese.core.transform.Loader; import fr.inria.corese.core.transform.Transformer; @@ -8657,67 +8656,6 @@ public void testRelax() { } - /** - * Create a Query graph from an RDF Graph Execute the query Use case: find - * similar Graphs (cf Corentin) - */ - - public void testQueryGraph() { - - Graph graph = createGraph(); - QueryProcess exec = QueryProcess.create(graph); - - String init = "prefix : " - + "" - + "insert data {" - + ":a :p :b, :c ." - + ":b :q :d " - + ":c :q :d " - + ":d :p :e " - + ":e :q :f " - + "" - + "} "; - - String cons = "prefix : " - + "" - + "construct {?x :p []}" - + "where {?x :p ?y}"; - - String init2 = "prefix : " - + "" - + "insert data {" - + ":a :p [] ." - + "}"; - - try { - // create a graph - exec.query(init); - - // create a copy where triple objects (values) are Blank Nodes (aka Variables) - // consider the copy as a Query Graph and execute it - Mappings map = exec.queryGraph(cons); - - assertEquals("Results", 4, map.size()); - - Graph g2 = createGraph(); - QueryProcess exec2 = QueryProcess.create(g2); - exec2.query(init2); - - QueryGraph qg = QueryGraph.create(g2); - new QGVisitor(); - // qg.setVisitor(vis); - qg.setConstruct(true); - map = exec.query(qg); - - Graph res = exec.getGraph(map); - assertEquals("Results", 2, res.size()); - - } catch (EngineException e) { - e.printStackTrace(); - } - - } - @Test public void testOption() { diff --git a/corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf/canonicalRdfTest.java b/corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf/canonicalRdfTest.java new file mode 100644 index 000000000..e0ca3db50 --- /dev/null +++ b/corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf/canonicalRdfTest.java @@ -0,0 +1,1909 @@ +package fr.inria.corese.w3c.canonicalRdf; + +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import fr.inria.corese.core.Graph; +import fr.inria.corese.core.load.Load; +import fr.inria.corese.core.load.LoadException; +import fr.inria.corese.core.print.CanonicalRdf10Format; +import fr.inria.corese.core.print.rdfc10.CanonicalRdf10.CanonicalizationException; +import fr.inria.corese.core.print.rdfc10.HashingUtility.HashAlgorithm; +import java.io.IOException; +import java.net.URISyntaxException; +import java.net.URL; +import java.util.Map; +import java.util.Scanner; +import org.junit.Test; +import static org.junit.Assert.assertEquals; + +/** + * Auto-generated JUnit test file for the W3C test suite: https://w3c.github.io/rdf-canon/tests/manifest.ttl + * This file was automatically generated by JUnitTestFileGenerator.java. 
+ * Generation date: 2024-03-04, Time: 16:58:04 Europe/Paris + */ +public class canonicalRdfTest { + + // simple id + @Test + public void test001c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test001-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test001-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // duplicate property iri values + @Test + public void test002c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test002-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test002-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // bnode + @Test + public void test003c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test003-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test003-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // bnode (map test) + @Test + public void test003m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test003-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test003-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // bnode plus embed w/subject + @Test + public void test004c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test004-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test004-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // bnode plus embed w/subject (map test) + @Test + public void test004m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test004-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test004-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // bnode embed + @Test + public void test005c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test005-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test005-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // bnode embed (map test) + @Test + public void test005m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test005-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test005-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // multiple rdf types + @Test + public void test006c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test006-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test006-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // single subject complex + @Test + public void test008c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test008-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test008-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // multiple subjects - complex + @Test + public void test009c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test009-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test009-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // type + @Test + public void test010c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test010-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test010-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // type-coerced type + @Test + public void test011c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test011-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test011-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // type-coerced type, cycle + @Test + public void test013c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test013-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test013-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // check types + @Test + public void test014c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test014-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test014-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - dual link - embed + @Test + public void test016c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test016-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test016-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - dual link - embed (map test) + @Test + public void test016m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test016-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test016-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // blank node - dual link - non-embed + @Test + public void test017c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test017-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test017-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - dual link - non-embed (map test) + @Test + public void test017m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test017-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test017-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // blank node - self link + @Test + public void test018c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test018-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test018-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - self link (map test) + @Test + public void test018m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test018-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test018-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // blank node - disjoint self links + @Test + public void test019c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test019-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test019-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - diamond + @Test + public void test020c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test020-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test020-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - diamond (map test) + @Test + public void test020m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test020-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test020-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // blank node - circle of 2 + @Test + public void test021c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test021-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test021-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - double circle of 2 + @Test + public void test022c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test022-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test022-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - circle of 3 + @Test + public void test023c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test023-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test023-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - double circle of 3 (0-1-2) + @Test + public void test024c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test024-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test024-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - double circle of 3 (0-2-1) + @Test + public void test025c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test025-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test025-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - double circle of 3 (1-0-2) + @Test + public void test026c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test026-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test026-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - double circle of 3 (1-2-0) + @Test + public void test027c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test027-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test027-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - double circle of 3 (2-1-0) + @Test + public void test028c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test028-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test028-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - double circle of 3 (2-0-1) + @Test + public void test029c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test029-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test029-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - point at circle of 3 + @Test + public void test030c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test030-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test030-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - point at circle of 3 (map test) + @Test + public void test030m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test030-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test030-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // disjoint identical subgraphs (1) + @Test + public void test033c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test033-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test033-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // disjoint identical subgraphs (2) + @Test + public void test034c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test034-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test034-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // reordered w/strings (1) + @Test + public void test035c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test035-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test035-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // reordered w/strings (2) + @Test + public void test036c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test036-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test036-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // reordered 4 bnodes, reordered 2 properties (1) + @Test + public void test038c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test038-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test038-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // reordered 4 bnodes, reordered 2 properties (2) + @Test + public void test039c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test039-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test039-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // reordered 6 bnodes (1) + @Test + public void test040c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test040-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test040-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // literal with language + @Test + public void test043c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test043-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test043-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // poison – evil (1) + // A poison graph which is computable given defined limits. + @Test + public void test044c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test044-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test044-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // poison – evil (2) + // A poison graph which is computable given defined limits. + @Test + public void test045c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test045-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test045-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // poison – evil (3) + // A poison graph which is computable given defined limits. + @Test + public void test046c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test046-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test046-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // deep diff (1) + @Test + public void test047c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test047-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test047-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // deep diff (1) (map test) + @Test + public void test047m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test047-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test047-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // deep diff (2) + @Test + public void test048c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test048-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test048-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // deep diff (2) (map test) + @Test + public void test048m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test048-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test048-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // @list + // RDF Collections using rdf:first/rest ladders. 
+ @Test + public void test053c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test053-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test053-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // @list (map test) + // RDF Collections using rdf:first/rest ladders. + @Test + public void test053m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test053-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test053-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // t-graph + @Test + public void test054c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test054-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test054-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // simple reorder (1) + @Test + public void test055c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test055-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test055-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // simple reorder (1) (map test) + @Test + public void test055m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test055-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test055-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // simple reorder (2) + @Test + public void test056c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test056-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test056-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // simple reorder (2) (map test) + @Test + public void test056m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test056-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test056-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // unnamed graph + @Test + public void test057c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test057-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test057-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // unnamed graph (map test) + @Test + public void test057m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test057-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test057-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // unnamed graph with blank node objects + @Test + public void test058c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test058-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test058-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // n-quads parsing + @Test + public void test059c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test059-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test059-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // n-quads escaping + @Test + public void test060c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test060-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test060-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // n-quads escaping (map test) + @Test + public void test060m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test060-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test060-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // same literal value with multiple languages + @Test + public void test061c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test061-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test061-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // same literal value with multiple datatypes + @Test + public void test062c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test062-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test062-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - diamond (with _:b) + // This duplicates #test020, but uses _:b as a blank node prefix + @Test + public void test063c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test063-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test063-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - diamond (with _:b) (map test) + // This duplicates #test020, but uses _:b as a blank node prefix + @Test + public void test063m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test063-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test063-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // blank node - double circle of 3 (0-1-2, reversed) + @Test + public void test064c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test064-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test064-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - double circle of 3 (0-2-1, reversed) + @Test + public void test065c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test065-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test065-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - double circle of 3 (1-0-2, reversed) + @Test + public void test066c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test066-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test066-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - double circle of 3 (1-2-0, reversed) + @Test + public void test067c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test067-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test067-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - double circle of 3 (2-1-0, reversed) + @Test + public void test068c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test068-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test068-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - double circle of 3 (2-0-1, reversed) + @Test + public void test069c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test069-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test069-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // dataset - isomorphic default and iri named + // Isomorphic graphs in default and IRI named graph + @Test + public void test070c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test070-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test070-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // dataset - isomorphic default and iri named (map test) + // Isomorphic graphs in default and IRI named graph + @Test + public void test070m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test070-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test070-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // dataset - isomorphic default and node named + // Isomorphic graphs in default and blank node named graph + @Test + public void test071c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test071-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test071-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // dataset - isomorphic default and node named (map test) + // Isomorphic graphs in default and blank node named graph + @Test + public void test071m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test071-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test071-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // dataset - shared blank nodes + // Blank nodes shared in default and named graph + @Test + public void test072c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test072-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test072-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // dataset - shared blank nodes (map test) + // Blank nodes shared in default and named graph + @Test + public void test072m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test072-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test072-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // dataset - referencing graph name + // Default graph with blank node shared with graph name + @Test + public void test073c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test073-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test073-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // dataset - referencing graph name (map test) + // Default graph with blank node shared with graph name + @Test + public void test073m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test073-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test073-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // poison - Clique Graph (negative test) + // A 10-node Clique of blank node resources all inter-related. 
+ @Test(expected = CanonicalizationException.class) + public void test074c() throws IOException, LoadException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test074-in.nq"); + + // Attempt to create canonical RDF 1.0 format, expecting a failure + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + // This line should trigger the CanonicalizationException + rdfc10.toString(); + } + + // blank node - diamond (uses SHA-384) + // Same as test020 except for using SHA-384 + @Test + public void test075c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test075-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph, HashAlgorithm.SHA_384); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test075-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - diamond (uses SHA-384) (map test) + // Same as test020 except for using SHA-384 + @Test + public void test075m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test075-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph, HashAlgorithm.SHA_384); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test075-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // duplicate ground triple in input + // The duplicate triples must be removed + @Test + public void test076c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test076-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test076-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // duplicate triple with blank node in input + // The duplicate triples must be removed + @Test + public void test077c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test077-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test077-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + +} \ No newline at end of file diff --git a/corese-unit-test/src/test/resources/log4j2.xml b/corese-unit-test/src/test/resources/log4j2.xml index 4cff86b10..d88ae47d7 100644 --- a/corese-unit-test/src/test/resources/log4j2.xml +++ b/corese-unit-test/src/test/resources/log4j2.xml @@ -1,17 +1,21 @@ - + - + - - + + + + + + - + - + \ No newline at end of file From ab1d72aa3726a7a09132cd6e6108ada326484d42 Mon Sep 17 00:00:00 2001 From: corby Date: Tue, 5 Mar 2024 12:13:31 +0100 Subject: [PATCH 029/146] Leverage ldscript function parameter number --- .../java/fr/inria/corese/sparql/triple/parser/ASTExtension.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/ASTExtension.java b/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/ASTExtension.java index 897d544b3..62f20e023 100644 --- a/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/ASTExtension.java +++ b/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/ASTExtension.java @@ -24,7 +24,7 @@ public class ASTExtension implements Extension { private static ASTExtension singleton; - public static int FUNCTION_PARAMETER_MAX = 15; + public static int FUNCTION_PARAMETER_MAX = 20; private static Logger logger = LoggerFactory.getLogger(ASTExtension.class); static final String NL = System.getProperty("line.separator"); public static final String TYPE = ExpType.TYPE_METADATA; From 23f24673c70d7243c26b4d0f5b528807036fb3ce Mon Sep 17 00:00:00 2001 From: corby Date: Tue, 5 Mar 2024 12:14:27 +0100 Subject: [PATCH 030/146] Leverage bnode as named graph for nquad --- .../java/fr/inria/corese/core/load/CreateImpl.java | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/corese-core/src/main/java/fr/inria/corese/core/load/CreateImpl.java b/corese-core/src/main/java/fr/inria/corese/core/load/CreateImpl.java index 968b4d5b7..c3f188f2e 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/load/CreateImpl.java +++ b/corese-core/src/main/java/fr/inria/corese/core/load/CreateImpl.java @@ -102,7 +102,16 @@ public void triple(Atom graph, Atom subject, Atom property, Atom object) { } Node getGraph(Atom graph) { - return graph == null ? addDefaultGraphNode() : addGraph(graph); + //return graph == null ? 
addDefaultGraphNode() : addGraph(graph); + if (graph == null) { + return addDefaultGraphNode(); + } + else if (graph.isBlankOrBlankNode()) { + return addGraph(getID(graph.getLabel()), true); + } + else { + return addGraph(graph); + } } @Override From e0a7c16941546422b8e2ba45491174380e5c3eda Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Wed, 6 Mar 2024 19:35:59 +0100 Subject: [PATCH 031/146] Refactor TripleFormat class, make addPrefix variable public, fix prefix --- .../inria/corese/core/print/TripleFormat.java | 55 +++---------------- 1 file changed, 8 insertions(+), 47 deletions(-) diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/TripleFormat.java b/corese-core/src/main/java/fr/inria/corese/core/print/TripleFormat.java index 011beba6f..479fc2692 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/TripleFormat.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/TripleFormat.java @@ -28,7 +28,7 @@ public class TripleFormat extends RDFFormat { static final String RDF_TYPE = "rdf:type"; static final String TAB = " "; - static final boolean addPrefix = true; + public boolean addPrefix = true; boolean isGraph = false; // when true: display default graph kg:default with embedding graph kg:default @@ -216,14 +216,12 @@ void basicGraphNode(Node gNode) { } private boolean isRdfPrefixNeeded() { - //for (Node node : graph.getGraphNodes()) { - for (Edge edge : graph.getEdges()) { - String pred = nsm.toPrefix(edge.getEdgeNode().getLabel(), !addPrefix); - if (pred.startsWith("rdf:") && !pred.equals(RDF_TYPE)) { - return true; - } + for (Edge edge : graph.getEdges()) { + String pred = nsm.toPrefix(edge.getEdgeNode().getLabel(), !addPrefix); + if (pred.startsWith("rdf:") && !pred.equals(RDF_TYPE)) { + return true; } - //} + } return false; } @@ -231,12 +229,6 @@ private boolean isRdfPrefixNeeded() { void header(StringBuilder bb) { link(bb); bb.append(nsm.toString(PREFIX, false, false)); -// if (isRdfPrefixNeeded()) { -// bb.append(nsm.toString(PREFIX, false, false)); -// } else { -// // Si le préfixe rdf: n'est pas nécessaire, supprimez-le de la sortie -// bb.append(nsm.toString(PREFIX, false, false).replaceAll("@prefix rdf:.*\n", "")); -// } } void link(StringBuilder bb) { @@ -266,10 +258,6 @@ void print(Node gNode, Node node) { } if (first) { first = false; -// if (isBlankNode) { -// sdisplay("["); -// } -// else { subject(edge); sdisplay(SPACE); @@ -285,9 +273,6 @@ void print(Node gNode, Node node) { } if (!first) { -// if (isBlankNode) { -// sdisplay("]"); -// } sdisplay(DOT); sdisplay(NL); sdisplay(NL); @@ -312,17 +297,6 @@ void subject(Edge ent) { } } -// void predicate(Node node) { -// String pred = nsm.toPrefix(node.getLabel(), !addPrefix); -// if (pred.equals(RDF_TYPE)) { -// sdisplay("a"); -// } else if (pred.equals(node.getLabel())) { // Si l'URI n'est pas abrégée -// uri(node.getLabel()); // Utiliser la méthode uri pour ajouter des chevrons si nécessaire -// } else { // Si l'URI est abrégée -// sdisplay(pred); -// } -// } - void predicate(Node node) { if (node.getLabel().equals(RDF.TYPE)) { sdisplay("a"); @@ -345,8 +319,8 @@ void node(Node node, boolean rec) { } else if (dt.isBlank()) { sdisplay(dt.getLabel()); } else { - //uri(dt.getLabel()); - sdisplay(dt.toSparql(true, false, false, nsm)); + // uri(dt.getLabel()); + sdisplay(dt.toSparql(true, false, !addPrefix, nsm)); } } @@ -376,19 +350,6 @@ void basicTriple(Node node, Edge edge, boolean rec) { node(edge.getObjectNode(), true); } - // void triple2(Node node, Edge 
edge, boolean rec) { - // if (edge.isNested() || hasNestedTriple(edge) || rec) { - // nestedTriple(node, edge, rec); - // } else { - // basicTriple(node, edge, rec); - // } - // } - // - - // void basicTriple(Node node, Edge edge) { - // basicTriple(node, edge, false); - // } - boolean hasNestedTriple(Edge edge) { return edge.getSubjectValue().isTripleWithEdge() || edge.getObjectValue().isTripleWithEdge(); } From 3114bbae0c728d74fa9cabbf324c70395068ddff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Wed, 6 Mar 2024 19:37:05 +0100 Subject: [PATCH 032/146] Add an EarlRepportGenerator --- .../EarlRepportGenerator.java | 270 ++++++++++++++++++ 1 file changed, 270 insertions(+) create mode 100644 corese-unit-test/src/main/java/fr/inria/corese/w3cEarlRepportGenerator/EarlRepportGenerator.java diff --git a/corese-unit-test/src/main/java/fr/inria/corese/w3cEarlRepportGenerator/EarlRepportGenerator.java b/corese-unit-test/src/main/java/fr/inria/corese/w3cEarlRepportGenerator/EarlRepportGenerator.java new file mode 100644 index 000000000..d76b363d1 --- /dev/null +++ b/corese-unit-test/src/main/java/fr/inria/corese/w3cEarlRepportGenerator/EarlRepportGenerator.java @@ -0,0 +1,270 @@ +package fr.inria.corese.w3cEarlRepportGenerator; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; + +import org.apache.logging.log4j.Logger; + +import fr.inria.corese.core.Graph; +import fr.inria.corese.core.print.TripleFormat; +import fr.inria.corese.core.query.QueryProcess; +import fr.inria.corese.sparql.exceptions.EngineException; +import fr.inria.corese.sparql.triple.parser.NSManager; + +/** + * This class generates an EARL report for the Corese software. + * + * The EARL report is a RDF document that describes the conformance of the + * Corese software to the W3C standards. 
+ * + * @see Instructions for + * submitting implementation reports + * + */ +public class EarlRepportGenerator { + + private static final Logger logger = org.apache.logging.log4j.LogManager.getLogger(EarlRepportGenerator.class); + + private final Graph graph; + // eg "2023-01-25T10:18:04-08:00" + private final DateTimeFormatter dtf = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ssXXX"); + + private final String authorUri = "http://ns.inria.fr/remi.ceres#me"; + private final String athorEmail = "remi.ceres@inria.fr"; + private final String authorName = "Rémi Cérès"; + + private final String organizationHomepage = "https://www.inria.fr/"; + + private final String softwareUri = "https://github.com/Wimmics/corese"; + private final String softwareName = "Corese"; + private final String softwareDescription = "Software platform implementing and extending the standards of the Semantic Web."; + private final String softwareLicense = "http://www.cecill.info/licences/Licence_CeCILL-C_V1-en.html"; + private final String softwareHomepage = "https://project.inria.fr/corese/"; + private final String softwareMailingList = "mailto:corese-users@inria.fr"; + private final String softwareDownload = "https://project.inria.fr/corese/download/"; + private final String softwareBugDatabase = "https://github.com/Wimmics/corese/issues"; + private final String softwareBlog = "https://github.com/Wimmics/corese/discussions/"; + private final String softwareProgrammingLanguage = "Java"; + + private final String releaseURI = "1d76a19dccfbdaecf63544e80a7c7a45e54bbc89"; + private final String releaseDate = "2024-03-05"; + + private final Path reportDir = Path.of("corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf"); + private final Path inputReportPath = reportDir.resolve("testReport.csv"); + private final Path outputReportPath = reportDir.resolve("earlReport.ttl"); + + /** + * Constructor for the EarlRepportGenerator class. + */ + public EarlRepportGenerator() { + this.graph = Graph.create(); + } + + /** + * Generates the EARL report and writes it to the specified output directory. 
+ * + * @param outputDir the output directory where to write the EARL report + */ + public void generate() { + + // Insert the document description in the graph + execSPARQL(insertQueryDescribeDocument()); + + // Insert the developer description in the graph + execSPARQL(insertQueryDescribeDeveloper()); + + // Insert the software description in the graph + execSPARQL(insertQueryDescribeSoftware()); + + // Insert the release description in the graph + execSPARQL(insertQueryDescribeRelease()); + + // Generate the EARL report in turtle format + TripleFormat format = TripleFormat.create(graph, this.getNSM()); + format.addPrefix = false; + + // Add the test results to the EARL report + try { + // read line by line the test report file + // for each line, add the test result to the EARL report + + for (String line : Files.readAllLines(inputReportPath)) { + String[] values = line.split(","); + String testUri = values[0]; + String testTime = values[1]; + String testResult = values[2]; + + execSPARQL(insertQueryDescribeTestResult(testUri, testTime, testResult)); + } + + } catch (IOException e) { + logger.error("Error while reading test report file: " + inputReportPath.toString(), e); + e.printStackTrace(); + } + + // Write the EARL report to the output directory + try { + format.write(outputReportPath.toString()); + } catch (IOException e) { + logger.error("Error while writing EARL report to file: " + outputReportPath.toString(), e); + e.printStackTrace(); + } + + } + + /** + * Returns a NSManager with the prefixes used in the EARL report. + * + * @return a NSManager with the prefixes used in the EARL report + */ + private NSManager getNSM() { + NSManager nsm = NSManager.create(); + nsm.setRecord(true); + nsm.definePrefix("earl", "https://www.w3.org/ns/earl#"); + nsm.definePrefix("dc", "http://purl.org/dc/terms/"); + nsm.definePrefix("foaf", "http://xmlns.com/foaf/0.1/"); + nsm.definePrefix("xsd", "http://www.w3.org/2001/XMLSchema#"); + nsm.definePrefix("doap", "http://usefulinc.com/ns/doap#"); + return nsm; + } + + /** + * Executes a SPARQL query on the graph. + * + * @param query the SPARQL query to execute + */ + private void execSPARQL(String query) { + QueryProcess exec = QueryProcess.create(graph); + try { + exec.query(query); + } catch (EngineException e) { + logger.error("Error while executing SPARQL query: " + query, e); + e.printStackTrace(); + } + } + + /** + * Builds a SPARQL query to insert the document description in the graph. + * + * @return a SPARQL query to insert the document description in the graph + */ + private String insertQueryDescribeDocument() { + + // Calculate the current date and time + String now = this.dtf.format(ZonedDateTime.now()); + + // Build the SPARQL query + StringBuilder sb = new StringBuilder(); + sb.append("PREFIX earl: \n"); + sb.append("PREFIX dc: \n"); + sb.append("PREFIX foaf: \n"); + sb.append("PREFIX xsd: \n"); + sb.append("INSERT DATA {\n"); + sb.append(" <> foaf:primaryTopic <").append(softwareUri).append("> ;\n"); + sb.append(" dc:issued \"").append(now).append("\"^^xsd:dateTime ;\n"); + sb.append(" foaf:maker <").append(authorUri).append("> .\n"); + sb.append("}\n"); + return sb.toString(); + } + + /** + * Builds a SPARQL query to insert the developer description in the graph. 
+ * + * @return a SPARQL query to insert the developer description in the graph + */ + private String insertQueryDescribeDeveloper() { + StringBuilder sb = new StringBuilder(); + sb.append("PREFIX earl: \n"); + sb.append("PREFIX foaf: \n"); + sb.append("INSERT DATA {\n"); + sb.append(" <").append(authorUri).append("> a foaf:Person , earl:Assertor ;\n"); + sb.append(" foaf:name \"").append(authorName).append("\" ;\n"); + sb.append(" foaf:mbox ;\n"); + sb.append(" foaf:workplaceHomepage <").append(organizationHomepage).append("> .\n"); + + sb.append("}\n"); + return sb.toString(); + } + + /** + * Builds a SPARQL query to insert the software description in the graph. + * + * @return a SPARQL query to insert the software description in the graph + */ + private String insertQueryDescribeSoftware() { + StringBuilder sb = new StringBuilder(); + sb.append("PREFIX earl: \n"); + sb.append("PREFIX doap: \n"); + sb.append("INSERT DATA {\n"); + sb.append(" <").append(softwareUri).append("> a doap:Project, earl:Software, earl:TestSubject ;\n"); + sb.append(" doap:name \"").append(softwareName).append("\" ;\n"); + sb.append(" doap:release <").append(softwareUri).append("commit/").append(releaseURI).append("> ;\n"); + sb.append(" doap:developer <").append(authorUri).append("> ;\n"); + sb.append(" doap:homepage <").append(softwareHomepage).append("> ;\n"); + sb.append(" doap:description \"").append(softwareDescription).append("\"@en ;\n"); + sb.append(" doap:license <").append(softwareLicense).append("> ;\n"); + sb.append(" doap:download-page <").append(softwareDownload).append("> ;\n"); + sb.append(" doap:bug-database <").append(softwareBugDatabase).append("> ;\n"); + sb.append(" doap:mailing-list <").append(softwareMailingList).append("> ;\n"); + sb.append(" doap:blog <").append(softwareBlog).append("> ;\n"); + sb.append(" doap:programming-language \"").append(softwareProgrammingLanguage).append("\" .\n"); + + sb.append("}\n"); + return sb.toString(); + } + + /** + * Builds a SPARQL query to insert the release description in the graph. + * + * @return a SPARQL query to insert the release description in the graph + */ + private String insertQueryDescribeRelease() { + StringBuilder sb = new StringBuilder(); + sb.append("PREFIX doap: \n"); + sb.append("INSERT DATA {\n"); + sb.append(" <").append(softwareUri).append("/commit/").append(releaseURI).append("> doap:name \"") + .append(softwareName).append(" #").append(releaseURI.substring(0, 7)).append("\" ;\n"); + sb.append(" doap:revision \"#").append(releaseURI.substring(0, 7)).append("\" ;\n"); + sb.append(" doap:created \"").append(releaseDate).append("\"^^xsd:date ;\n"); + sb.append("}\n"); + return sb.toString(); + } + + /** + * Builds a SPARQL query to insert the test result in the graph. 
+ * + * @param testUri the URI of the test + * @param testTime the time when the test was executed + * @param testResult the result of the test + * @return a SPARQL query to insert the test result in the graph + */ + private String insertQueryDescribeTestResult(String testUri, String testTime, String testResult) { + StringBuilder sb = new StringBuilder(); + sb.append("PREFIX earl: \n"); + sb.append("PREFIX dc: \n"); + sb.append("PREFIX xsd: \n"); + sb.append("INSERT DATA {\n"); + sb.append(" [ a earl:Assertion ;\n"); + sb.append(" earl:assertedBy <").append(authorUri).append("> ;\n"); + sb.append(" earl:subject <").append(softwareUri).append("> ;\n"); + sb.append(" earl:test <").append(testUri).append("> ;\n"); + sb.append(" earl:result [ a earl:TestResult ;\n"); + sb.append(" earl:outcome ").append("<").append(testResult).append(">").append(" ;\n"); + sb.append(" dc:date \"").append(testTime).append("\"^^xsd:dateTime\n"); + sb.append(" ] ;\n"); + sb.append(" earl:mode earl:automatic\n"); + sb.append(" ] .\n"); + sb.append("}\n"); + return sb.toString(); + } + + public static void main(String[] args) { + EarlRepportGenerator earlRepportGenerator = new EarlRepportGenerator(); + earlRepportGenerator.generate(); + } + +} \ No newline at end of file From d0ac8831596ea1783e60d166ab028c8d37426ed3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Wed, 6 Mar 2024 19:38:44 +0100 Subject: [PATCH 033/146] Move w3cJunitTestsGenerator --- .../JUnitTestFileGenerator.java | 106 ++++++++++++++++-- .../Main.java | 2 +- .../W3cTestsGenerator.java | 12 +- .../w3cTests/IW3cTest.java | 2 +- .../w3cTests/factory/W3cTestFactory.java | 10 +- .../implementations/RDFC10EvalTest.java | 4 +- .../implementations/RDFC10MapTest.java | 4 +- .../RDFC10NegativeEvalTest.java | 4 +- 8 files changed, 117 insertions(+), 27 deletions(-) rename corese-unit-test/src/main/java/fr/inria/corese/{w3cTestsGenerator => w3cJunitTestsGenerator}/JUnitTestFileGenerator.java (51%) rename corese-unit-test/src/main/java/fr/inria/corese/{w3cTestsGenerator => w3cJunitTestsGenerator}/Main.java (97%) rename corese-unit-test/src/main/java/fr/inria/corese/{w3cTestsGenerator => w3cJunitTestsGenerator}/W3cTestsGenerator.java (92%) rename corese-unit-test/src/main/java/fr/inria/corese/{w3cTestsGenerator => w3cJunitTestsGenerator}/w3cTests/IW3cTest.java (87%) rename corese-unit-test/src/main/java/fr/inria/corese/{w3cTestsGenerator => w3cJunitTestsGenerator}/w3cTests/factory/W3cTestFactory.java (93%) rename corese-unit-test/src/main/java/fr/inria/corese/{w3cTestsGenerator => w3cJunitTestsGenerator}/w3cTests/implementations/RDFC10EvalTest.java (96%) rename corese-unit-test/src/main/java/fr/inria/corese/{w3cTestsGenerator => w3cJunitTestsGenerator}/w3cTests/implementations/RDFC10MapTest.java (96%) rename corese-unit-test/src/main/java/fr/inria/corese/{w3cTestsGenerator => w3cJunitTestsGenerator}/w3cTests/implementations/RDFC10NegativeEvalTest.java (94%) diff --git a/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/JUnitTestFileGenerator.java b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/JUnitTestFileGenerator.java similarity index 51% rename from corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/JUnitTestFileGenerator.java rename to corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/JUnitTestFileGenerator.java index fb61f2d72..c05813618 100644 --- a/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/JUnitTestFileGenerator.java +++ 
b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/JUnitTestFileGenerator.java @@ -1,4 +1,4 @@ -package fr.inria.corese.w3cTestsGenerator; +package fr.inria.corese.w3cJunitTestsGenerator; import java.io.IOException; import java.net.URI; @@ -14,7 +14,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import fr.inria.corese.w3cTestsGenerator.w3cTests.IW3cTest; +import fr.inria.corese.w3cJunitTestsGenerator.w3cTests.IW3cTest; /** * Generates a JUnit test file for the W3C test suite. @@ -26,23 +26,22 @@ public class JUnitTestFileGenerator { private final URI manifestUri; private final String testName; private final List tests; + private final Path exportPath; - public JUnitTestFileGenerator(String testName, URI manifestUri, List tests) { + public JUnitTestFileGenerator(String testName, URI manifestUri, Path exportPath, List tests) { this.testName = testName; this.manifestUri = manifestUri; + this.exportPath = exportPath.resolve(testName); this.tests = tests; } /** * Generates a JUnit test file for the W3C test suite. - * - * @param testsPath The path to the directory where the test file should be - * generated. */ - public void generate(Path testsPath) { + public void generate() { // Initialize directories - Path testDirectory = this.createDirectory(testsPath.resolve(testName)); + Path testDirectory = this.createDirectory(this.exportPath); // Generate file test String fileName = testName + "Test.java"; @@ -119,6 +118,7 @@ private String generateTestFileContent(String path, String fileName) { for (IW3cTest test : tests) { imports.addAll(test.getImports()); } + imports.addAll(this.defineImports()); imports.stream().sorted().forEach(imp -> content.append("import ").append(imp).append(";\n")); content.append("\n"); @@ -147,6 +147,9 @@ private String generateTestFileContent(String path, String fileName) { content.append("\n"); content.append("\n"); + // Watcher + content.append(this.generateWatcher()); + // Test methods for (IW3cTest test : tests) { content.append(test.generate()); @@ -173,4 +176,91 @@ private String getPackage(String path, String fileName) { return "package " + packagePath + ";"; } + /** + * Generates the watcher for the test file. + * + * @return The watcher for the test file. 
+ */ + private String generateWatcher() { + StringBuilder watcher = new StringBuilder(); + + // Create a file testReport.csv in the directory of the test file + watcher.append(" private static final String TEST_REPORT_FILE = \"" + + this.exportPath.resolve("testReport.csv") + "\";\n"); + watcher.append(" private static final String MANIFEST_URI = \"" + + manifestUri.toString().substring(0, manifestUri.toString().lastIndexOf(".")) + "\";\n"); + watcher.append(" private static final String EARL = \"https://www.w3.org/ns/earl#\";\n"); + watcher.append("\n"); + + // Function to write the test report to the file testReport.csv + // Format: manifestUri#testName, datetime, https://www.w3.org/ns/earl#status + watcher.append(" /**\n"); + watcher.append(" * Writes the test report to the file testReport.csv.\n"); + watcher.append(" *\n"); + watcher.append(" * @param testName The name of the test.\n"); + watcher.append(" * @param success The status of the test.\n"); + watcher.append(" */\n"); + watcher.append(" private void writeTestReport(String testName, String success) {\n"); + watcher.append(" try {\n"); + watcher.append(" Path testReportPath = Paths.get(TEST_REPORT_FILE);\n"); + watcher.append( + " DateTimeFormatter dtf = DateTimeFormatter.ofPattern(\"yyyy-MM-dd'T'HH:mm:ssXXX\");\n"); + watcher.append( + " Files.write(testReportPath, (MANIFEST_URI + \"#\" + testName + \",\" + dtf.format(ZonedDateTime.now()) + \",\" + EARL + success + \"\\n\").getBytes(), StandardOpenOption.APPEND);\n"); + watcher.append(" } catch (IOException e) {\n"); + watcher.append(" e.printStackTrace();\n"); + watcher.append(" }\n"); + watcher.append(" }\n"); + watcher.append("\n"); + + watcher.append(" @Rule\n"); + watcher.append(" public TestWatcher watcher = new TestWatcher() {\n"); + watcher.append("\n"); + watcher.append(" @Override\n"); + watcher.append(" protected void failed(Throwable e, Description description) {\n"); + watcher.append(" writeTestReport(description.getMethodName(), \"failed\");\n"); + watcher.append(" }\n"); + watcher.append("\n"); + watcher.append(" @Override\n"); + watcher.append(" protected void succeeded(Description description) {\n"); + watcher.append(" writeTestReport(description.getMethodName(), \"passed\");\n"); + watcher.append(" }\n"); + watcher.append("\n"); + watcher.append(" @Override\n"); + watcher.append(" protected void skipped(AssumptionViolatedException e, Description description) {\n"); + watcher.append(" writeTestReport(description.getMethodName(), \"untested\");\n"); + watcher.append(" }\n"); + watcher.append(" };\n"); + watcher.append("\n"); + watcher.append(" // Create and clear the test report file\n"); + watcher.append(" @BeforeClass\n"); + watcher.append(" public static void createTestReportFile() {\n"); + watcher.append(" try {\n"); + watcher.append(" Path testReportPath = Paths.get(TEST_REPORT_FILE);\n"); + watcher.append(" Files.write(testReportPath, \"\".getBytes());\n"); + watcher.append(" } catch (IOException e) {\n"); + watcher.append(" e.printStackTrace();\n"); + watcher.append(" }\n"); + watcher.append(" }\n"); + watcher.append("\n"); + + return watcher.toString(); + } + + private Set defineImports() { + Set imports = new HashSet<>(); + imports.add("java.nio.file.Path"); + imports.add("java.nio.file.Paths"); + imports.add("java.nio.file.Files"); + imports.add("java.nio.file.StandardOpenOption"); + imports.add("org.junit.Rule"); + imports.add("org.junit.rules.TestWatcher"); + imports.add("org.junit.runner.Description"); + 
imports.add("org.junit.AssumptionViolatedException"); + imports.add("org.junit.BeforeClass"); + imports.add("java.time.format.DateTimeFormatter"); + imports.add("java.time.ZonedDateTime"); + return imports; + } + } diff --git a/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/Main.java b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/Main.java similarity index 97% rename from corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/Main.java rename to corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/Main.java index 87f527c6e..c2baba4ab 100644 --- a/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/Main.java +++ b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/Main.java @@ -1,4 +1,4 @@ -package fr.inria.corese.w3cTestsGenerator; +package fr.inria.corese.w3cJunitTestsGenerator; import java.net.URI; import java.nio.file.Path; diff --git a/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/W3cTestsGenerator.java b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/W3cTestsGenerator.java similarity index 92% rename from corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/W3cTestsGenerator.java rename to corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/W3cTestsGenerator.java index 0e605c820..11260ab05 100644 --- a/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/W3cTestsGenerator.java +++ b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/W3cTestsGenerator.java @@ -1,4 +1,4 @@ -package fr.inria.corese.w3cTestsGenerator; +package fr.inria.corese.w3cJunitTestsGenerator; import java.net.URI; import java.nio.file.Path; @@ -13,9 +13,9 @@ import fr.inria.corese.core.query.QueryProcess; import fr.inria.corese.kgram.core.Mapping; import fr.inria.corese.kgram.core.Mappings; -import fr.inria.corese.w3cTestsGenerator.w3cTests.IW3cTest; -import fr.inria.corese.w3cTestsGenerator.w3cTests.factory.W3cTestFactory; -import fr.inria.corese.w3cTestsGenerator.w3cTests.factory.W3cTestFactory.TestCreationException; +import fr.inria.corese.w3cJunitTestsGenerator.w3cTests.IW3cTest; +import fr.inria.corese.w3cJunitTestsGenerator.w3cTests.factory.W3cTestFactory; +import fr.inria.corese.w3cJunitTestsGenerator.w3cTests.factory.W3cTestFactory.TestCreationException; /** * Generates JUnit test cases from W3C test manifest files. 
@@ -53,8 +53,8 @@ public void generate() { List testCases = getListOfTestCases(graph); // Generate JUnit test file - JUnitTestFileGenerator generator = new JUnitTestFileGenerator(testName, manifestUri, testCases); - generator.generate(testsPath); + JUnitTestFileGenerator generator = new JUnitTestFileGenerator(testName, manifestUri, testsPath, testCases); + generator.generate(); } //////////////////////// diff --git a/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/IW3cTest.java b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/IW3cTest.java similarity index 87% rename from corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/IW3cTest.java rename to corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/IW3cTest.java index 03ec2cce8..4ca5dc8b7 100644 --- a/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/IW3cTest.java +++ b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/IW3cTest.java @@ -1,4 +1,4 @@ -package fr.inria.corese.w3cTestsGenerator.w3cTests; +package fr.inria.corese.w3cJunitTestsGenerator.w3cTests; import java.util.Set; diff --git a/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/factory/W3cTestFactory.java b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/factory/W3cTestFactory.java similarity index 93% rename from corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/factory/W3cTestFactory.java rename to corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/factory/W3cTestFactory.java index fb2c59326..222af0443 100644 --- a/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/factory/W3cTestFactory.java +++ b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/factory/W3cTestFactory.java @@ -1,4 +1,4 @@ -package fr.inria.corese.w3cTestsGenerator.w3cTests.factory; +package fr.inria.corese.w3cJunitTestsGenerator.w3cTests.factory; import java.net.URI; import java.util.Map; @@ -8,10 +8,10 @@ import fr.inria.corese.core.query.QueryProcess; import fr.inria.corese.kgram.core.Mappings; import fr.inria.corese.sparql.exceptions.EngineException; -import fr.inria.corese.w3cTestsGenerator.w3cTests.IW3cTest; -import fr.inria.corese.w3cTestsGenerator.w3cTests.implementations.RDFC10EvalTest; -import fr.inria.corese.w3cTestsGenerator.w3cTests.implementations.RDFC10MapTest; -import fr.inria.corese.w3cTestsGenerator.w3cTests.implementations.RDFC10NegativeEvalTest; +import fr.inria.corese.w3cJunitTestsGenerator.w3cTests.IW3cTest; +import fr.inria.corese.w3cJunitTestsGenerator.w3cTests.implementations.RDFC10EvalTest; +import fr.inria.corese.w3cJunitTestsGenerator.w3cTests.implementations.RDFC10MapTest; +import fr.inria.corese.w3cJunitTestsGenerator.w3cTests.implementations.RDFC10NegativeEvalTest; /** * Factory for creating W3C tests. 
diff --git a/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/implementations/RDFC10EvalTest.java b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/implementations/RDFC10EvalTest.java similarity index 96% rename from corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/implementations/RDFC10EvalTest.java rename to corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/implementations/RDFC10EvalTest.java index 2fb1b335c..d5f9a3458 100644 --- a/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/implementations/RDFC10EvalTest.java +++ b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/implementations/RDFC10EvalTest.java @@ -1,10 +1,10 @@ -package fr.inria.corese.w3cTestsGenerator.w3cTests.implementations; +package fr.inria.corese.w3cJunitTestsGenerator.w3cTests.implementations; import java.net.URI; import java.util.Set; import fr.inria.corese.core.print.rdfc10.HashingUtility.HashAlgorithm; -import fr.inria.corese.w3cTestsGenerator.w3cTests.IW3cTest; +import fr.inria.corese.w3cJunitTestsGenerator.w3cTests.IW3cTest; /** * Represents a test for the RDFC10EvalTest type. diff --git a/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/implementations/RDFC10MapTest.java b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/implementations/RDFC10MapTest.java similarity index 96% rename from corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/implementations/RDFC10MapTest.java rename to corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/implementations/RDFC10MapTest.java index 043ac93a3..2f90d8e03 100644 --- a/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/implementations/RDFC10MapTest.java +++ b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/implementations/RDFC10MapTest.java @@ -1,10 +1,10 @@ -package fr.inria.corese.w3cTestsGenerator.w3cTests.implementations; +package fr.inria.corese.w3cJunitTestsGenerator.w3cTests.implementations; import java.net.URI; import java.util.Set; import fr.inria.corese.core.print.rdfc10.HashingUtility.HashAlgorithm; -import fr.inria.corese.w3cTestsGenerator.w3cTests.IW3cTest; +import fr.inria.corese.w3cJunitTestsGenerator.w3cTests.IW3cTest; /** * Represents a test for the RDFC10MapTest type. 
diff --git a/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/implementations/RDFC10NegativeEvalTest.java b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/implementations/RDFC10NegativeEvalTest.java similarity index 94% rename from corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/implementations/RDFC10NegativeEvalTest.java rename to corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/implementations/RDFC10NegativeEvalTest.java index f24ce9eda..ae6e7840c 100644 --- a/corese-unit-test/src/main/java/fr/inria/corese/w3cTestsGenerator/w3cTests/implementations/RDFC10NegativeEvalTest.java +++ b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/implementations/RDFC10NegativeEvalTest.java @@ -1,9 +1,9 @@ -package fr.inria.corese.w3cTestsGenerator.w3cTests.implementations; +package fr.inria.corese.w3cJunitTestsGenerator.w3cTests.implementations; import java.net.URI; import java.util.Set; -import fr.inria.corese.w3cTestsGenerator.w3cTests.IW3cTest; +import fr.inria.corese.w3cJunitTestsGenerator.w3cTests.IW3cTest; /** * Represents a test for the RDFC10NegativeEvalTest type. From 57fc9ba7944c96546c0ffa4c8e9c0b1486b21491 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Wed, 6 Mar 2024 19:38:57 +0100 Subject: [PATCH 034/146] Update log4j2.xml configuration --- corese-unit-test/src/main/resources/log4j2.xml | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/corese-unit-test/src/main/resources/log4j2.xml b/corese-unit-test/src/main/resources/log4j2.xml index a2bc1d655..b810acdb3 100644 --- a/corese-unit-test/src/main/resources/log4j2.xml +++ b/corese-unit-test/src/main/resources/log4j2.xml @@ -6,7 +6,14 @@ - + + + + + + + + From 725b0c216fe340d13602f01c9f140352f9d1f484 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Wed, 6 Mar 2024 19:40:22 +0100 Subject: [PATCH 035/146] Add test report generation and update test file --- .../w3c/canonicalRdf/canonicalRdfTest.java | 63 ++++++++++++++++++- 1 file changed, 62 insertions(+), 1 deletion(-) diff --git a/corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf/canonicalRdfTest.java b/corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf/canonicalRdfTest.java index e0ca3db50..23b015b48 100644 --- a/corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf/canonicalRdfTest.java +++ b/corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf/canonicalRdfTest.java @@ -11,18 +11,79 @@ import java.io.IOException; import java.net.URISyntaxException; import java.net.URL; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardOpenOption; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; import java.util.Map; import java.util.Scanner; +import org.junit.AssumptionViolatedException; +import org.junit.BeforeClass; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.TestWatcher; +import org.junit.runner.Description; import static org.junit.Assert.assertEquals; /** * Auto-generated JUnit test file for the W3C test suite: https://w3c.github.io/rdf-canon/tests/manifest.ttl * This file was automatically generated by JUnitTestFileGenerator.java. 
- * Generation date: 2024-03-04, Time: 16:58:04 Europe/Paris + * Generation date: 2024-03-06, Time: 17:24:06 Europe/Paris */ public class canonicalRdfTest { + private static final String TEST_REPORT_FILE = "/home/rceres/Documents/projects/Corese/code/corese/corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf/testReport.csv"; + private static final String MANIFEST_URI = "https://w3c.github.io/rdf-canon/tests/manifest"; + private static final String EARL = "https://www.w3.org/ns/earl#"; + + /** + * Writes the test report to the file testReport.csv. + * + * @param testName The name of the test. + * @param success The status of the test. + */ + private void writeTestReport(String testName, String success) { + try { + Path testReportPath = Paths.get(TEST_REPORT_FILE); + DateTimeFormatter dtf = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ssXXX"); + Files.write(testReportPath, (MANIFEST_URI + "#" + testName + "," + dtf.format(ZonedDateTime.now()) + "," + EARL + success + "\n").getBytes(), StandardOpenOption.APPEND); + } catch (IOException e) { + e.printStackTrace(); + } + } + + @Rule + public TestWatcher watcher = new TestWatcher() { + + @Override + protected void failed(Throwable e, Description description) { + writeTestReport(description.getMethodName(), "failed"); + } + + @Override + protected void succeeded(Description description) { + writeTestReport(description.getMethodName(), "passed"); + } + + @Override + protected void skipped(AssumptionViolatedException e, Description description) { + writeTestReport(description.getMethodName(), "untested"); + } + }; + + // Create and clear the test report file + @BeforeClass + public static void createTestReportFile() { + try { + Path testReportPath = Paths.get(TEST_REPORT_FILE); + Files.write(testReportPath, "".getBytes()); + } catch (IOException e) { + e.printStackTrace(); + } + } + // simple id @Test public void test001c() throws IOException, LoadException, URISyntaxException { From a1aa11ed4eedb88055da41f0f25febbec509f30c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Wed, 6 Mar 2024 19:41:19 +0100 Subject: [PATCH 036/146] Update .gitignore file to include flatpak-builder and test report files --- .gitignore | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 229f1f182..d872bdc3e 100644 --- a/.gitignore +++ b/.gitignore @@ -99,4 +99,10 @@ earl-report-test.ttl ############################## ## flatpak ############################## -.flatpak-builder/ \ No newline at end of file +.flatpak-builder/ + +############################## +## test report +############################## +earlReport.ttl +testReport.csv \ No newline at end of file From fb1aded381b9775882627c7cdb4085c64ae152da Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Wed, 6 Mar 2024 19:47:25 +0100 Subject: [PATCH 037/146] Update W3C EARL namespace URL --- .../w3cEarlRepportGenerator/EarlRepportGenerator.java | 10 +++++----- .../w3cJunitTestsGenerator/JUnitTestFileGenerator.java | 4 ++-- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/corese-unit-test/src/main/java/fr/inria/corese/w3cEarlRepportGenerator/EarlRepportGenerator.java b/corese-unit-test/src/main/java/fr/inria/corese/w3cEarlRepportGenerator/EarlRepportGenerator.java index d76b363d1..3d6086eb2 100644 --- a/corese-unit-test/src/main/java/fr/inria/corese/w3cEarlRepportGenerator/EarlRepportGenerator.java +++ 
b/corese-unit-test/src/main/java/fr/inria/corese/w3cEarlRepportGenerator/EarlRepportGenerator.java @@ -124,7 +124,7 @@ public void generate() { private NSManager getNSM() { NSManager nsm = NSManager.create(); nsm.setRecord(true); - nsm.definePrefix("earl", "https://www.w3.org/ns/earl#"); + nsm.definePrefix("earl", "http://www.w3.org/ns/earl#"); nsm.definePrefix("dc", "http://purl.org/dc/terms/"); nsm.definePrefix("foaf", "http://xmlns.com/foaf/0.1/"); nsm.definePrefix("xsd", "http://www.w3.org/2001/XMLSchema#"); @@ -159,7 +159,7 @@ private String insertQueryDescribeDocument() { // Build the SPARQL query StringBuilder sb = new StringBuilder(); - sb.append("PREFIX earl: \n"); + sb.append("PREFIX earl: \n"); sb.append("PREFIX dc: \n"); sb.append("PREFIX foaf: \n"); sb.append("PREFIX xsd: \n"); @@ -178,7 +178,7 @@ private String insertQueryDescribeDocument() { */ private String insertQueryDescribeDeveloper() { StringBuilder sb = new StringBuilder(); - sb.append("PREFIX earl: \n"); + sb.append("PREFIX earl: \n"); sb.append("PREFIX foaf: \n"); sb.append("INSERT DATA {\n"); sb.append(" <").append(authorUri).append("> a foaf:Person , earl:Assertor ;\n"); @@ -197,7 +197,7 @@ private String insertQueryDescribeDeveloper() { */ private String insertQueryDescribeSoftware() { StringBuilder sb = new StringBuilder(); - sb.append("PREFIX earl: \n"); + sb.append("PREFIX earl: \n"); sb.append("PREFIX doap: \n"); sb.append("INSERT DATA {\n"); sb.append(" <").append(softwareUri).append("> a doap:Project, earl:Software, earl:TestSubject ;\n"); @@ -244,7 +244,7 @@ private String insertQueryDescribeRelease() { */ private String insertQueryDescribeTestResult(String testUri, String testTime, String testResult) { StringBuilder sb = new StringBuilder(); - sb.append("PREFIX earl: \n"); + sb.append("PREFIX earl: \n"); sb.append("PREFIX dc: \n"); sb.append("PREFIX xsd: \n"); sb.append("INSERT DATA {\n"); diff --git a/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/JUnitTestFileGenerator.java b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/JUnitTestFileGenerator.java index c05813618..9c6878a80 100644 --- a/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/JUnitTestFileGenerator.java +++ b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/JUnitTestFileGenerator.java @@ -189,11 +189,11 @@ private String generateWatcher() { + this.exportPath.resolve("testReport.csv") + "\";\n"); watcher.append(" private static final String MANIFEST_URI = \"" + manifestUri.toString().substring(0, manifestUri.toString().lastIndexOf(".")) + "\";\n"); - watcher.append(" private static final String EARL = \"https://www.w3.org/ns/earl#\";\n"); + watcher.append(" private static final String EARL = \"http://www.w3.org/ns/earl#\";\n"); watcher.append("\n"); // Function to write the test report to the file testReport.csv - // Format: manifestUri#testName, datetime, https://www.w3.org/ns/earl#status + // Format: manifestUri#testName, datetime, http://www.w3.org/ns/earl#status watcher.append(" /**\n"); watcher.append(" * Writes the test report to the file testReport.csv.\n"); watcher.append(" *\n"); From 160645274c021c59eaebc113054ec55d0be8995b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Thu, 7 Mar 2024 09:19:09 +0100 Subject: [PATCH 038/146] Update author information and release details --- .../EarlRepportGenerator.java | 15 +++++---------- .../corese/w3c/canonicalRdf/canonicalRdfTest.java | 4 ++-- 2 files changed, 7 insertions(+), 12 
deletions(-) diff --git a/corese-unit-test/src/main/java/fr/inria/corese/w3cEarlRepportGenerator/EarlRepportGenerator.java b/corese-unit-test/src/main/java/fr/inria/corese/w3cEarlRepportGenerator/EarlRepportGenerator.java index 3d6086eb2..da11cf1c6 100644 --- a/corese-unit-test/src/main/java/fr/inria/corese/w3cEarlRepportGenerator/EarlRepportGenerator.java +++ b/corese-unit-test/src/main/java/fr/inria/corese/w3cEarlRepportGenerator/EarlRepportGenerator.java @@ -33,11 +33,8 @@ public class EarlRepportGenerator { // eg "2023-01-25T10:18:04-08:00" private final DateTimeFormatter dtf = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ssXXX"); - private final String authorUri = "http://ns.inria.fr/remi.ceres#me"; - private final String athorEmail = "remi.ceres@inria.fr"; - private final String authorName = "Rémi Cérès"; - - private final String organizationHomepage = "https://www.inria.fr/"; + private final String authorUri = "https://team.inria.fr/wimmics"; + private final String authorName = "Wimmics Team"; private final String softwareUri = "https://github.com/Wimmics/corese"; private final String softwareName = "Corese"; @@ -50,8 +47,8 @@ public class EarlRepportGenerator { private final String softwareBlog = "https://github.com/Wimmics/corese/discussions/"; private final String softwareProgrammingLanguage = "Java"; - private final String releaseURI = "1d76a19dccfbdaecf63544e80a7c7a45e54bbc89"; - private final String releaseDate = "2024-03-05"; + private final String releaseURI = "fb1aded381b9775882627c7cdb4085c64ae152da"; + private final String releaseDate = "2024-03-06"; private final Path reportDir = Path.of("corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf"); private final Path inputReportPath = reportDir.resolve("testReport.csv"); @@ -183,8 +180,6 @@ private String insertQueryDescribeDeveloper() { sb.append("INSERT DATA {\n"); sb.append(" <").append(authorUri).append("> a foaf:Person , earl:Assertor ;\n"); sb.append(" foaf:name \"").append(authorName).append("\" ;\n"); - sb.append(" foaf:mbox ;\n"); - sb.append(" foaf:workplaceHomepage <").append(organizationHomepage).append("> .\n"); sb.append("}\n"); return sb.toString(); @@ -202,7 +197,7 @@ private String insertQueryDescribeSoftware() { sb.append("INSERT DATA {\n"); sb.append(" <").append(softwareUri).append("> a doap:Project, earl:Software, earl:TestSubject ;\n"); sb.append(" doap:name \"").append(softwareName).append("\" ;\n"); - sb.append(" doap:release <").append(softwareUri).append("commit/").append(releaseURI).append("> ;\n"); + sb.append(" doap:release <").append(softwareUri).append("/commit/").append(releaseURI).append("> ;\n"); sb.append(" doap:developer <").append(authorUri).append("> ;\n"); sb.append(" doap:homepage <").append(softwareHomepage).append("> ;\n"); sb.append(" doap:description \"").append(softwareDescription).append("\"@en ;\n"); diff --git a/corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf/canonicalRdfTest.java b/corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf/canonicalRdfTest.java index 23b015b48..6aeab7b97 100644 --- a/corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf/canonicalRdfTest.java +++ b/corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf/canonicalRdfTest.java @@ -30,13 +30,13 @@ /** * Auto-generated JUnit test file for the W3C test suite: https://w3c.github.io/rdf-canon/tests/manifest.ttl * This file was automatically generated by JUnitTestFileGenerator.java. 
- * Generation date: 2024-03-06, Time: 17:24:06 Europe/Paris + * Generation date: 2024-03-06, Time: 19:49:53 Europe/Paris */ public class canonicalRdfTest { private static final String TEST_REPORT_FILE = "/home/rceres/Documents/projects/Corese/code/corese/corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf/testReport.csv"; private static final String MANIFEST_URI = "https://w3c.github.io/rdf-canon/tests/manifest"; - private static final String EARL = "https://www.w3.org/ns/earl#"; + private static final String EARL = "http://www.w3.org/ns/earl#"; /** * Writes the test report to the file testReport.csv. From b9dcbd049ce6d2879b71644b856d34d4b0ce862d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Thu, 7 Mar 2024 17:42:44 +0100 Subject: [PATCH 039/146] Update release URI and date --- .../corese/w3cEarlRepportGenerator/EarlRepportGenerator.java | 4 ++-- .../fr/inria/corese/w3c/canonicalRdf/canonicalRdfTest.java | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/corese-unit-test/src/main/java/fr/inria/corese/w3cEarlRepportGenerator/EarlRepportGenerator.java b/corese-unit-test/src/main/java/fr/inria/corese/w3cEarlRepportGenerator/EarlRepportGenerator.java index da11cf1c6..c8e0eeccd 100644 --- a/corese-unit-test/src/main/java/fr/inria/corese/w3cEarlRepportGenerator/EarlRepportGenerator.java +++ b/corese-unit-test/src/main/java/fr/inria/corese/w3cEarlRepportGenerator/EarlRepportGenerator.java @@ -47,8 +47,8 @@ public class EarlRepportGenerator { private final String softwareBlog = "https://github.com/Wimmics/corese/discussions/"; private final String softwareProgrammingLanguage = "Java"; - private final String releaseURI = "fb1aded381b9775882627c7cdb4085c64ae152da"; - private final String releaseDate = "2024-03-06"; + private final String releaseURI = "160645274c021c59eaebc113054ec55d0be8995b"; + private final String releaseDate = "2024-03-07"; private final Path reportDir = Path.of("corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf"); private final Path inputReportPath = reportDir.resolve("testReport.csv"); diff --git a/corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf/canonicalRdfTest.java b/corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf/canonicalRdfTest.java index 6aeab7b97..59b1d548c 100644 --- a/corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf/canonicalRdfTest.java +++ b/corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf/canonicalRdfTest.java @@ -30,7 +30,7 @@ /** * Auto-generated JUnit test file for the W3C test suite: https://w3c.github.io/rdf-canon/tests/manifest.ttl * This file was automatically generated by JUnitTestFileGenerator.java. - * Generation date: 2024-03-06, Time: 19:49:53 Europe/Paris + * Generation date: 2024-03-07, Time: 09:21:34 Europe/Paris */ public class canonicalRdfTest { From 4d95f61b3fe2c272b26a2fadf636a396bed040c7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Thu, 7 Mar 2024 17:45:30 +0100 Subject: [PATCH 040/146] Update citation link in README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 2bb1b72ef..3d33d207b 100644 --- a/README.md +++ b/README.md @@ -153,7 +153,7 @@ mvn clean install -DskipTests ## How to cite Corese -Use the "Cite this repository" option on the right side of this page. +Use the "Cite this repository" option on the right side of this page or Hal [hal-04170333](https://hal.science/hal-04170333). 
## Contributions and discussions From 2df2fbac0cfb1e0db6ab4c58c1aaeaae37c1c655 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Fri, 15 Mar 2024 17:36:08 +0100 Subject: [PATCH 041/146] Add support for CanonicalRdf10 format --- .../inria/corese/command/utils/format/EnumOutputFormat.java | 4 +++- .../fr/inria/corese/command/utils/rdf/RdfDataExporter.java | 4 ++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/format/EnumOutputFormat.java b/corese-command/src/main/java/fr/inria/corese/command/utils/format/EnumOutputFormat.java index 6a5b29cf8..8e57b4b4b 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/utils/format/EnumOutputFormat.java +++ b/corese-command/src/main/java/fr/inria/corese/command/utils/format/EnumOutputFormat.java @@ -24,7 +24,9 @@ public enum EnumOutputFormat { NQUADS(7, "nquads", "nq"), NQ(7, "nq", "nq"), - APPLICATION_NQUADS(7, "application/n-quads", "nq"); + APPLICATION_NQUADS(7, "application/n-quads", "nq"), + + CANONICAL10(8, "canonical", "nq"); private final int value; private final String name; diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/rdf/RdfDataExporter.java b/corese-command/src/main/java/fr/inria/corese/command/utils/rdf/RdfDataExporter.java index 5fdc22a16..f12cfff62 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/utils/rdf/RdfDataExporter.java +++ b/corese-command/src/main/java/fr/inria/corese/command/utils/rdf/RdfDataExporter.java @@ -6,6 +6,7 @@ import fr.inria.corese.command.utils.format.EnumOutputFormat; import fr.inria.corese.core.Graph; +import fr.inria.corese.core.print.CanonicalRdf10Format; import fr.inria.corese.core.print.JSONLDFormat; import fr.inria.corese.core.print.NQuadsFormat; import fr.inria.corese.core.print.NTriplesFormat; @@ -114,6 +115,9 @@ private static void exportToOutputStream( case APPLICATION_NQUADS: NQuadsFormat.create(graph).write(outputStream); break; + case CANONICAL10: + CanonicalRdf10Format.create(graph).write(outputStream); + break; default: throw new IllegalArgumentException("Unsupported output format: " + outputFormat); } From 2d38d96b5543172a87ca0f2d53d4d505203a28ea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Tue, 19 Mar 2024 17:02:47 +0100 Subject: [PATCH 042/146] Add SHACL validation and export functionality --- .../Corese-library with Python.md | 101 +++++++++++++++--- 1 file changed, 88 insertions(+), 13 deletions(-) diff --git a/docs/corese-python/Corese-library with Python.md b/docs/corese-python/Corese-library with Python.md index 5f6e9cf58..396ca669a 100644 --- a/docs/corese-python/Corese-library with Python.md +++ b/docs/corese-python/Corese-library with Python.md @@ -76,9 +76,14 @@ atexit.register(exit_handler) # Import of class Graph = gateway.jvm.fr.inria.corese.core.Graph Load = gateway.jvm.fr.inria.corese.core.load.Load -Transformer = gateway.jvm.fr.inria.corese.core.transform.Transformer QueryProcess = gateway.jvm.fr.inria.corese.core.query.QueryProcess RDF = gateway.jvm.fr.inria.corese.core.logic.RDF +TripleFormat = gateway.jvm.fr.inria.corese.core.print.TripleFormat +RDFFormat = gateway.jvm.fr.inria.corese.core.print.RDFFormat +JSONLDFormat = gateway.jvm.fr.inria.corese.core.print.JSONLDFormat +NTripleFormat = gateway.jvm.fr.inria.corese.core.print.NTripleFormat +NQuadsFormat = gateway.jvm.fr.inria.corese.core.print.NQuadsFormat +Shacl = gateway.jvm.fr.inria.corese.core.shacl.Shacl ############### # Build Graph # @@ 
-118,22 +123,58 @@ def sparqlQuery(graph, query): exec = QueryProcess.create(graph) return exec.query(query) +######### +# SHACL # +######### + +def shaclValidation(graph, shacl): + """Run a SHACL validation on a graph + + :param graph: the graph on which the SHACL validation is executed + :param shacl: the SHACL graph + :returns: SHACL validation report + """ + shacl = Shacl(graph, shacl) + result = shacl.eval() + return result ################# # Load / Export # ################# -def exportToFile(graph, format, path): +def serialize(graph, format): """Export a graph to a file :param graph: graph to export :param format: format of export - :param path: path of the exported file + :returns: the graph export """ - transformer = Transformer.create(graph, format) - transformer.write(path) - + if format == 'turtle': + content = TripleFormat.create(graph).toString() + elif format == 'rdfxml': + content = RDFFormat.create(graph).toString() + elif format == 'jsonld': + content = JSONLDFormat.create(graph).toString() + elif format == 'n3': + content = NTripleFormat.create(graph).toString() + elif format == 'n4': + content = NQuadsFormat.create(graph).toString() + else: + raise Exception('Format not supported : ' + format) + + return content + +def writeToFile(content, path): + """Write content to a file + + :param content: content to write + :param path: path of the file + :returns: the file write + """ + with open(path, "w") as file: + file.write(content) + return file def load(path): """Load a graph from a local file or a URL @@ -148,7 +189,6 @@ def load(path): return graph - ######## # Main # ######## @@ -172,7 +212,7 @@ graph = BuildGraphCoreseApi() print("Graph build ! (" + str(graph.size()) + " triplets)") print("\nPrint Graph:") -print(graph.display()) +print(serialize(graph, 'n4')) ### @@ -181,7 +221,7 @@ print(graph.display()) printTitle("SPARQL Query") graph = load( - "https://raw.githubusercontent.com/stardog-union/stardog-tutorials/master/music/beatles.ttl") + "https://raw.githubusercontent.com/stardog-union/stardog-tutorials/master/music/beatles.ttl") # Uri or path to the graph print("Graph load ! (" + str(graph.size()) + " triplets)") # List of U2 albums @@ -198,6 +238,23 @@ map = sparqlQuery(graph, query) print("\nQuery result ! (List of members of bands \"The Beatles\"): ") print(map) +### +# SHACL Validation +### +printTitle("SHACL Validation") + +graph = load( + "https://files.inria.fr/corese/data/unit-test/beatles.ttl") +print("Graph load ! (" + str(graph.size()) + " triplets)") + +shacl = load( + "https://files.inria.fr/corese/data/unit-test/beatles-validator.ttl") +print("SHACL load ! (" + str(shacl.size()) + " triplets)") + +result = shaclValidation(graph, shacl) +print("SHACL validation report: ") +print(serialize(result, 'turtle')) + ### # Load / Export @@ -209,14 +266,16 @@ graph = load( print("Graph load ! (" + str(graph.size()) + " triplets)") path_export_file = "export.rdf" -exportToFile(graph, Transformer.RDFXML, path_export_file) +writeToFile(serialize(graph, 'turtle'), path_export_file) print("Graph Export in file (" + path_export_file + ")") + ``` Results : ```plaintext -Gateway Server Started +Loaded default config +CoresePy4j gateway server started on port 25333 @@ -226,8 +285,7 @@ Gateway Server Started Graph build ! (1 triplets) Print Graph: -predicate rdf:type [1] -00 kg:default rdf:type + . @@ -246,6 +304,23 @@ Query result ! (List of members of bands "The Beatles"): +====================== +== SHACL Validation == +====================== +Graph load ! 
(28 triplets) +SHACL load ! (46 triplets) +SHACL validation report: +@prefix xsh: . +@prefix sh: . +@prefix rdf: . + +_:b8 a sh:ValidationReport ; + sh:conforms true . + + + + + =================== == Load / Export == =================== From fc1825918302fec47852dc1f73ad1175c84fd7d1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Thu, 11 Apr 2024 15:42:15 +0200 Subject: [PATCH 043/146] Correction of escape characters in the analyser and serialiser --- .../corese/core/print/NTriplesFormat.java | 63 +- .../sparql/triple/javacc1/JavaCharStream.java | 698 +- .../sparql/triple/javacc1/ParseException.java | 97 +- .../triple/javacc1/SimpleCharStream.java | 472 ++ .../sparql/triple/javacc1/SparqlCorese.java | 6333 ++++++++++------- .../triple/javacc1/SparqlCoreseConstants.java | 259 + .../javacc1/SparqlCoreseTokenManager.java | 1565 ++-- .../corese/sparql/triple/javacc1/Token.java | 85 +- .../sparql/triple/javacc1/TokenMgrError.java | 245 +- .../sparql/triple/javacc1/sparql_corese.jj | 84 +- .../sparql/triple/parser/ParserHandler.java | 195 +- 11 files changed, 6183 insertions(+), 3913 deletions(-) create mode 100644 sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/SimpleCharStream.java diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/NTriplesFormat.java b/corese-core/src/main/java/fr/inria/corese/core/print/NTriplesFormat.java index 51a2f5e3f..a7d1a317d 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/NTriplesFormat.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/NTriplesFormat.java @@ -2,8 +2,6 @@ import java.io.IOException; import java.io.OutputStream; -import java.net.URI; -import java.net.URISyntaxException; import fr.inria.corese.core.Graph; import fr.inria.corese.kgram.api.core.Edge; @@ -115,13 +113,7 @@ public String printNode(Node node) { * @return a string representation of the URI node */ private String printURI(Node node) { - try { - // Validate URI and percent-encode if necessary - URI uri = new URI(node.getLabel()); - return "<" + uri.toASCIIString() + ">"; - } catch (URISyntaxException e) { - throw new IllegalArgumentException("Invalid URI: " + node.getLabel(), e); - } + return "<" + node.getLabel() + ">"; } /** @@ -163,29 +155,38 @@ protected String printBlank(Node node) { private String escape(String str) { StringBuilder escaped = new StringBuilder(); for (char ch : str.toCharArray()) { - if (ch >= 0x00 && ch <= 0x1F || ch >= 0x7F && ch <= 0x9F) { - escaped.append(String.format("\\u%04x", (int) ch)); - } else { - switch (ch) { - case '\\': - escaped.append("\\\\"); - break; - case '\"': - escaped.append("\\\""); - break; - case '\n': - escaped.append("\\n"); - break; - case '\r': - escaped.append("\\r"); - break; - case '\t': - escaped.append("\\t"); - break; - default: + switch (ch) { + case '\\': // Backslash + escaped.append("\\\\"); + break; + case '\"': // Double quote + escaped.append("\\\""); + break; + case '\n': // Line Feed + escaped.append("\\n"); + break; + case '\r': // Carriage Return + escaped.append("\\r"); + break; + case '\t': // Horizontal Tab + escaped.append("\\t"); + break; + case '\b': // Backspace + escaped.append("\\b"); + break; + case '\f': // Form Feed + escaped.append("\\f"); + break; + default: + // Uses UCHAR for specific characters and those outside the Char production of + // XML 1.1 + if ((ch >= '\u0000' && ch <= '\u0007') || ch == '\u000B' || (ch >= '\u000E' && ch <= '\u001F') + || ch == '\u007F') { + escaped.append(String.format("\\u%04X", (int) ch)); + } 
else { + // Uses the native representation for all other characters escaped.append(ch); - break; - } + } } } return escaped.toString(); diff --git a/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/JavaCharStream.java b/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/JavaCharStream.java index 17528999c..85b669fa9 100644 --- a/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/JavaCharStream.java +++ b/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/JavaCharStream.java @@ -1,4 +1,5 @@ -/* Generated By:JavaCC: Do not edit this line. JavaCharStream.java Version 4.0 */ +/* Generated By:JavaCC: Do not edit this line. JavaCharStream.java Version 7.0 */ +/* JavaCCOptions:STATIC=false,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ package fr.inria.corese.sparql.triple.javacc1; /** @@ -6,9 +7,12 @@ * contain only ASCII characters (with java-like unicode escape processing). */ -public class JavaCharStream +public +class JavaCharStream { + /** Whether parser is static. */ public static final boolean staticFlag = false; + static final int hexval(char c) throws java.io.IOException { switch(c) { @@ -56,6 +60,7 @@ static final int hexval(char c) throws java.io.IOException { throw new java.io.IOException(); // Should never come here } +/* Position in buffer. */ public int bufpos = -1; int bufsize; int available; @@ -76,311 +81,331 @@ static final int hexval(char c) throws java.io.IOException { protected int maxNextCharInd = 0; protected int nextCharInd = -1; protected int inBuf = 0; - protected int tabSize = 8; + protected int tabSize = 1; + protected boolean trackLineColumn = true; - protected void setTabSize(int i) { tabSize = i; } - protected int getTabSize(int i) { return tabSize; } + public void setTabSize(int i) { tabSize = i; } + public int getTabSize() { return tabSize; } protected void ExpandBuff(boolean wrapAround) { - char[] newbuffer = new char[bufsize + 2048]; - int newbufline[] = new int[bufsize + 2048]; - int newbufcolumn[] = new int[bufsize + 2048]; + char[] newbuffer = new char[bufsize + 2048]; + int newbufline[] = new int[bufsize + 2048]; + int newbufcolumn[] = new int[bufsize + 2048]; - try - { - if (wrapAround) - { - System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin); - System.arraycopy(buffer, 0, newbuffer, - bufsize - tokenBegin, bufpos); - buffer = newbuffer; + try + { + if (wrapAround) + { + System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin); + System.arraycopy(buffer, 0, newbuffer, bufsize - tokenBegin, bufpos); + buffer = newbuffer; - System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin); - System.arraycopy(bufline, 0, newbufline, bufsize - tokenBegin, bufpos); - bufline = newbufline; + System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin); + System.arraycopy(bufline, 0, newbufline, bufsize - tokenBegin, bufpos); + bufline = newbufline; - System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin); - System.arraycopy(bufcolumn, 0, newbufcolumn, bufsize - tokenBegin, bufpos); - bufcolumn = newbufcolumn; + System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin); + System.arraycopy(bufcolumn, 0, newbufcolumn, bufsize - tokenBegin, bufpos); + bufcolumn = newbufcolumn; - bufpos += (bufsize - tokenBegin); - } - else - { - System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin); - buffer = newbuffer; + bufpos += (bufsize - tokenBegin); + } + else + { + System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - 
tokenBegin); + buffer = newbuffer; - System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin); - bufline = newbufline; + System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin); + bufline = newbufline; - System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin); - bufcolumn = newbufcolumn; + System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin); + bufcolumn = newbufcolumn; - bufpos -= tokenBegin; - } - } - catch (Throwable t) - { - throw new Error(t.getMessage()); - } + bufpos -= tokenBegin; + } + } + catch (Throwable t) + { + throw new Error(t.getMessage()); + } - available = (bufsize += 2048); - tokenBegin = 0; + available = (bufsize += 2048); + tokenBegin = 0; } protected void FillBuff() throws java.io.IOException { - int i; - if (maxNextCharInd == 4096) - maxNextCharInd = nextCharInd = 0; - - try { - if ((i = inputStream.read(nextCharBuf, maxNextCharInd, - 4096 - maxNextCharInd)) == -1) - { - inputStream.close(); - throw new java.io.IOException(); - } - else - maxNextCharInd += i; - return; - } - catch(java.io.IOException e) { - if (bufpos != 0) - { - --bufpos; - backup(0); - } - else - { - bufline[bufpos] = line; - bufcolumn[bufpos] = column; - } - throw e; - } + int i; + if (maxNextCharInd == 4096) + maxNextCharInd = nextCharInd = 0; + + try { + if ((i = inputStream.read(nextCharBuf, maxNextCharInd, + 4096 - maxNextCharInd)) == -1) + { + inputStream.close(); + throw new java.io.IOException(); + } + else + maxNextCharInd += i; + return; + } + catch(java.io.IOException e) { + if (bufpos != 0) + { + --bufpos; + backup(0); + } + else + { + bufline[bufpos] = line; + bufcolumn[bufpos] = column; + } + throw e; + } } protected char ReadByte() throws java.io.IOException { - if (++nextCharInd >= maxNextCharInd) - FillBuff(); + if (++nextCharInd >= maxNextCharInd) + FillBuff(); - return nextCharBuf[nextCharInd]; + return nextCharBuf[nextCharInd]; } +/* @return starting character for token. 
*/ public char BeginToken() throws java.io.IOException - { - if (inBuf > 0) - { - --inBuf; + { + if (inBuf > 0) + { + --inBuf; - if (++bufpos == bufsize) - bufpos = 0; + if (++bufpos == bufsize) + bufpos = 0; - tokenBegin = bufpos; - return buffer[bufpos]; - } + tokenBegin = bufpos; + return buffer[bufpos]; + } - tokenBegin = 0; - bufpos = -1; + tokenBegin = 0; + bufpos = -1; - return readChar(); - } + return readChar(); + } protected void AdjustBuffSize() { - if (available == bufsize) - { - if (tokenBegin > 2048) - { - bufpos = 0; - available = tokenBegin; - } - else - ExpandBuff(false); - } - else if (available > tokenBegin) - available = bufsize; - else if ((tokenBegin - available) < 2048) - ExpandBuff(true); - else + if (available == bufsize) + { + if (tokenBegin > 2048) + { + bufpos = 0; available = tokenBegin; + } + else + ExpandBuff(false); + } + else if (available > tokenBegin) + available = bufsize; + else if ((tokenBegin - available) < 2048) + ExpandBuff(true); + else + available = tokenBegin; } protected void UpdateLineColumn(char c) { - column++; + column++; - if (prevCharIsLF) - { - prevCharIsLF = false; + if (prevCharIsLF) + { + prevCharIsLF = false; + line += (column = 1); + } + else if (prevCharIsCR) + { + prevCharIsCR = false; + if (c == '\n') + { + prevCharIsLF = true; + } + else line += (column = 1); - } - else if (prevCharIsCR) - { - prevCharIsCR = false; - if (c == '\n') - { - prevCharIsLF = true; - } - else - line += (column = 1); - } - - switch (c) - { - case '\r' : - prevCharIsCR = true; - break; - case '\n' : - prevCharIsLF = true; - break; - case '\t' : - column--; - column += (tabSize - (column % tabSize)); - break; - default : - break; - } - - bufline[bufpos] = line; - bufcolumn[bufpos] = column; + } + + switch (c) + { + case '\r' : + prevCharIsCR = true; + break; + case '\n' : + prevCharIsLF = true; + break; + case '\t' : + column--; + column += (tabSize - (column % tabSize)); + break; + default : + break; + } + + bufline[bufpos] = line; + bufcolumn[bufpos] = column; } +/* Read a character. */ public char readChar() throws java.io.IOException { - if (inBuf > 0) - { - --inBuf; + if (inBuf > 0) + { + --inBuf; - if (++bufpos == bufsize) - bufpos = 0; + if (++bufpos == bufsize) + bufpos = 0; - return buffer[bufpos]; - } + return buffer[bufpos]; + } - char c; + char c; - if (++bufpos == available) - AdjustBuffSize(); + if (++bufpos == available) + AdjustBuffSize(); - if ((buffer[bufpos] = c = ReadByte()) == '\\') - { - UpdateLineColumn(c); + if ((buffer[bufpos] = c = ReadByte()) == '\\') + { + if (trackLineColumn) { UpdateLineColumn(c); } - int backSlashCnt = 1; + int backSlashCnt = 1; - for (;;) // Read all the backslashes - { - if (++bufpos == available) - AdjustBuffSize(); - - try - { - if ((buffer[bufpos] = c = ReadByte()) != '\\') - { - UpdateLineColumn(c); - // found a non-backslash char. 
- if ((c == 'u') && ((backSlashCnt & 1) == 1)) - { - if (--bufpos < 0) - bufpos = bufsize - 1; - - break; - } - - backup(backSlashCnt); - return '\\'; - } - } - catch(java.io.IOException e) - { - if (backSlashCnt > 1) - backup(backSlashCnt); - - return '\\'; - } - - UpdateLineColumn(c); - backSlashCnt++; - } + for (;;) // Read all the backslashes + { + if (++bufpos == available) + AdjustBuffSize(); - // Here, we have seen an odd number of backslash's followed by a 'u' try { - while ((c = ReadByte()) == 'u') - ++column; - - buffer[bufpos] = c = (char)(hexval(c) << 12 | - hexval(ReadByte()) << 8 | - hexval(ReadByte()) << 4 | - hexval(ReadByte())); - - column += 4; + if ((buffer[bufpos] = c = ReadByte()) != '\\') + { + if (trackLineColumn) { UpdateLineColumn(c); } + // found a non-backslash char. + if ((c == 'u') && ((backSlashCnt & 1) == 1)) + { + if (--bufpos < 0) + bufpos = bufsize - 1; + + break; + } + + backup(backSlashCnt); + return '\\'; + } } catch(java.io.IOException e) { - throw new Error("Invalid escape character at line " + line + - " column " + column + "."); - } + // We are returning one backslash so we should only backup (count-1) + if (backSlashCnt > 1) + backup(backSlashCnt-1); - if (backSlashCnt == 1) - return c; - else - { - backup(backSlashCnt - 1); - return '\\'; + return '\\'; } - } - else - { - UpdateLineColumn(c); - return (c); - } + + if (trackLineColumn) { UpdateLineColumn(c); } + backSlashCnt++; + } + + // Here, we have seen an odd number of backslash's followed by a 'u' + try + { + while ((c = ReadByte()) == 'u') + ++column; + + buffer[bufpos] = c = (char)(hexval(c) << 12 | + hexval(ReadByte()) << 8 | + hexval(ReadByte()) << 4 | + hexval(ReadByte())); + + column += 4; + } + catch(java.io.IOException e) + { + throw new Error("Invalid escape character at line " + line + + " column " + column + "."); + } + + if (backSlashCnt == 1) + return c; + else + { + backup(backSlashCnt - 1); + return '\\'; + } + } + else + { + UpdateLineColumn(c); + return c; + } } - /** - * @deprecated + /* + * @deprecated * @see #getEndColumn */ - + @Deprecated public int getColumn() { - return bufcolumn[bufpos]; + return bufcolumn[bufpos]; } - /** - * @deprecated + /* + * @deprecated * @see #getEndLine + * @return the line number. */ - + @Deprecated public int getLine() { - return bufline[bufpos]; + return bufline[bufpos]; } +/** Get end column. + * @return the end column or -1 + */ public int getEndColumn() { - return bufcolumn[bufpos]; + return bufcolumn[bufpos]; } +/** Get end line. + * @return the end line number or -1 + */ public int getEndLine() { - return bufline[bufpos]; + return bufline[bufpos]; } +/** Get the beginning column. + * @return column of token start */ public int getBeginColumn() { - return bufcolumn[tokenBegin]; + return bufcolumn[tokenBegin]; } +/** @return line number of token start */ public int getBeginLine() { - return bufline[tokenBegin]; + return bufline[tokenBegin]; } +/** Retreat. */ public void backup(int amount) { inBuf += amount; if ((bufpos -= amount) < 0) - bufpos += bufsize; + bufpos += bufsize; } +/** Constructor. + * @param dstream the underlying data source. + * @param startline line number of the first character of the stream, mostly for error messages. + * @param startcolumn column number of the first character of the stream. 
+ * @param buffersize size of the buffer + */ public JavaCharStream(java.io.Reader dstream, int startline, int startcolumn, int buffersize) { @@ -395,16 +420,26 @@ public JavaCharStream(java.io.Reader dstream, nextCharBuf = new char[4096]; } +/** Constructor. + * @param dstream the underlying data source. + * @param startline line number of the first character of the stream, mostly for error messages. + * @param startcolumn column number of the first character of the stream. + */ public JavaCharStream(java.io.Reader dstream, int startline, int startcolumn) { - this(dstream, startline, startcolumn, 4096); + this(dstream, startline, startcolumn, 4096); } +/** Constructor. + * @param dstream the underlying data source. + * @param startline line number of the first character of the stream, mostly for error messages. + */ public JavaCharStream(java.io.Reader dstream) { - this(dstream, 1, 1, 4096); + this(dstream, 1, 1, 4096); } +/* Reinitialise. */ public void ReInit(java.io.Reader dstream, int startline, int startcolumn, int buffersize) { @@ -425,160 +460,233 @@ public void ReInit(java.io.Reader dstream, nextCharInd = bufpos = -1; } +/* Reinitialise. */ public void ReInit(java.io.Reader dstream, int startline, int startcolumn) { - ReInit(dstream, startline, startcolumn, 4096); + ReInit(dstream, startline, startcolumn, 4096); } +/* Reinitialise. */ public void ReInit(java.io.Reader dstream) { - ReInit(dstream, 1, 1, 4096); + ReInit(dstream, 1, 1, 4096); } +/** Constructor. */ public JavaCharStream(java.io.InputStream dstream, String encoding, int startline, int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException { - this(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize); + this(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize); } +/** Constructor. + * @param dstream the underlying data source. + * @param startline line number of the first character of the stream, mostly for error messages. + * @param startcolumn column number of the first character of the stream. + * @param buffersize size of the buffer + */ public JavaCharStream(java.io.InputStream dstream, int startline, int startcolumn, int buffersize) { - this(new java.io.InputStreamReader(dstream), startline, startcolumn, 4096); + this(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize); } +/** Constructor. + * @param dstream the underlying data source. + * @param encoding the character encoding of the data stream. + * @param startline line number of the first character of the stream, mostly for error messages. + * @param startcolumn column number of the first character of the stream. + * @throws UnsupportedEncodingException encoding is invalid or unsupported. + */ public JavaCharStream(java.io.InputStream dstream, String encoding, int startline, int startcolumn) throws java.io.UnsupportedEncodingException { - this(dstream, encoding, startline, startcolumn, 4096); + this(dstream, encoding, startline, startcolumn, 4096); } +/** Constructor. + * @param dstream the underlying data source. + * @param startline line number of the first character of the stream, mostly for error messages. + * @param startcolumn column number of the first character of the stream. 
+ */ public JavaCharStream(java.io.InputStream dstream, int startline, int startcolumn) { - this(dstream, startline, startcolumn, 4096); + this(dstream, startline, startcolumn, 4096); } +/** Constructor. + * @param dstream the underlying data source. + * @param encoding the character encoding of the data stream. + * @throws UnsupportedEncodingException encoding is invalid or unsupported. + */ public JavaCharStream(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException { - this(dstream, encoding, 1, 1, 4096); + this(dstream, encoding, 1, 1, 4096); } + /** Constructor. + * @param dstream the underlying data source. + */ public JavaCharStream(java.io.InputStream dstream) { - this(dstream, 1, 1, 4096); + this(dstream, 1, 1, 4096); } +/** Reinitialise. + * @param dstream the underlying data source. + * @param encoding the character encoding of the data stream. + * @param startline line number of the first character of the stream, mostly for error messages. + * @param startcolumn column number of the first character of the stream. + * @param buffersize size of the buffer + */ public void ReInit(java.io.InputStream dstream, String encoding, int startline, int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException { - ReInit(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize); + ReInit(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize); } +/** Reinitialise. + * @param dstream the underlying data source. + * @param startline line number of the first character of the stream, mostly for error messages. + * @param startcolumn column number of the first character of the stream. + * @param buffersize size of the buffer + */ public void ReInit(java.io.InputStream dstream, int startline, int startcolumn, int buffersize) { - ReInit(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize); - } + ReInit(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize); + } +/** Reinitialise. + * @param dstream the underlying data source. + * @param encoding the character encoding of the data stream. + * @param startline line number of the first character of the stream, mostly for error messages. + * @param startcolumn column number of the first character of the stream. + * @throws UnsupportedEncodingException encoding is invalid or unsupported. + */ public void ReInit(java.io.InputStream dstream, String encoding, int startline, int startcolumn) throws java.io.UnsupportedEncodingException { - ReInit(dstream, encoding, startline, startcolumn, 4096); + ReInit(dstream, encoding, startline, startcolumn, 4096); } +/** Reinitialise. + * @param dstream the underlying data source. + * @param startline line number of the first character of the stream, mostly for error messages. + * @param startcolumn column number of the first character of the stream. + */ public void ReInit(java.io.InputStream dstream, int startline, int startcolumn) { - ReInit(dstream, startline, startcolumn, 4096); + ReInit(dstream, startline, startcolumn, 4096); } +/** Reinitialise. + * @param dstream the underlying data source. + * @param encoding the character encoding of the data stream. + * @throws UnsupportedEncodingException encoding is invalid or unsupported. 
+ */ public void ReInit(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException { - ReInit(dstream, encoding, 1, 1, 4096); + ReInit(dstream, encoding, 1, 1, 4096); } +/** Reinitialise. + * @param dstream the underlying data source. + */ public void ReInit(java.io.InputStream dstream) { - ReInit(dstream, 1, 1, 4096); + ReInit(dstream, 1, 1, 4096); } + /** Get the token timage. + * @return token image as String */ public String GetImage() { - if (bufpos >= tokenBegin) - return new String(buffer, tokenBegin, bufpos - tokenBegin + 1); - else - return new String(buffer, tokenBegin, bufsize - tokenBegin) + + if (bufpos >= tokenBegin) + return new String(buffer, tokenBegin, bufpos - tokenBegin + 1); + else + return new String(buffer, tokenBegin, bufsize - tokenBegin) + new String(buffer, 0, bufpos + 1); } + /** Get the suffix as an array of characters. + * @param len the length of the array to return. + * @return suffix */ public char[] GetSuffix(int len) { - char[] ret = new char[len]; + char[] ret = new char[len]; - if ((bufpos + 1) >= len) - System.arraycopy(buffer, bufpos - len + 1, ret, 0, len); - else - { - System.arraycopy(buffer, bufsize - (len - bufpos - 1), ret, 0, - len - bufpos - 1); - System.arraycopy(buffer, 0, ret, len - bufpos - 1, bufpos + 1); - } + if ((bufpos + 1) >= len) + System.arraycopy(buffer, bufpos - len + 1, ret, 0, len); + else + { + System.arraycopy(buffer, bufsize - (len - bufpos - 1), ret, 0, + len - bufpos - 1); + System.arraycopy(buffer, 0, ret, len - bufpos - 1, bufpos + 1); + } - return ret; + return ret; } + /** Set buffers back to null when finished. */ public void Done() { - nextCharBuf = null; - buffer = null; - bufline = null; - bufcolumn = null; + nextCharBuf = null; + buffer = null; + bufline = null; + bufcolumn = null; } /** * Method to adjust line and column numbers for the start of a token. + * + * @param newLine the new line number. + * @param newCol the new column number. 
*/ public void adjustBeginLineColumn(int newLine, int newCol) { - int start = tokenBegin; - int len; - - if (bufpos >= tokenBegin) - { - len = bufpos - tokenBegin + inBuf + 1; - } - else - { - len = bufsize - tokenBegin + bufpos + 1 + inBuf; - } - - int i = 0, j = 0, k = 0; - int nextColDiff = 0, columnDiff = 0; - - while (i < len && - bufline[j = start % bufsize] == bufline[k = ++start % bufsize]) - { - bufline[j] = newLine; - nextColDiff = columnDiff + bufcolumn[k] - bufcolumn[j]; - bufcolumn[j] = newCol + columnDiff; - columnDiff = nextColDiff; - i++; - } - - if (i < len) - { - bufline[j] = newLine++; - bufcolumn[j] = newCol + columnDiff; - - while (i++ < len) - { - if (bufline[j = start % bufsize] != bufline[++start % bufsize]) - bufline[j] = newLine++; - else - bufline[j] = newLine; - } - } + int start = tokenBegin; + int len; + + if (bufpos >= tokenBegin) + { + len = bufpos - tokenBegin + inBuf + 1; + } + else + { + len = bufsize - tokenBegin + bufpos + 1 + inBuf; + } + + int i = 0, j = 0, k = 0; + int nextColDiff = 0, columnDiff = 0; + + while (i < len && bufline[j = start % bufsize] == bufline[k = ++start % bufsize]) + { + bufline[j] = newLine; + nextColDiff = columnDiff + bufcolumn[k] - bufcolumn[j]; + bufcolumn[j] = newCol + columnDiff; + columnDiff = nextColDiff; + i++; + } + + if (i < len) + { + bufline[j] = newLine++; + bufcolumn[j] = newCol + columnDiff; + + while (i++ < len) + { + if (bufline[j = start % bufsize] != bufline[++start % bufsize]) + bufline[j] = newLine++; + else + bufline[j] = newLine; + } + } - line = bufline[j]; - column = bufcolumn[j]; + line = bufline[j]; + column = bufcolumn[j]; } + boolean getTrackLineColumn() { return trackLineColumn; } + void setTrackLineColumn(boolean tlc) { trackLineColumn = tlc; } } +/* JavaCC - OriginalChecksum=ce63727a39341a9f623ead341b601e5a (do not edit this line) */ diff --git a/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/ParseException.java b/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/ParseException.java index aeb2678cd..452be3e6b 100644 --- a/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/ParseException.java +++ b/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/ParseException.java @@ -1,4 +1,5 @@ -/* Generated By:JavaCC: Do not edit this line. ParseException.java Version 3.0 */ +/* Generated By:JavaCC: Do not edit this line. ParseException.java Version 7.0 */ +/* JavaCCOptions:KEEP_LINE_COLUMN=true */ package fr.inria.corese.sparql.triple.javacc1; /** @@ -12,25 +13,30 @@ */ public class ParseException extends Exception { + /** + * The version identifier for this Serializable class. + * Increment only if the serialized form of the + * class changes. + */ + private static final long serialVersionUID = 1L; + + /** + * The end of line string for this machine. + */ + protected static String EOL = System.getProperty("line.separator", "\n"); + /** * This constructor is used by the method "generateParseException" * in the generated parser. Calling this constructor generates * a new object of this type with the fields "currentToken", - * "expectedTokenSequences", and "tokenImage" set. The boolean - * flag "specialConstructor" is also set to true to indicate that - * this constructor was used to create this object. - * This constructor calls its super class with the empty string - * to force the "toString" method of parent class "Throwable" to - * print the error message in the form: - * ParseException: + * "expectedTokenSequences", and "tokenImage" set. 
*/ public ParseException(Token currentTokenVal, int[][] expectedTokenSequencesVal, String[] tokenImageVal ) { - super(""); - specialConstructor = true; + super(initialise(currentTokenVal, expectedTokenSequencesVal, tokenImageVal)); currentToken = currentTokenVal; expectedTokenSequences = expectedTokenSequencesVal; tokenImage = tokenImageVal; @@ -48,25 +54,18 @@ public ParseException(Token currentTokenVal, public ParseException() { super(); - specialConstructor = false; } + /** Constructor with message. */ public ParseException(String message) { super(message); - specialConstructor = false; } - /** - * This variable determines which constructor was used to create - * this object and thereby affects the semantics of the - * "getMessage" method (see below). - */ - protected boolean specialConstructor; /** * This is the last token that has been consumed successfully. If * this object has been created due to a parse error, the token - * followng this token will (therefore) be the first error token. + * following this token will (therefore) be the first error token. */ public Token currentToken; @@ -85,32 +84,29 @@ public ParseException(String message) { public String[] tokenImage; /** - * This method has the standard behavior when this object has been - * created using the standard constructors. Otherwise, it uses - * "currentToken" and "expectedTokenSequences" to generate a parse + * It uses "currentToken" and "expectedTokenSequences" to generate a parse * error message and returns it. If this object has been created * due to a parse error, and you do not catch it (it gets thrown - * from the parser), then this method is called during the printing - * of the final stack trace, and hence the correct error message + * from the parser) the correct error message * gets displayed. */ - public String getMessage() { - if (!specialConstructor) { - return super.getMessage(); - } - StringBuffer expected = new StringBuffer(); + private static String initialise(Token currentToken, + int[][] expectedTokenSequences, + String[] tokenImage) { + + StringBuilder expected = new StringBuilder(); int maxSize = 0; for (int i = 0; i < expectedTokenSequences.length; i++) { if (maxSize < expectedTokenSequences[i].length) { maxSize = expectedTokenSequences[i].length; } for (int j = 0; j < expectedTokenSequences[i].length; j++) { - expected.append(tokenImage[expectedTokenSequences[i][j]]).append(" "); + expected.append(tokenImage[expectedTokenSequences[i][j]]).append(' '); } if (expectedTokenSequences[i][expectedTokenSequences[i].length - 1] != 0) { expected.append("..."); } - expected.append(eol).append(" "); + expected.append(EOL).append(" "); } String retval = "Encountered \""; Token tok = currentToken.next; @@ -120,38 +116,44 @@ public String getMessage() { retval += tokenImage[0]; break; } + retval += " " + tokenImage[tok.kind]; + retval += " \""; retval += add_escapes(tok.image); - tok = tok.next; + retval += " \""; + tok = tok.next; } - retval += "\" at line " + currentToken.next.beginLine + ", column " + currentToken.next.beginColumn; - retval += "." + eol; - if (expectedTokenSequences.length == 1) { - retval += "Was expecting:" + eol + " "; + if (currentToken.next != null) { + retval += "\" at line " + currentToken.next.beginLine + ", column " + currentToken.next.beginColumn; + } + retval += "." 
+ EOL; + + + if (expectedTokenSequences.length == 0) { + // Nothing to add here } else { - retval += "Was expecting one of:" + eol + " "; + if (expectedTokenSequences.length == 1) { + retval += "Was expecting:" + EOL + " "; + } else { + retval += "Was expecting one of:" + EOL + " "; + } + retval += expected.toString(); } - retval += expected.toString(); + return retval; } - /** - * The end of line string for this machine. - */ - protected String eol = System.getProperty("line.separator", "\n"); - + /** * Used to convert raw characters to their escaped version * when these raw version cannot be used as part of an ASCII * string literal. */ - protected String add_escapes(String str) { - StringBuffer retval = new StringBuffer(); + static String add_escapes(String str) { + StringBuilder retval = new StringBuilder(); char ch; for (int i = 0; i < str.length(); i++) { switch (str.charAt(i)) { - case 0 : - continue; case '\b': retval.append("\\b"); continue; @@ -190,3 +192,4 @@ protected String add_escapes(String str) { } } +/* JavaCC - OriginalChecksum=9f407677f116a2ad2713965b8f6dd66c (do not edit this line) */ diff --git a/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/SimpleCharStream.java b/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/SimpleCharStream.java new file mode 100644 index 000000000..e5634c265 --- /dev/null +++ b/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/SimpleCharStream.java @@ -0,0 +1,472 @@ +/* Generated By:JavaCC: Do not edit this line. SimpleCharStream.java Version 7.0 */ +/* JavaCCOptions:STATIC=false,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ +package fr.inria.corese.sparql.triple.javacc1; + +/** + * An implementation of interface CharStream, where the stream is assumed to + * contain only ASCII characters (without unicode processing). + */ + +public class SimpleCharStream +{ +/** Whether parser is static. */ + public static final boolean staticFlag = false; + int bufsize; + int available; + int tokenBegin; +/** Position in buffer. 
*/ + public int bufpos = -1; + protected int bufline[]; + protected int bufcolumn[]; + + protected int column = 0; + protected int line = 1; + + protected boolean prevCharIsCR = false; + protected boolean prevCharIsLF = false; + + protected java.io.Reader inputStream; + + protected char[] buffer; + protected int maxNextCharInd = 0; + protected int inBuf = 0; + protected int tabSize = 1; + protected boolean trackLineColumn = true; + + public void setTabSize(int i) { tabSize = i; } + public int getTabSize() { return tabSize; } + + + + protected void ExpandBuff(boolean wrapAround) + { + char[] newbuffer = new char[bufsize + 2048]; + int newbufline[] = new int[bufsize + 2048]; + int newbufcolumn[] = new int[bufsize + 2048]; + + try + { + if (wrapAround) + { + System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin); + System.arraycopy(buffer, 0, newbuffer, bufsize - tokenBegin, bufpos); + buffer = newbuffer; + + System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin); + System.arraycopy(bufline, 0, newbufline, bufsize - tokenBegin, bufpos); + bufline = newbufline; + + System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin); + System.arraycopy(bufcolumn, 0, newbufcolumn, bufsize - tokenBegin, bufpos); + bufcolumn = newbufcolumn; + + maxNextCharInd = (bufpos += (bufsize - tokenBegin)); + } + else + { + System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin); + buffer = newbuffer; + + System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin); + bufline = newbufline; + + System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin); + bufcolumn = newbufcolumn; + + maxNextCharInd = (bufpos -= tokenBegin); + } + } + catch (Throwable t) + { + throw new Error(t.getMessage()); + } + + + bufsize += 2048; + available = bufsize; + tokenBegin = 0; + } + + protected void FillBuff() throws java.io.IOException + { + if (maxNextCharInd == available) + { + if (available == bufsize) + { + if (tokenBegin > 2048) + { + bufpos = maxNextCharInd = 0; + available = tokenBegin; + } + else if (tokenBegin < 0) + bufpos = maxNextCharInd = 0; + else + ExpandBuff(false); + } + else if (available > tokenBegin) + available = bufsize; + else if ((tokenBegin - available) < 2048) + ExpandBuff(true); + else + available = tokenBegin; + } + + int i; + try { + if ((i = inputStream.read(buffer, maxNextCharInd, available - maxNextCharInd)) == -1) + { + inputStream.close(); + throw new java.io.IOException(); + } + else + maxNextCharInd += i; + return; + } + catch(java.io.IOException e) { + --bufpos; + backup(0); + if (tokenBegin == -1) + tokenBegin = bufpos; + throw e; + } + } + +/** Start. */ + public char BeginToken() throws java.io.IOException + { + tokenBegin = -1; + char c = readChar(); + tokenBegin = bufpos; + + return c; + } + + protected void UpdateLineColumn(char c) + { + column++; + + if (prevCharIsLF) + { + prevCharIsLF = false; + line += (column = 1); + } + else if (prevCharIsCR) + { + prevCharIsCR = false; + if (c == '\n') + { + prevCharIsLF = true; + } + else + line += (column = 1); + } + + switch (c) + { + case '\r' : + prevCharIsCR = true; + break; + case '\n' : + prevCharIsLF = true; + break; + case '\t' : + column--; + column += (tabSize - (column % tabSize)); + break; + default : + break; + } + + bufline[bufpos] = line; + bufcolumn[bufpos] = column; + } + +/** Read a character. 
*/ + public char readChar() throws java.io.IOException + { + if (inBuf > 0) + { + --inBuf; + + if (++bufpos == bufsize) + bufpos = 0; + + return buffer[bufpos]; + } + + if (++bufpos >= maxNextCharInd) + FillBuff(); + + char c = buffer[bufpos]; + + UpdateLineColumn(c); + return c; + } + + /** + * @deprecated + * @see #getEndColumn + */ + @Deprecated + public int getColumn() { + return bufcolumn[bufpos]; + } + + /** + * @deprecated + * @see #getEndLine + */ + @Deprecated + public int getLine() { + return bufline[bufpos]; + } + + /** Get token end column number. */ + public int getEndColumn() { + return bufcolumn[bufpos]; + } + + /** Get token end line number. */ + public int getEndLine() { + return bufline[bufpos]; + } + + /** Get token beginning column number. */ + public int getBeginColumn() { + return bufcolumn[tokenBegin]; + } + + /** Get token beginning line number. */ + public int getBeginLine() { + return bufline[tokenBegin]; + } + +/** Backup a number of characters. */ + public void backup(int amount) { + + inBuf += amount; + if ((bufpos -= amount) < 0) + bufpos += bufsize; + } + + /** Constructor. */ + public SimpleCharStream(java.io.Reader dstream, int startline, + int startcolumn, int buffersize) + { + inputStream = dstream; + line = startline; + column = startcolumn - 1; + + available = bufsize = buffersize; + buffer = new char[buffersize]; + bufline = new int[buffersize]; + bufcolumn = new int[buffersize]; + } + + /** Constructor. */ + public SimpleCharStream(java.io.Reader dstream, int startline, + int startcolumn) + { + this(dstream, startline, startcolumn, 4096); + } + + /** Constructor. */ + public SimpleCharStream(java.io.Reader dstream) + { + this(dstream, 1, 1, 4096); + } + + /** Reinitialise. */ + public void ReInit(java.io.Reader dstream, int startline, + int startcolumn, int buffersize) + { + inputStream = dstream; + line = startline; + column = startcolumn - 1; + + if (buffer == null || buffersize != buffer.length) + { + available = bufsize = buffersize; + buffer = new char[buffersize]; + bufline = new int[buffersize]; + bufcolumn = new int[buffersize]; + } + prevCharIsLF = prevCharIsCR = false; + tokenBegin = inBuf = maxNextCharInd = 0; + bufpos = -1; + } + + /** Reinitialise. */ + public void ReInit(java.io.Reader dstream, int startline, + int startcolumn) + { + ReInit(dstream, startline, startcolumn, 4096); + } + + /** Reinitialise. */ + public void ReInit(java.io.Reader dstream) + { + ReInit(dstream, 1, 1, 4096); + } + /** Constructor. */ + public SimpleCharStream(java.io.InputStream dstream, String encoding, int startline, + int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException + { + this(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize); + } + + /** Constructor. */ + public SimpleCharStream(java.io.InputStream dstream, int startline, + int startcolumn, int buffersize) + { + this(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize); + } + + /** Constructor. */ + public SimpleCharStream(java.io.InputStream dstream, String encoding, int startline, + int startcolumn) throws java.io.UnsupportedEncodingException + { + this(dstream, encoding, startline, startcolumn, 4096); + } + + /** Constructor. */ + public SimpleCharStream(java.io.InputStream dstream, int startline, + int startcolumn) + { + this(dstream, startline, startcolumn, 4096); + } + + /** Constructor. 
*/ + public SimpleCharStream(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException + { + this(dstream, encoding, 1, 1, 4096); + } + + /** Constructor. */ + public SimpleCharStream(java.io.InputStream dstream) + { + this(dstream, 1, 1, 4096); + } + + /** Reinitialise. */ + public void ReInit(java.io.InputStream dstream, String encoding, int startline, + int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException + { + ReInit(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize); + } + + /** Reinitialise. */ + public void ReInit(java.io.InputStream dstream, int startline, + int startcolumn, int buffersize) + { + ReInit(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize); + } + + /** Reinitialise. */ + public void ReInit(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException + { + ReInit(dstream, encoding, 1, 1, 4096); + } + + /** Reinitialise. */ + public void ReInit(java.io.InputStream dstream) + { + ReInit(dstream, 1, 1, 4096); + } + /** Reinitialise. */ + public void ReInit(java.io.InputStream dstream, String encoding, int startline, + int startcolumn) throws java.io.UnsupportedEncodingException + { + ReInit(dstream, encoding, startline, startcolumn, 4096); + } + /** Reinitialise. */ + public void ReInit(java.io.InputStream dstream, int startline, + int startcolumn) + { + ReInit(dstream, startline, startcolumn, 4096); + } + /** Get token literal value. */ + public String GetImage() + { + if (bufpos >= tokenBegin) + return new String(buffer, tokenBegin, bufpos - tokenBegin + 1); + else + return new String(buffer, tokenBegin, bufsize - tokenBegin) + + new String(buffer, 0, bufpos + 1); + } + + /** Get the suffix. */ + public char[] GetSuffix(int len) + { + char[] ret = new char[len]; + + if ((bufpos + 1) >= len) + System.arraycopy(buffer, bufpos - len + 1, ret, 0, len); + else + { + System.arraycopy(buffer, bufsize - (len - bufpos - 1), ret, 0, + len - bufpos - 1); + System.arraycopy(buffer, 0, ret, len - bufpos - 1, bufpos + 1); + } + + return ret; + } + + /** Reset buffer when finished. */ + public void Done() + { + buffer = null; + bufline = null; + bufcolumn = null; + } + + /** + * Method to adjust line and column numbers for the start of a token. 
+ */ + public void adjustBeginLineColumn(int newLine, int newCol) + { + int start = tokenBegin; + int len; + + if (bufpos >= tokenBegin) + { + len = bufpos - tokenBegin + inBuf + 1; + } + else + { + len = bufsize - tokenBegin + bufpos + 1 + inBuf; + } + + int i = 0, j = 0, k = 0; + int nextColDiff = 0, columnDiff = 0; + + while (i < len && bufline[j = start % bufsize] == bufline[k = ++start % bufsize]) + { + bufline[j] = newLine; + nextColDiff = columnDiff + bufcolumn[k] - bufcolumn[j]; + bufcolumn[j] = newCol + columnDiff; + columnDiff = nextColDiff; + i++; + } + + if (i < len) + { + bufline[j] = newLine++; + bufcolumn[j] = newCol + columnDiff; + + while (i++ < len) + { + if (bufline[j = start % bufsize] != bufline[++start % bufsize]) + bufline[j] = newLine++; + else + bufline[j] = newLine; + } + } + + line = bufline[j]; + column = bufcolumn[j]; + } + boolean getTrackLineColumn() { return trackLineColumn; } + void setTrackLineColumn(boolean tlc) { trackLineColumn = tlc; } +} +/* JavaCC - OriginalChecksum=45efa3b08078b51870be02d67d466976 (do not edit this line) */ diff --git a/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/SparqlCorese.java b/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/SparqlCorese.java index 7ee2485d0..fe98fcd3f 100644 --- a/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/SparqlCorese.java +++ b/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/SparqlCorese.java @@ -1,3 +1,4 @@ +/* SparqlCorese.java */ /* Generated By:JavaCC: Do not edit this line. SparqlCorese.java */ package fr.inria.corese.sparql.triple.javacc1; @@ -84,6 +85,47 @@ public void deprecated(String s1, String s2) { } } + // Method for converting Unicode escape sequences \\uxxxx and \\Uxxxxxxxx + String convertUnicodeSequences(String str) { + StringBuilder sb = new StringBuilder(); + for (int i = 0; i < str.length(); i++) { + char ch = str.charAt(i); + // Checks whether the current character is a backslash and whether there is another character before it + if (ch == '\\' && i + 1 < str.length()) { + // Checks whether it is a literal escape sequence (preceded by a backslash) + if (i > 0 && str.charAt(i - 1) == '\\') { + sb.append(ch); + continue; + } + + char nextCh = str.charAt(i + 1); + if (nextCh == 'u' && i + 5 < str.length()) { // Manage \\uxxxx + int code = Integer.parseInt(str.substring(i + 2, i + 6), 16); + sb.append(Character.toChars(code)); + i += 5; // Skip escape sequence + + } else if (nextCh == 'U' && i + 9 < str.length()) { // Manage \\Uxxxxxxxx + int code = Integer.parseInt(str.substring(i + 2, i + 10), 16); + for (char c : Character.toChars(code)) { + sb.append(c); + } + i += 9; // Skip escape sequence + } else { + sb.append(ch); // If this is not an escape sequence, add the character as is + } + } else { + // Handles the case where a literal backslash is immediately followed by another character (not 'u' or 'U') + if (ch == '\\' && i > 0 && str.charAt(i - 1) == '\\') { + // Do nothing here to avoid adding the literal backslash a second time + } else { + sb.append(ch); + } + } + } + return sb.toString(); + } + + public ParseException createStopException() { return new ParseException("stop"); } @@ -165,30 +207,27 @@ Metadata metadata(Metadata global, Metadata local) { } } - final public Exp Start() throws ParseException { - Exp stack; - stack = new And(); + final public Exp Start() throws ParseException {Exp stack; +stack = new And(); stack = Query(); jj_consume_token(0); - {if (true) return stack;} +{if ("" != null) return stack;} throw new 
Error("Missing return statement in function"); - } +} - final public Exp Rule() throws ParseException { - Exp stack; - stack = new And(); + final public Exp Rule() throws ParseException {Exp stack; +stack = new And(); Prolog(); stack = GroupGraphPattern(); jj_consume_token(0); - {if (true) return stack;} +{if ("" != null) return stack;} throw new Error("Missing return statement in function"); - } +} - final public Exp Query() throws ParseException { - Exp stack; Metadata global=null, la = null; - stack = BasicGraphPattern.create(); + final public Exp Query() throws ParseException {Exp stack; Metadata global=null, la = null; +stack = BasicGraphPattern.create(); Prolog(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case ATPATH: case LANGTAG: case SELECT: @@ -209,9 +248,9 @@ final public Exp Query() throws ParseException { case WITH: case FUNCTION: case PACKAGE: - case LBRACE: + case LBRACE:{ la = Annotate(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case SELECT: case DESCRIBE: case CONSTRUCT: @@ -227,22 +266,26 @@ final public Exp Query() throws ParseException { case COPY: case INSERT: case DELETE: - case WITH: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case SELECT: - astq.setAnnotation(la); + case WITH:{ + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case SELECT:{ +astq.setAnnotation(la); stack = SelectQuery(la); break; + } case CONSTRUCT: - case RULE: + case RULE:{ stack = ConstructQuery(la); break; - case DESCRIBE: + } + case DESCRIBE:{ stack = DescribeQuery(la); break; - case ASK: + } + case ASK:{ stack = AskQuery(la); break; + } case LOAD: case CLEAR: case DROP: @@ -252,12 +295,14 @@ final public Exp Query() throws ParseException { case COPY: case INSERT: case DELETE: - case WITH: + case WITH:{ UpdateQuery(la); break; - case TEMPLATE: + } + case TEMPLATE:{ stack = TemplateQuery(la); break; + } default: jj_la1[0] = jj_gen; jj_consume_token(-1); @@ -265,17 +310,20 @@ final public Exp Query() throws ParseException { } Defun(); break; + } case FUNCTION: case PACKAGE: - case LBRACE: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case FUNCTION: + case LBRACE:{ + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case FUNCTION:{ Function(global, la); break; + } case PACKAGE: - case LBRACE: + case LBRACE:{ Package(la); break; + } default: jj_la1[1] = jj_gen; jj_consume_token(-1); @@ -283,77 +331,82 @@ final public Exp Query() throws ParseException { } Defun(); break; + } default: jj_la1[2] = jj_gen; jj_consume_token(-1); throw new ParseException(); } break; + } default: jj_la1[3] = jj_gen; ; } - {if (true) return stack;} +{if ("" != null) return stack;} throw new Error("Missing return statement in function"); - } +} - final public void Defun() throws ParseException { - Metadata global = null, la = null; + final public void Defun() throws ParseException {Metadata global = null, la = null; label_1: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case ATPATH: case LANGTAG: case FUNCTION: case PACKAGE: - case LBRACE: + case LBRACE:{ ; break; + } default: jj_la1[4] = jj_gen; break label_1; } la = Annotate(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case FUNCTION: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case FUNCTION:{ Function(global, la); break; + } case PACKAGE: - case LBRACE: + case LBRACE:{ Package(la); break; + } default: jj_la1[5] = jj_gen; jj_consume_token(-1); throw new ParseException(); } } - } +} - final public Exp nquadPattern() throws ParseException { 
- Exp stack; Atom subject, predicate, object, graph=null; - stack = BasicGraphPattern.create(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + final public Exp nquadPattern() throws ParseException {Exp stack; Atom subject, predicate, object, graph=null; +stack = BasicGraphPattern.create(); + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case ABASE: - case BASE: + case BASE:{ BaseDecl(); break; + } default: jj_la1[6] = jj_gen; ; } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case APREFIX: - case PREFIX: + case PREFIX:{ PrefixDecl(); break; + } default: jj_la1[7] = jj_gen; ; } label_2: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -369,18 +422,19 @@ final public Exp nquadPattern() throws ParseException { case STRING_LITERAL_LONG2: case LPAREN: case ANON: - case LTLT: + case LTLT:{ ; break; + } default: jj_la1[8] = jj_gen; break label_2; } - graph = null; +graph = null; subject = nquadTerm(stack); predicate = GraphTerm(stack); object = nquadTerm(stack); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -395,24 +449,24 @@ final public Exp nquadPattern() throws ParseException { case STRING_LITERAL_LONG1: case STRING_LITERAL_LONG2: case LPAREN: - case ANON: + case ANON:{ graph = GraphTerm(stack); break; + } default: jj_la1[9] = jj_gen; ; } jj_consume_token(DOT); - handler.createNquad(subject, predicate, object, graph); +handler.createNquad(subject, predicate, object, graph); } jj_consume_token(0); - {if (true) return stack;} +{if ("" != null) return stack;} throw new Error("Missing return statement in function"); - } +} - final public Atom nquadTerm(Exp stack) throws ParseException { - Atom at; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + final public Atom nquadTerm(Exp stack) throws ParseException {Atom at; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -427,27 +481,28 @@ final public Atom nquadTerm(Exp stack) throws ParseException { case STRING_LITERAL_LONG1: case STRING_LITERAL_LONG2: case LPAREN: - case ANON: + case ANON:{ at = GraphTerm(stack); break; - case LTLT: + } + case LTLT:{ at = StarTriple(stack); break; + } default: jj_la1[10] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return at;} +{if ("" != null) return at;} throw new Error("Missing return statement in function"); - } +} - final public Exp LoadPattern() throws ParseException { - Exp stack; - stack = BasicGraphPattern.create(); + final public Exp LoadPattern() throws ParseException {Exp stack; +stack = BasicGraphPattern.create(); label_3: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -477,36 +532,40 @@ final public Exp LoadPattern() throws ParseException { case ANON: case LTLT: case AT: - case VAR3: + case VAR3:{ ; break; + } default: jj_la1[11] = jj_gen; break label_3; } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case ABASE: - case BASE: + case BASE:{ BaseDecl(); break; + } case APREFIX: - case PREFIX: + case PREFIX:{ PrefixDecl(); break; + } default: jj_la1[14] = jj_gen; if (jj_2_1(2)) { TriplesSameSubject(stack); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case DOT: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case DOT:{ jj_consume_token(DOT); break; + } default: jj_la1[12] = jj_gen; ; } } else { - 
switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -515,17 +574,19 @@ final public Exp LoadPattern() throws ParseException { case VAR2: case GRAPH: case ANON: - case VAR3: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case GRAPH: + case VAR3:{ + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case GRAPH:{ jj_consume_token(GRAPH); break; + } default: jj_la1[13] = jj_gen; ; } BasicGraphGraphPattern(stack); break; + } default: jj_la1[15] = jj_gen; jj_consume_token(-1); @@ -535,85 +596,90 @@ final public Exp LoadPattern() throws ParseException { } } jj_consume_token(0); - {if (true) return stack;} +{if ("" != null) return stack;} throw new Error("Missing return statement in function"); - } +} - final public Exp LoadPattern2() throws ParseException { - Exp stack; + final public Exp LoadPattern2() throws ParseException {Exp stack; Prolog(); stack = BodyQuadPattern(); jj_consume_token(0); - {if (true) return stack;} +{if ("" != null) return stack;} throw new Error("Missing return statement in function"); - } +} final public void Prolog() throws ParseException { label_4: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case ABASE: case APREFIX: case BASE: - case PREFIX: + case PREFIX:{ ; break; + } default: jj_la1[16] = jj_gen; break label_4; } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case ABASE: - case BASE: + case BASE:{ BaseDecl(); break; + } case APREFIX: - case PREFIX: + case PREFIX:{ PrefixDecl(); break; + } default: jj_la1[17] = jj_gen; jj_consume_token(-1); throw new ParseException(); } } - } +} - final public void BaseDecl() throws ParseException { - Token t; String s; Triple triple; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case BASE: + final public void BaseDecl() throws ParseException {Token t; String s; Triple triple; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case BASE:{ jj_consume_token(BASE); break; - case ABASE: + } + case ABASE:{ jj_consume_token(ABASE); break; + } default: jj_la1[18] = jj_gen; jj_consume_token(-1); throw new ParseException(); } s = Q_IRI_ref(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case DOT: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case DOT:{ jj_consume_token(DOT); break; + } default: jj_la1[19] = jj_gen; ; } - defBase(s); - } +defBase(s); +} - final public void PrefixDecl() throws ParseException { - Token t1, t2; String s; Triple triple; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case PREFIX: + final public void PrefixDecl() throws ParseException {Token t1, t2; String s; Triple triple; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case PREFIX:{ jj_consume_token(PREFIX); break; - case APREFIX: + } + case APREFIX:{ jj_consume_token(APREFIX); break; + } default: jj_la1[20] = jj_gen; jj_consume_token(-1); @@ -621,42 +687,45 @@ final public void PrefixDecl() throws ParseException { } t2 = jj_consume_token(QNAME_NS); s = Q_IRI_ref(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case DOT: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case DOT:{ jj_consume_token(DOT); break; + } default: jj_la1[21] = jj_gen; ; } - defNamespace(t2.image, s); - } +defNamespace(t2.image, s); +} // ******************************************************************* // Update - final public void UpdateQuery(Metadata la) throws ParseException { - astq.setResultForm(ASTQuery.QT_UPDATE); + final public +void UpdateQuery(Metadata la) throws ParseException { +astq.setResultForm(ASTQuery.QT_UPDATE); astq.set(astu); 
astu.defProlog(); UpdateQuery1(); - astq.setAnnotation(la) ; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case PRAGMA: +astq.setAnnotation(la) ; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case PRAGMA:{ Pragma(); break; + } default: jj_la1[22] = jj_gen; ; } - } +} final public void UpdateQuery1() throws ParseException { UpdateQuery2(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case SEMICOLON: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case SEMICOLON:{ jj_consume_token(SEMICOLON); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case ABASE: case APREFIX: case BASE: @@ -670,218 +739,244 @@ final public void UpdateQuery1() throws ParseException { case COPY: case INSERT: case DELETE: - case WITH: + case WITH:{ Prolog(); - astu.defProlog(); +astu.defProlog(); UpdateQuery1(); break; + } default: jj_la1[23] = jj_gen; ; } break; + } default: jj_la1[24] = jj_gen; ; } - } +} - final public void UpdateQuery2() throws ParseException { - Exp stack; Basic ope; + final public void UpdateQuery2() throws ParseException {Exp stack; Basic ope; Constant graph, uri, target; boolean silent, defaut, named, all; - astq.reset(); - stack = BasicGraphPattern.create(); +astq.reset(); +stack = BasicGraphPattern.create(); graph = null; uri = null; target = null; silent = false; defaut = false; all = false; ope = null; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case LOAD: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case LOAD:{ jj_consume_token(LOAD); - ope = Basic.create(Update.LOAD); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case SILENT: +ope = Basic.create(Update.LOAD); + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case SILENT:{ jj_consume_token(SILENT); - ope.setSilent(true); +ope.setSilent(true); break; + } default: jj_la1[25] = jj_gen; ; } uri = IRIrefConst(); - ope.setURI(uri); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case INTO: +ope.setURI(uri); + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case INTO:{ jj_consume_token(INTO); jj_consume_token(GRAPH); graph = IRIrefConst(); - ope.setTarget(graph); +ope.setTarget(graph); break; + } default: jj_la1[26] = jj_gen; ; } break; + } case CLEAR: - case DROP: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case CLEAR: + case DROP:{ + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case CLEAR:{ jj_consume_token(CLEAR); - ope = Basic.create(Update.CLEAR); +ope = Basic.create(Update.CLEAR); break; - case DROP: + } + case DROP:{ jj_consume_token(DROP); - ope = Basic.create(Update.DROP); +ope = Basic.create(Update.DROP); break; + } default: jj_la1[27] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case SILENT: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case SILENT:{ jj_consume_token(SILENT); - ope.setSilent(true); +ope.setSilent(true); break; + } default: jj_la1[28] = jj_gen; ; } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case GRAPH: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case GRAPH:{ jj_consume_token(GRAPH); graph = IRIrefConst(); - ope.setGraph(graph); +ope.setGraph(graph); break; - case DEFAUT: + } + case DEFAUT:{ jj_consume_token(DEFAUT); - ope.setDefault(true); +ope.setDefault(true); break; - case NAMED: + } + case NAMED:{ jj_consume_token(NAMED); - ope.setNamed(true); +ope.setNamed(true); break; - case ALL: + } + case ALL:{ jj_consume_token(ALL); - ope.setAll(true); +ope.setAll(true); break; + } default: jj_la1[29] = jj_gen; jj_consume_token(-1); throw new ParseException(); } break; - case CREATE: + } + case CREATE:{ jj_consume_token(CREATE); - ope = 
Basic.create(Update.CREATE); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case SILENT: +ope = Basic.create(Update.CREATE); + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case SILENT:{ jj_consume_token(SILENT); - ope.setSilent(true); +ope.setSilent(true); break; + } default: jj_la1[30] = jj_gen; ; } jj_consume_token(GRAPH); graph = IRIrefConst(); - ope.setGraph(graph); +ope.setGraph(graph); break; + } case ADD: case MOVE: - case COPY: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case ADD: + case COPY:{ + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case ADD:{ jj_consume_token(ADD); - ope = Basic.create(Update.ADD); +ope = Basic.create(Update.ADD); break; - case MOVE: + } + case MOVE:{ jj_consume_token(MOVE); - ope = Basic.create(Update.MOVE); +ope = Basic.create(Update.MOVE); break; - case COPY: + } + case COPY:{ jj_consume_token(COPY); - ope = Basic.create(Update.COPY); +ope = Basic.create(Update.COPY); break; + } default: jj_la1[31] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case SILENT: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case SILENT:{ jj_consume_token(SILENT); - ope.setSilent(true); +ope.setSilent(true); break; + } default: jj_la1[32] = jj_gen; ; } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: - case GRAPH: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case GRAPH: + case GRAPH:{ + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case GRAPH:{ jj_consume_token(GRAPH); break; + } default: jj_la1[33] = jj_gen; ; } graph = IRIrefConst(); - ope.setGraph(graph); +ope.setGraph(graph); break; - case DEFAUT: + } + case DEFAUT:{ jj_consume_token(DEFAUT); - ope.setDefault(true); +ope.setDefault(true); break; + } default: jj_la1[34] = jj_gen; jj_consume_token(-1); throw new ParseException(); } jj_consume_token(TO); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: - case GRAPH: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case GRAPH: + case GRAPH:{ + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case GRAPH:{ jj_consume_token(GRAPH); break; + } default: jj_la1[35] = jj_gen; ; } graph = IRIrefConst(); - ope.setTarget(graph); +ope.setTarget(graph); break; - case DEFAUT: + } + case DEFAUT:{ jj_consume_token(DEFAUT); - ope.setDefault(true); +ope.setDefault(true); break; + } default: jj_la1[36] = jj_gen; jj_consume_token(-1); throw new ParseException(); } break; + } default: jj_la1[37] = jj_gen; if (jj_2_2(2)) { DeleteInsertData(); } else { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case INSERT: case DELETE: - case WITH: + case WITH:{ DeleteInsertWhere(); break; + } default: jj_la1[38] = jj_gen; jj_consume_token(-1); @@ -889,91 +984,98 @@ final public void UpdateQuery2() throws ParseException { } } } - if (ope!=null) astu.add(ope); - } +if (ope!=null) astu.add(ope); +} - final public void DeleteInsertData() throws ParseException { - Exp exp; Composite ope, upd; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case INSERT: + final public void DeleteInsertData() throws ParseException {Exp exp; Composite ope, upd; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case INSERT:{ jj_consume_token(INSERT); jj_consume_token(DATA); - astq.setInsertData(true); +astq.setInsertData(true); exp = QuadPattern(); - astq.setInsertData(false); +astq.setInsertData(false); ope = Composite.create(Update.INSERT, exp); break; - case DELETE: + } + case DELETE:{ 
jj_consume_token(DELETE); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case DATA: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case DATA:{ jj_consume_token(DATA); - handler.enterDeleteData(); +handler.enterDeleteData(); exp = QuadPattern(); - handler.leaveDeleteData(); - ope = Composite.create(Update.DELETE, exp); +handler.leaveDeleteData(); +ope = Composite.create(Update.DELETE, exp); break; - case WHERE: + } + case WHERE:{ jj_consume_token(WHERE); - handler.enterWhere(); +handler.enterWhere(); exp = QuadPattern(); - handler.leaveWhere(); - upd = Composite.create(Update.COMPOSITE); upd.setBody(exp); +handler.leaveWhere(); +upd = Composite.create(Update.COMPOSITE); upd.setBody(exp); ope = Composite.create(Update.DELETE); ope.setPattern(exp); upd.add(ope); ope = upd; break; + } default: jj_la1[39] = jj_gen; jj_consume_token(-1); throw new ParseException(); } break; + } default: jj_la1[40] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - astu.add(ope); - } +astu.add(ope); +} // create an AST as a whole - final public void DeleteInsertWhere() throws ParseException { - Exp exp; Composite upd, ope; Constant uri; Values values; - upd = Composite.create(Update.COMPOSITE); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case WITH: + final public +void DeleteInsertWhere() throws ParseException {Exp exp; Composite upd, ope; Constant uri; Values values; +upd = Composite.create(Update.COMPOSITE); + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case WITH:{ jj_consume_token(WITH); uri = IRIrefConst(); - upd.setWith(uri); +upd.setWith(uri); break; + } default: jj_la1[41] = jj_gen; ; } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case DELETE: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case DELETE:{ jj_consume_token(DELETE); - handler.enterDelete(); +handler.enterDelete(); exp = QuadPattern(); - handler.leaveDelete(); - ope = Composite.create(Update.DELETE); ope.setPattern(exp); upd.add(ope); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case INSERT: +handler.leaveDelete(); +ope = Composite.create(Update.DELETE); ope.setPattern(exp); upd.add(ope); + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case INSERT:{ jj_consume_token(INSERT); exp = QuadPattern(); - ope = Composite.create(Update.INSERT); ope.setPattern(exp); upd.add(ope); +ope = Composite.create(Update.INSERT); ope.setPattern(exp); upd.add(ope); break; + } default: jj_la1[42] = jj_gen; ; } break; - case INSERT: + } + case INSERT:{ jj_consume_token(INSERT); exp = QuadPattern(); - ope = Composite.create(Update.INSERT); ope.setPattern(exp); upd.add(ope); +ope = Composite.create(Update.INSERT); ope.setPattern(exp); upd.add(ope); break; + } default: jj_la1[43] = jj_gen; jj_consume_token(-1); @@ -981,27 +1083,30 @@ final public void DeleteInsertWhere() throws ParseException { } label_5: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case USING: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case USING:{ ; break; + } default: jj_la1[44] = jj_gen; break label_5; } jj_consume_token(USING); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: - case QNAME: + case QNAME:{ uri = IRIrefConst(); - upd.addUsing(uri); +upd.addUsing(uri); break; - case NAMED: + } + case NAMED:{ jj_consume_token(NAMED); uri = IRIrefConst(); - upd.addNamed(uri); +upd.addNamed(uri); break; + } default: jj_la1[45] = jj_gen; jj_consume_token(-1); @@ -1009,34 +1114,33 @@ final public void DeleteInsertWhere() throws ParseException { } } jj_consume_token(WHERE); - handler.enterWhere(); +handler.enterWhere(); 
exp = GroupGraphPattern(); - handler.leaveWhere(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case VALUES: +handler.leaveWhere(); + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case VALUES:{ values = Values(); - upd.setValues(values); +upd.setValues(values); break; + } default: jj_la1[46] = jj_gen; ; } - upd.setBody(exp); astu.add(upd); - } +upd.setBody(exp); astu.add(upd); +} - final public Exp QuadPattern() throws ParseException { - Exp stack; + final public Exp QuadPattern() throws ParseException {Exp stack; jj_consume_token(LBRACE); stack = BodyQuadPattern(); jj_consume_token(RBRACE); - {if (true) return stack;} +{if ("" != null) return stack;} throw new Error("Missing return statement in function"); - } +} - final public Exp BodyQuadPattern() throws ParseException { - Exp stack; - stack = BasicGraphPattern.create(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + final public Exp BodyQuadPattern() throws ParseException {Exp stack; +stack = BasicGraphPattern.create(); + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -1061,25 +1165,27 @@ final public Exp BodyQuadPattern() throws ParseException { case ANON: case LTLT: case AT: - case VAR3: + case VAR3:{ BlockOfTriples(stack); break; + } default: jj_la1[47] = jj_gen; ; } label_6: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case GRAPH: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case GRAPH:{ ; break; + } default: jj_la1[48] = jj_gen; break label_6; } GraphGraphPattern(stack); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -1104,29 +1210,33 @@ final public Exp BodyQuadPattern() throws ParseException { case ANON: case LTLT: case AT: - case VAR3: + case VAR3:{ BlockOfTriples(stack); break; + } default: jj_la1[49] = jj_gen; ; } } - {if (true) return stack;} +{if ("" != null) return stack;} throw new Error("Missing return statement in function"); - } +} // *********************** End Update ******************************************************** - final public Exp TemplateQuery(Metadata la) throws ParseException { - Exp stack; - stack = BasicGraphPattern.create(); + final public + + +Exp TemplateQuery(Metadata la) throws ParseException {Exp stack; +stack = BasicGraphPattern.create(); Template(stack); label_7: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case FROM: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case FROM:{ ; break; + } default: jj_la1[50] = jj_gen; break label_7; @@ -1135,60 +1245,63 @@ final public Exp TemplateQuery(Metadata la) throws ParseException { } stack = WhereClause(); SolutionModifier(); - astq.setTemplate(true); +astq.setTemplate(true); astq.setAnnotation(la); - {if (true) return stack;} + {if ("" != null) return stack;} throw new Error("Missing return statement in function"); - } +} - final public void Template(Exp stack) throws ParseException { - String str; Variable var; Constant cst; Expression exp, sep; ExpressionList list; + final public void Template(Exp stack) throws ParseException {String str; Variable var; Constant cst; Expression exp, sep; ExpressionList list; List varList; jj_consume_token(TEMPLATE); - cst = null; astt = astq.defineTemplate(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { +cst = null; astt = astq.defineTemplate(); + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: - case QNAME: + case QNAME:{ cst = IRIrefConst(); - astq.setName(cst); +astq.setName(cst); break; + } default: jj_la1[51] = jj_gen; ; } - switch 
((jj_ntk==-1)?jj_ntk():jj_ntk) { - case LPAREN: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case LPAREN:{ jj_consume_token(LPAREN); var = Var(); - astq.defArg(var); +astq.defArg(var); label_8: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case COMMA: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case COMMA:{ ; break; + } default: jj_la1[52] = jj_gen; break label_8; } jj_consume_token(COMMA); var = Var(); - astq.defArg(var); +astq.defArg(var); } jj_consume_token(RPAREN); break; + } default: jj_la1[53] = jj_gen; ; } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case LBRACKET: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case LBRACKET:{ jj_consume_token(LBRACKET); cst = IRIrefConst(); - astq.defProfile(cst); +astq.defProfile(cst); jj_consume_token(RBRACKET); break; + } default: jj_la1[54] = jj_gen; ; @@ -1196,7 +1309,7 @@ final public void Template(Exp stack) throws ParseException { jj_consume_token(LBRACE); label_9: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -1323,37 +1436,39 @@ final public void Template(Exp stack) throws ParseException { case ANON: case LTLT: case AT: - case VAR3: + case VAR3:{ ; break; + } default: jj_la1[55] = jj_gen; break label_9; } exp = texp(stack); - astt.addTemplate(exp); +astt.addTemplate(exp); } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case SEMICOLON: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case SEMICOLON:{ jj_consume_token(SEMICOLON); jj_consume_token(SEPARATOR); jj_consume_token(EQ); sep = PrimaryExpression(stack); - astt.setSeparator(sep); +astt.setSeparator(sep); break; + } default: jj_la1[56] = jj_gen; ; } jj_consume_token(RBRACE); - } +} - final public Expression texp(Exp stack) throws ParseException { - Expression exp; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case AT: + final public Expression texp(Exp stack) throws ParseException {Expression exp; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case AT:{ exp = XML(stack); break; + } case Q_IRIref: case QNAME_NS: case QNAME: @@ -1474,57 +1589,61 @@ final public Expression texp(Exp stack) throws ParseException { case LPAREN: case ANON: case LTLT: - case VAR3: + case VAR3:{ exp = PrimaryExpression(stack); break; + } case GROUP: case FORMAT: case BOX: case IBOX: - case SBOX: + case SBOX:{ exp = TemplateExpression(stack); break; + } default: jj_la1[57] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return exp;} +{if ("" != null) return exp;} throw new Error("Missing return statement in function"); - } +} - final public Expression TemplateExpression(Exp stack) throws ParseException { - Expression exp; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + final public Expression TemplateExpression(Exp stack) throws ParseException {Expression exp; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case BOX: case IBOX: - case SBOX: + case SBOX:{ exp = box(stack); break; - case GROUP: + } + case GROUP:{ exp = group(stack); break; - case FORMAT: + } + case FORMAT:{ exp = format(stack); break; + } default: jj_la1[58] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return exp;} +{if ("" != null) return exp;} throw new Error("Missing return statement in function"); - } +} - final public Expression group(Exp stack) throws ParseException { - Expression box, exp, sep; ExpressionList list; + final public Expression group(Exp stack) throws ParseException {Expression box, exp, sep; ExpressionList list; jj_consume_token(GROUP); - list = new ExpressionList(); - 
switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case DISTINCT: +list = new ExpressionList(); + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case DISTINCT:{ jj_consume_token(DISTINCT); - list.setDistinct(true); +list.setDistinct(true); break; + } default: jj_la1[59] = jj_gen; ; @@ -1532,7 +1651,7 @@ final public Expression group(Exp stack) throws ParseException { jj_consume_token(LBRACE); label_10: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -1659,42 +1778,43 @@ final public Expression group(Exp stack) throws ParseException { case ANON: case LTLT: case AT: - case VAR3: + case VAR3:{ ; break; + } default: jj_la1[60] = jj_gen; break label_10; } exp = texp(stack); - list.add(exp); +list.add(exp); } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case SEMICOLON: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case SEMICOLON:{ jj_consume_token(SEMICOLON); jj_consume_token(SEPARATOR); jj_consume_token(EQ); sep = PrimaryExpression(stack); - list.setExpSeparator(sep); +list.setExpSeparator(sep); break; + } default: jj_la1[61] = jj_gen; ; } jj_consume_token(RBRACE); - exp = astt.createGroup(list); - {if (true) return exp;} +exp = astt.createGroup(list); + {if ("" != null) return exp;} throw new Error("Missing return statement in function"); - } +} - final public Expression format(Exp stack) throws ParseException { - Expression box, exp, sep; ExpressionList list; Token t; + final public Expression format(Exp stack) throws ParseException {Expression box, exp, sep; ExpressionList list; Token t; jj_consume_token(FORMAT); - list = new ExpressionList(); +list = new ExpressionList(); jj_consume_token(LBRACE); label_11: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -1821,44 +1941,47 @@ final public Expression format(Exp stack) throws ParseException { case ANON: case LTLT: case AT: - case VAR3: + case VAR3:{ ; break; + } default: jj_la1[62] = jj_gen; break label_11; } exp = texp(stack); - list.add(exp); +list.add(exp); } jj_consume_token(RBRACE); - box = astt.createFormat(list); - {if (true) return box;} +box = astt.createFormat(list); + {if ("" != null) return box;} throw new Error("Missing return statement in function"); - } +} - final public Expression box(Exp stack) throws ParseException { - Expression box, exp, sep; ExpressionList list; Token t; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case BOX: + final public Expression box(Exp stack) throws ParseException {Expression box, exp, sep; ExpressionList list; Token t; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case BOX:{ t = jj_consume_token(BOX); break; - case IBOX: + } + case IBOX:{ t = jj_consume_token(IBOX); break; - case SBOX: + } + case SBOX:{ t = jj_consume_token(SBOX); break; + } default: jj_la1[63] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - list = new ExpressionList(); +list = new ExpressionList(); jj_consume_token(LBRACE); label_12: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -1985,61 +2108,64 @@ final public Expression box(Exp stack) throws ParseException { case ANON: case LTLT: case AT: - case VAR3: + case VAR3:{ ; break; + } default: jj_la1[64] = jj_gen; break label_12; } exp = texp(stack); - list.add(exp); +list.add(exp); } jj_consume_token(RBRACE); - box = astt.createBox(list, t.image ); - {if (true) return box;} +box = 
astt.createBox(list, t.image ); + {if ("" != null) return box;} throw new Error("Missing return statement in function"); - } +} - final public Expression XML(Exp stack) throws ParseException { - Expression box, exp, val, sep; ExpressionList list, attr; Token t; Constant uri = null, str; + final public Expression XML(Exp stack) throws ParseException {Expression box, exp, val, sep; ExpressionList list, attr; Token t; Constant uri = null, str; ArrayList lattr = null; jj_consume_token(AT); uri = IRIrefConst(); - list = new ExpressionList(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case LPAREN: +list = new ExpressionList(); + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case LPAREN:{ jj_consume_token(LPAREN); - lattr = new ArrayList(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case LPAREN: +lattr = new ArrayList(); + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case LPAREN:{ label_13: while (true) { jj_consume_token(LPAREN); str = RDFLiteral(); val = PrimaryExpression(stack); - attr = new ExpressionList(); attr.add(str); attr.add(val); +attr = new ExpressionList(); attr.add(str); attr.add(val); lattr.add(attr); jj_consume_token(RPAREN); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case LPAREN: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case LPAREN:{ ; break; + } default: jj_la1[65] = jj_gen; break label_13; } } break; + } case STRING_LITERAL1: case STRING_LITERAL2: case STRING_LITERAL_LONG1: - case STRING_LITERAL_LONG2: + case STRING_LITERAL_LONG2:{ str = RDFLiteral(); val = PrimaryExpression(stack); - attr = new ExpressionList(); attr.add(str); attr.add(val); +attr = new ExpressionList(); attr.add(str); attr.add(val); lattr.add(attr); break; + } default: jj_la1[66] = jj_gen; jj_consume_token(-1); @@ -2047,6 +2173,7 @@ final public Expression XML(Exp stack) throws ParseException { } jj_consume_token(RPAREN); break; + } default: jj_la1[67] = jj_gen; ; @@ -2054,7 +2181,7 @@ final public Expression XML(Exp stack) throws ParseException { jj_consume_token(LBRACE); label_14: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -2181,37 +2308,39 @@ final public Expression XML(Exp stack) throws ParseException { case ANON: case LTLT: case AT: - case VAR3: + case VAR3:{ ; break; + } default: jj_la1[68] = jj_gen; break label_14; } exp = texp(stack); - list.add(exp); +list.add(exp); } jj_consume_token(RBRACE); - box = astt.createXML(uri, lattr, list); - {if (true) return box;} +box = astt.createXML(uri, lattr, list); + {if ("" != null) return box;} throw new Error("Missing return statement in function"); - } +} // ********************** END PP Template ****************************** // ---- Query type clauses - final public Exp SelectQuery(Metadata la) throws ParseException { - Exp stack; + final public +Exp SelectQuery(Metadata la) throws ParseException {Exp stack; jj_consume_token(SELECT); OneMoreListMerge(); GroupCountSortDisplayVar(); label_15: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case FROM: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case FROM:{ ; break; + } default: jj_la1[69] = jj_gen; break label_15; @@ -2220,53 +2349,56 @@ final public Exp SelectQuery(Metadata la) throws ParseException { } stack = WhereClause(); SolutionModifier(); - astq.setResultForm(ASTQuery.QT_SELECT); - {if (true) return stack;} +astq.setResultForm(ASTQuery.QT_SELECT); + {if ("" != null) return stack;} throw new Error("Missing return statement in function"); - } +} - final public Exp 
ConstructQuery(Metadata la) throws ParseException { - Exp stack1, stack2; boolean isRule = false; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case RULE: + final public Exp ConstructQuery(Metadata la) throws ParseException {Exp stack1, stack2; boolean isRule = false; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case RULE:{ jj_consume_token(RULE); - isRule = true; +isRule = true; break; + } default: jj_la1[70] = jj_gen; ; } jj_consume_token(CONSTRUCT); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case LBRACE: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case LBRACE:{ // with a construct pattern stack1 = ConstructTemplate(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case SELECT: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case SELECT:{ jj_consume_token(SELECT); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case VAR1: case VAR2: case LPAREN: case STAR: - case VAR3: + case VAR3:{ SelectVar(); break; + } default: jj_la1[71] = jj_gen; ; } break; + } default: jj_la1[72] = jj_gen; ; } label_16: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case FROM: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case FROM:{ ; break; + } default: jj_la1[73] = jj_gen; break label_16; @@ -2275,19 +2407,21 @@ final public Exp ConstructQuery(Metadata la) throws ParseException { } stack2 = WhereClause(); SolutionModifier(); - astq.setConstruct(stack1); +astq.setConstruct(stack1); astq.setAnnotation(la); astq.setRule(astq.isRule() || isRule); - {if (true) return stack2;} + {if ("" != null) return stack2;} break; + } case FROM: - case WHERE: + case WHERE:{ label_17: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case FROM: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case FROM:{ ; break; + } default: jj_la1[74] = jj_gen; break label_17; @@ -2295,48 +2429,50 @@ final public Exp ConstructQuery(Metadata la) throws ParseException { DatasetClause(); } jj_consume_token(WHERE); - handler.enterWhere(); +handler.enterWhere(); stack2 = ConstructTemplate2(); - handler.leaveWhere(); +handler.leaveWhere(); SolutionModifier(); - astq.duplicateConstruct(stack2); +astq.duplicateConstruct(stack2); astq.setAnnotation(la); - {if (true) return stack2;} + {if ("" != null) return stack2;} break; + } default: jj_la1[75] = jj_gen; jj_consume_token(-1); throw new ParseException(); } throw new Error("Missing return statement in function"); - } +} - final public Exp DescribeQuery(Metadata la) throws ParseException { - Exp stack; Variable v; String s; Atom at; - stack = new And(); + final public Exp DescribeQuery(Metadata la) throws ParseException {Exp stack; Variable v; String s; Atom at; +stack = new And(); jj_consume_token(DESCRIBE); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: case VAR1: case VAR2: - case VAR3: + case VAR3:{ label_18: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case VAR1: case VAR2: - case VAR3: + case VAR3:{ at = Var(); - astq.setDescribe(at); +astq.setDescribe(at); break; + } case Q_IRIref: case QNAME_NS: - case QNAME: + case QNAME:{ at = IRIrefConst(); - astq.setDescribe(at); +astq.setDescribe(at); break; + } default: jj_la1[76] = jj_gen; jj_consume_token(-1); @@ -2349,10 +2485,12 @@ final public Exp DescribeQuery(Metadata la) throws ParseException { } } break; - case STAR: + } + case STAR:{ jj_consume_token(STAR); - astq.setDescribeAll(true); +astq.setDescribeAll(true); break; + } default: jj_la1[77] = jj_gen; 
jj_consume_token(-1); @@ -2373,21 +2511,21 @@ final public Exp DescribeQuery(Metadata la) throws ParseException { ; } SolutionModifier(); - astq.setResultForm(ASTQuery.QT_DESCRIBE); +astq.setResultForm(ASTQuery.QT_DESCRIBE); astq.setAnnotation(la); - {if (true) return stack;} + {if ("" != null) return stack;} throw new Error("Missing return statement in function"); - } +} - final public Exp AskQuery(Metadata la) throws ParseException { - Exp stack; Values values; + final public Exp AskQuery(Metadata la) throws ParseException {Exp stack; Values values; jj_consume_token(ASK); label_20: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case FROM: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case FROM:{ ; break; + } default: jj_la1[78] = jj_gen; break label_20; @@ -2395,144 +2533,155 @@ final public Exp AskQuery(Metadata la) throws ParseException { DatasetClause(); } stack = WhereClause(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case VALUES: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case VALUES:{ values = Values(); - astq.setValues(values); +astq.setValues(values); break; + } default: jj_la1[79] = jj_gen; ; } - astq.setResultForm(ASTQuery.QT_ASK); +astq.setResultForm(ASTQuery.QT_ASK); astq.setAnnotation(la); - {if (true) return stack;} + {if ("" != null) return stack;} throw new Error("Missing return statement in function"); - } +} - final public Exp DeleteQuery() throws ParseException { - Exp stack; + final public Exp DeleteQuery() throws ParseException {Exp stack; jj_consume_token(DELETE); jj_consume_token(STAR); stack = WhereClause(); - astq.setDelete(true); +astq.setDelete(true); astq.setResultForm(ASTQuery.QT_DELETE); - {if (true) return stack;} + {if ("" != null) return stack;} throw new Error("Missing return statement in function"); - } +} // ---- // one more list merge in any order and without list and merge being together - final public void OneMoreListMerge() throws ParseException { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case S_MORE: + final public +void OneMoreListMerge() throws ParseException { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case S_MORE:{ More(); break; + } default: jj_la1[80] = jj_gen; ; } - } +} final public void More() throws ParseException { jj_consume_token(S_MORE); - astq.setMore(true); - } +astq.setMore(true); +} final public void GroupCountSortDisplayVar() throws ParseException { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case DISTINCT: - case REDUCED: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case DISTINCT: + case REDUCED:{ + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case DISTINCT:{ jj_consume_token(DISTINCT); - astq.setDistinct(true); +astq.setDistinct(true); break; - case REDUCED: + } + case REDUCED:{ jj_consume_token(REDUCED); - astq.setReduced(true); +astq.setReduced(true); break; + } default: jj_la1[81] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case SORTED: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case SORTED:{ jj_consume_token(SORTED); - astq.setStrictDistinct(false); +astq.setStrictDistinct(false); break; + } default: jj_la1[82] = jj_gen; ; } break; + } default: jj_la1[83] = jj_gen; ; } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case VAR1: case VAR2: case LPAREN: case STAR: - case VAR3: + case VAR3:{ SelectVar(); break; + } default: jj_la1[84] = jj_gen; ; } - } +} // ***************** BEGIN SPARQL1.1 - final public void SelectVar() throws ParseException { - Variable e1; 
Expression e2; Exp stack; - stack = BasicGraphPattern.create(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + final public +void SelectVar() throws ParseException {Variable e1; Expression e2; Exp stack; +stack = BasicGraphPattern.create(); + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case VAR1: case VAR2: case LPAREN: - case VAR3: + case VAR3:{ label_21: while (true) { if (jj_2_6(2)) { e1 = Var(); - astq.defSelect(e1, null); +astq.defSelect(e1, null); } else { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case LPAREN: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case LPAREN:{ SelectExp(stack); break; + } default: jj_la1[85] = jj_gen; jj_consume_token(-1); throw new ParseException(); } } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case VAR1: case VAR2: case LPAREN: - case VAR3: + case VAR3:{ ; break; + } default: jj_la1[86] = jj_gen; break label_21; } } break; - case STAR: + } + case STAR:{ jj_consume_token(STAR); - astq.setSelectAll(true); +astq.setSelectAll(true); label_22: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case LPAREN: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case LPAREN:{ ; break; + } default: jj_la1[87] = jj_gen; break label_22; @@ -2540,182 +2689,193 @@ final public void SelectVar() throws ParseException { SelectExp(stack); } break; + } default: jj_la1[88] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - } +} final public void SelectExp(Exp stack) throws ParseException { jj_consume_token(LPAREN); FunVar(stack); jj_consume_token(RPAREN); - } +} - final public void FunVar(Exp stack) throws ParseException { - Expression e2; Variable e1; Variable var; + final public void FunVar(Exp stack) throws ParseException {Expression e2; Variable e1; Variable var; e2 = Expression(stack); jj_consume_token(AS); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case VAR1: case VAR2: - case VAR3: + case VAR3:{ var = Var(); - astq.defSelect(var, e2); +astq.defSelect(var, e2); break; - case LPAREN: - var = astq.metaVariable(); + } + case LPAREN:{ +var = astq.metaVariable(); jj_consume_token(LPAREN); e1 = Var(); - var.addVariable(e1); +var.addVariable(e1); label_23: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case COMMA: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case COMMA:{ ; break; + } default: jj_la1[89] = jj_gen; break label_23; } jj_consume_token(COMMA); e1 = Var(); - var.addVariable(e1); +var.addVariable(e1); } jj_consume_token(RPAREN); - astq.defSelect(var, e2); +astq.defSelect(var, e2); break; + } default: jj_la1[90] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - } +} // ********************* END SPARQL 1.1 - final public void DatasetClause() throws ParseException { - Constant s; + final public + + +void DatasetClause() throws ParseException {Constant s; jj_consume_token(FROM); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: - case QNAME: + case QNAME:{ s = IRIrefConst(); - astq.setFrom(s); +astq.setFrom(s); break; - case NAMED: + } + case NAMED:{ jj_consume_token(NAMED); s = IRIrefConst(); - astq.setNamed(s); +astq.setNamed(s); break; + } default: jj_la1[91] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - } +} - final public Exp WhereClause() throws ParseException { - Exp stack; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case WHERE: + final public Exp WhereClause() throws ParseException {Exp stack; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case WHERE:{ 
jj_consume_token(WHERE); - astq.setWhere(true); +astq.setWhere(true); break; + } default: jj_la1[92] = jj_gen; ; } - handler.enterWhere(); +handler.enterWhere(); stack = GroupGraphPattern(); - handler.leaveWhere(); - {if (true) return stack;} +handler.leaveWhere(); +{if ("" != null) return stack;} throw new Error("Missing return statement in function"); - } +} - final public void SolutionModifier() throws ParseException { - Values values; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case RELAX: + final public void SolutionModifier() throws ParseException {Values values; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case RELAX:{ Relax(); break; + } default: jj_la1[93] = jj_gen; ; } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case ORDER: case GROUP: - case HAVING: + case HAVING:{ OrderGroup(); break; + } default: jj_la1[94] = jj_gen; ; } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case LIMIT: - case OFFSET: + case OFFSET:{ LimitOffset(); break; + } default: jj_la1[95] = jj_gen; ; } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case VALUES: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case VALUES:{ values = Values(); - astq.setValues(values); +astq.setValues(values); break; + } default: jj_la1[96] = jj_gen; ; } label_24: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case PRAGMA: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case PRAGMA:{ ; break; + } default: jj_la1[97] = jj_gen; break label_24; } Pragma(); } - } +} - final public Values Values() throws ParseException { - Variable var; Constant val; ArrayList list = null; ArrayList value; Values values = null; + final public Values Values() throws ParseException {Variable var; Constant val; ArrayList list = null; ArrayList value; Values values = null; Expression exp; Exp stack; - list = new ArrayList(); +list = new ArrayList(); values = Values.create(); stack = BasicGraphPattern.create(); jj_consume_token(VALUES); - handler.setInsideValues(true); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case LPAREN: +handler.setInsideValues(true); + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case LPAREN:{ jj_consume_token(LPAREN); label_25: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case VAR1: case VAR2: - case VAR3: + case VAR3:{ ; break; + } default: jj_la1[98] = jj_gen; break label_25; } var = Var(); - list.add(var); +list.add(var); } - values.setVariables(list); +values.setVariables(list); jj_consume_token(RPAREN); jj_consume_token(LBRACE); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case NOT: case THROW: case STOP: @@ -2806,28 +2966,30 @@ final public Values Values() throws ParseException { case OBJECT: case IS_TRIPLE: case SPARQL_COMPARE: - case XPATH: + case XPATH:{ // unnest(exp) exp = BuiltInCall(stack); - values.addExp(exp); +values.addExp(exp); break; + } default: jj_la1[101] = jj_gen; label_26: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case LPAREN: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case LPAREN:{ ; break; + } default: jj_la1[99] = jj_gen; break label_26; } jj_consume_token(LPAREN); - value = new ArrayList(); +value = new ArrayList(); label_27: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -2841,29 +3003,31 @@ final public Values Values() throws ParseException { case STRING_LITERAL2: case STRING_LITERAL_LONG1: case 
STRING_LITERAL_LONG2: - case LTLT: + case LTLT:{ ; break; + } default: jj_la1[100] = jj_gen; break label_27; } val = Value(); - value.add(val); +value.add(val); } - values.addValues(value); +values.addValues(value); jj_consume_token(RPAREN); } } jj_consume_token(RBRACE); break; + } case VAR1: case VAR2: - case VAR3: + case VAR3:{ var = Var(); - list.add(var); values.setVariables(list); +list.add(var); values.setVariables(list); jj_consume_token(LBRACE); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case NOT: case THROW: case STOP: @@ -2954,15 +3118,16 @@ final public Values Values() throws ParseException { case OBJECT: case IS_TRIPLE: case SPARQL_COMPARE: - case XPATH: + case XPATH:{ exp = BuiltInCall(stack); - values.addExp(exp); +values.addExp(exp); break; + } default: jj_la1[103] = jj_gen; label_28: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -2976,213 +3141,234 @@ final public Values Values() throws ParseException { case STRING_LITERAL2: case STRING_LITERAL_LONG1: case STRING_LITERAL_LONG2: - case LTLT: + case LTLT:{ ; break; + } default: jj_la1[102] = jj_gen; break label_28; } - value = new ArrayList(); +value = new ArrayList(); val = Value(); - value.add(val); - values.addValues(value); +value.add(val); +values.addValues(value); } } jj_consume_token(RBRACE); break; + } default: jj_la1[104] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - handler.setInsideValues(false); {if (true) return astq.complete(values);} +handler.setInsideValues(false); {if ("" != null) return astq.complete(values);} throw new Error("Missing return statement in function"); - } +} - final public Constant Value() throws ParseException { - Constant cst = null; String s; Token t1; Exp exp; Atom triple; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case Q_IRIref: + final public Constant Value() throws ParseException {Constant cst = null; String s; Token t1; Exp exp; Atom triple; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case Q_IRIref:{ s = Q_IRI_ref(); - cst = astq.createURI(s); +cst = astq.createURI(s); break; - case QNAME: + } + case QNAME:{ t1 = jj_consume_token(QNAME); - cst = astq.createQName(t1.image); +cst = astq.createQName(t1.image); break; - case QNAME_NS: + } + case QNAME_NS:{ t1 = jj_consume_token(QNAME_NS); - cst = astq.createQName(t1.image); +cst = astq.createQName(t1.image); break; - case UNDEF: + } + case UNDEF:{ t1 = jj_consume_token(UNDEF); - cst = null; +cst = null; break; + } case STRING_LITERAL1: case STRING_LITERAL2: case STRING_LITERAL_LONG1: - case STRING_LITERAL_LONG2: + case STRING_LITERAL_LONG2:{ cst = RDFLiteral(); break; + } case INTEGER: case DECIMAL: - case DOUBLE: + case DOUBLE:{ cst = NumericLiteral(); break; + } case TRUE: - case FALSE: + case FALSE:{ cst = BooleanLiteral(); break; - case LTLT: - exp = BasicGraphPattern.create(); + } + case LTLT:{ +exp = BasicGraphPattern.create(); triple = StarTriple(exp); - cst = triple.getConstant(); +cst = triple.getConstant(); break; + } default: jj_la1[105] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return cst;} +{if ("" != null) return cst;} throw new Error("Missing return statement in function"); - } +} - final public void Pragma() throws ParseException { - Exp exp; String uri; - uri = null; + final public void Pragma() throws ParseException {Exp exp; String uri; +uri = null; jj_consume_token(PRAGMA); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch 
((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: - case QNAME: + case QNAME:{ uri = IRIref(); break; + } default: jj_la1[106] = jj_gen; ; } exp = GroupGraphPattern(); - astq.setPragma(uri, exp); - } +astq.setPragma(uri, exp); +} - final public void Relax() throws ParseException { - Token t1; + final public void Relax() throws ParseException {Token t1; jj_consume_token(RELAX); jj_consume_token(BY); t1 = jj_consume_token(QNAME); - astq.setDistance(t1.image); - } +astq.setDistance(t1.image); +} final public void LimitOffset() throws ParseException { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case LIMIT: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case LIMIT:{ ProjLimitClause(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case OFFSET: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case OFFSET:{ OffsetClause(); break; + } default: jj_la1[107] = jj_gen; ; } break; - case OFFSET: + } + case OFFSET:{ OffsetClause(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case LIMIT: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case LIMIT:{ ProjLimitClause(); break; + } default: jj_la1[108] = jj_gen; ; } break; + } default: jj_la1[109] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - } +} final public void OrderGroup() throws ParseException { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case GROUP: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case GROUP:{ GroupClause(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case ORDER: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case ORDER:{ OrderClause(); break; + } default: jj_la1[110] = jj_gen; ; } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case HAVING: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case HAVING:{ Having(); break; + } default: jj_la1[111] = jj_gen; ; } break; - case ORDER: + } + case ORDER:{ OrderClause(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case GROUP: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case GROUP:{ GroupClause(); break; + } default: jj_la1[112] = jj_gen; ; } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case HAVING: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case HAVING:{ Having(); break; + } default: jj_la1[113] = jj_gen; ; } break; - case HAVING: + } + case HAVING:{ Having(); break; + } default: jj_la1[114] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - } +} - final public void GroupClause() throws ParseException { - Variable var; Expression exp; Exp stack; - stack = new And(); + final public void GroupClause() throws ParseException {Variable var; Expression exp; Exp stack; +stack = new And(); var = null; jj_consume_token(GROUP); jj_consume_token(BY); label_29: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case LPAREN: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case LPAREN:{ jj_consume_token(LPAREN); exp = Expression(stack); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case AS: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case AS:{ jj_consume_token(AS); var = Var(); break; + } default: jj_la1[115] = jj_gen; ; } jj_consume_token(RPAREN); break; + } case VAR1: case VAR2: - case VAR3: + case VAR3:{ exp = Var(); break; + } case NOT: case THROW: case STOP: @@ -3273,14 +3459,16 @@ final public void GroupClause() throws ParseException { case OBJECT: case IS_TRIPLE: case SPARQL_COMPARE: - case XPATH: + case XPATH:{ exp = BuiltInCall(stack); break; + } case Q_IRIref: case QNAME_NS: - case QNAME: + case QNAME:{ exp = FunctionCall(stack); break; + } case TRY: case LET: case LETDYN: @@ -3291,30 +3479,30 @@ final public void GroupClause() throws ParseException { case IF: 
case FUNCTION: case LAMBDA: - case QUERY: + case QUERY:{ exp = FunctionCorese(stack); break; + } default: jj_la1[116] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - astq.setGroup(exp, var); +astq.setGroup(exp, var); if (jj_2_7(2)) { ; } else { break label_29; } } - } +} - final public void Having() throws ParseException { - Exp stack; - stack = new And(); + final public void Having() throws ParseException {Exp stack; +stack = new And(); jj_consume_token(HAVING); Filter(stack); - astq.setHaving(stack); - } +astq.setHaving(stack); +} final public void OrderClause() throws ParseException { jj_consume_token(ORDER); @@ -3328,19 +3516,19 @@ final public void OrderClause() throws ParseException { break label_30; } } - } +} - final public void OrderCondition() throws ParseException { - boolean reverse; Token t; String sortby; Variable v; Expression a; Term tm; Exp stack; - reverse = false; + final public void OrderCondition() throws ParseException {boolean reverse; Token t; String sortby; Variable v; Expression a; Term tm; Exp stack; +reverse = false; stack = new And(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case VAR1: case VAR2: - case VAR3: + case VAR3:{ v = Var(); - astq.setSort(v, reverse); +astq.setSort(v, reverse); break; + } case NOT: case THROW: case STOP: @@ -3431,50 +3619,56 @@ final public void OrderCondition() throws ParseException { case OBJECT: case IS_TRIPLE: case SPARQL_COMPARE: - case XPATH: + case XPATH:{ a = BuiltInCall(stack); - astq.setSort(a, reverse); +astq.setSort(a, reverse); break; + } case Q_IRIref: case QNAME_NS: - case QNAME: + case QNAME:{ tm = FunctionCall(stack); - astq.setSort(tm, reverse); +astq.setSort(tm, reverse); break; + } default: jj_la1[119] = jj_gen; if (jj_2_9(2)) { tm = FunctionCorese(stack); - astq.setSort(tm, reverse); +astq.setSort(tm, reverse); } else { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case ASC: case DESC: - case LPAREN: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case LPAREN:{ + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case ASC: - case DESC: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case ASC: + case DESC:{ + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case ASC:{ jj_consume_token(ASC); break; - case DESC: + } + case DESC:{ jj_consume_token(DESC); - reverse = true; +reverse = true; break; + } default: jj_la1[117] = jj_gen; jj_consume_token(-1); throw new ParseException(); } break; + } default: jj_la1[118] = jj_gen; ; } a = BrackettedExpression(stack); - astq.setSort(a, reverse); +astq.setSort(a, reverse); break; + } default: jj_la1[120] = jj_gen; jj_consume_token(-1); @@ -3482,49 +3676,49 @@ final public void OrderCondition() throws ParseException { } } } - } +} - final public void ProjLimitClause() throws ParseException { - Token t; + final public void ProjLimitClause() throws ParseException {Token t; LimitClause(); - } +} - final public void LimitClause() throws ParseException { - Token t; + final public void LimitClause() throws ParseException {Token t; jj_consume_token(LIMIT); t = jj_consume_token(INTEGER); - astq.setMaxResult(Integer.parseInt(t.image)); - } +astq.setMaxResult(Integer.parseInt(t.image)); +} - final public void OffsetClause() throws ParseException { - Token t; + final public void OffsetClause() throws ParseException {Token t; jj_consume_token(OFFSET); t = jj_consume_token(INTEGER); - astq.setOffset(Integer.parseInt(t.image)); - } +astq.setOffset(Integer.parseInt(t.image)); +} // **** General Graph Pattern 
******************************************** - final public Exp GroupGraphPattern() throws ParseException { - Exp stack; Metadata meta = null; - stack = BasicGraphPattern.create(); + final public + + +Exp GroupGraphPattern() throws ParseException {Exp stack; Metadata meta = null; +stack = BasicGraphPattern.create(); jj_consume_token(LBRACE); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case SELECT: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case SELECT:{ SubQuery(meta, stack); break; + } default: jj_la1[121] = jj_gen; GraphPattern(stack); } jj_consume_token(RBRACE); - {if (true) return stack;} +{if ("" != null) return stack;} throw new Error("Missing return statement in function"); - } +} final public void GraphPattern(Exp stack) throws ParseException { label_31: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -3559,14 +3753,15 @@ final public void GraphPattern(Exp stack) throws ParseException { case ANON: case LTLT: case AT: - case VAR3: + case VAR3:{ ; break; + } default: jj_la1[122] = jj_gen; break label_31; } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -3591,12 +3786,14 @@ final public void GraphPattern(Exp stack) throws ParseException { case ANON: case LTLT: case AT: - case VAR3: + case VAR3:{ TriplesSameSubject(stack); break; - case FILTER: + } + case FILTER:{ Constraint(stack); break; + } case GRAPH: case SERVICE: case OPTIONAL: @@ -3605,27 +3802,29 @@ final public void GraphPattern(Exp stack) throws ParseException { case EXIST: case VALUES: case BIND: - case LBRACE: + case LBRACE:{ GraphPatternNotTriples(stack); break; + } default: jj_la1[123] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case DOT: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case DOT:{ jj_consume_token(DOT); break; + } default: jj_la1[124] = jj_gen; ; } } - } +} final public void FilteredBasicGraphPattern(Exp stack) throws ParseException { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -3650,46 +3849,49 @@ final public void FilteredBasicGraphPattern(Exp stack) throws ParseException { case ANON: case LTLT: case AT: - case VAR3: + case VAR3:{ BlockOfTriples(stack); break; + } default: jj_la1[125] = jj_gen; ; } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case FILTER: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case FILTER:{ Constraint(stack); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case DOT: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case DOT:{ jj_consume_token(DOT); break; + } default: jj_la1[126] = jj_gen; ; } FilteredBasicGraphPattern(stack); break; + } default: jj_la1[127] = jj_gen; ; } - } +} - final public void BlockOfTriples(Exp stack) throws ParseException { - boolean b1, b2; + final public void BlockOfTriples(Exp stack) throws ParseException {boolean b1, b2; label_32: while (true) { TriplesSameSubject(stack); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case DOT: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case DOT:{ jj_consume_token(DOT); break; + } default: jj_la1[128] = jj_gen; ; } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -3714,50 +3916,58 @@ final public void BlockOfTriples(Exp stack) throws ParseException { case ANON: case LTLT: case AT: - case VAR3: + case VAR3:{ ; break; + } 
default: jj_la1[129] = jj_gen; break label_32; } } - } +} - final public void GraphPatternNotTriples(Exp stack) throws ParseException { - ASTQuery ast; Values values; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case OPTIONAL: + final public void GraphPatternNotTriples(Exp stack) throws ParseException {ASTQuery ast; Values values; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case OPTIONAL:{ OptionalGraphPattern(stack); break; - case MINUSP: + } + case MINUSP:{ MinusGraphPattern(stack); break; + } default: jj_la1[130] = jj_gen; if (jj_2_10(2)) { GroupOrUnionGraphPattern(stack); } else { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case GRAPH: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case GRAPH:{ GraphGraphPattern(stack); break; - case SERVICE: + } + case SERVICE:{ ServicePattern(stack); break; - case SCOPE: + } + case SCOPE:{ ScopeGraphPattern(stack); break; - case EXIST: + } + case EXIST:{ ExistGraphPattern(stack); break; - case BIND: + } + case BIND:{ Bind(stack); break; - case VALUES: + } + case VALUES:{ values = Values(); - stack.add(values); +stack.add(values); break; + } default: jj_la1[131] = jj_gen; jj_consume_token(-1); @@ -3765,143 +3975,136 @@ final public void GraphPatternNotTriples(Exp stack) throws ParseException { } } } - } +} - final public void SubQuery(Metadata meta, Exp stack) throws ParseException { - ASTQuery ast; Query q; Exp exp; - ast = astq; astq = gast.subCreate(); + final public void SubQuery(Metadata meta, Exp stack) throws ParseException {ASTQuery ast; Query q; Exp exp; +ast = astq; astq = gast.subCreate(); exp = SelectQuery(meta); - astq.setBody(exp); q = Query.create(astq); stack.add(q); astq = ast; - } +astq.setBody(exp); q = Query.create(astq); stack.add(q); astq = ast; +} - final public Exp SubQueryExp(Metadata meta) throws ParseException { - Exp exp; - exp = BasicGraphPattern.create(); + final public Exp SubQueryExp(Metadata meta) throws ParseException {Exp exp; +exp = BasicGraphPattern.create(); SubQuery(meta, exp); - {if (true) return exp;} +{if ("" != null) return exp;} throw new Error("Missing return statement in function"); - } +} - final public Exp ServiceExp() throws ParseException { - Exp exp, res; ASTQuery tmp; Query q; - exp = BasicGraphPattern.create(); + final public Exp ServiceExp() throws ParseException {Exp exp, res; ASTQuery tmp; Query q; +exp = BasicGraphPattern.create(); ServicePattern(exp); - tmp = gast.subCreate(); +tmp = gast.subCreate(); tmp.setBody(exp); tmp.setSelectAll(true); q = Query.create(tmp); res = BasicGraphPattern.create(); res.add(q); - {if (true) return res;} + {if ("" != null) return res;} throw new Error("Missing return statement in function"); - } +} - final public Exp SubConstruct(Metadata meta) throws ParseException { - ASTQuery ast; Query q; Exp exp; - ast = astq; astq = gast.subCreate(); + final public Exp SubConstruct(Metadata meta) throws ParseException {ASTQuery ast; Query q; Exp exp; +ast = astq; astq = gast.subCreate(); exp = ConstructQuery(meta); - astq.setBody(exp); q = Query.create(astq); astq = ast; {if (true) return q;} +astq.setBody(exp); q = Query.create(astq); astq = ast; {if ("" != null) return q;} throw new Error("Missing return statement in function"); - } +} - final public Exp SubUpdate(Metadata meta) throws ParseException { - ASTQuery ast; ASTUpdate up, save; Query q; Exp exp; - ast = astq; save = astu; astu = astu.subCreate(); astq = gast.subCreate(); astq.set(astu); + final public Exp SubUpdate(Metadata meta) throws ParseException {ASTQuery ast; ASTUpdate up, save; Query q; Exp exp; +ast = 
astq; save = astu; astu = astu.subCreate(); astq = gast.subCreate(); astq.set(astu); astq.shareForUpdate(gast); astq.setBody(BasicGraphPattern.create()); UpdateQuery(meta); - q = Query.create(astq); astq = ast; astu = save; {if (true) return q;} +q = Query.create(astq); astq = ast; astu = save; {if ("" != null) return q;} throw new Error("Missing return statement in function"); - } +} - final public void Bind(Exp stack) throws ParseException { - Expression exp; Exp q; Variable var, vv; + final public void Bind(Exp stack) throws ParseException {Expression exp; Exp q; Variable var, vv; jj_consume_token(BIND); jj_consume_token(LPAREN); exp = Expression(stack); jj_consume_token(AS); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case VAR1: case VAR2: - case VAR3: + case VAR3:{ var = Var(); break; - case LPAREN: - var = astq.metaVariable(); + } + case LPAREN:{ +var = astq.metaVariable(); jj_consume_token(LPAREN); vv = Var(); - var.addVariable(vv); +var.addVariable(vv); label_33: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case COMMA: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case COMMA:{ ; break; + } default: jj_la1[132] = jj_gen; break label_33; } jj_consume_token(COMMA); vv = Var(); - var.addVariable(vv); +var.addVariable(vv); } jj_consume_token(RPAREN); break; + } default: jj_la1[133] = jj_gen; jj_consume_token(-1); throw new ParseException(); } jj_consume_token(RPAREN); - q = astq.createBind(exp, var); stack.add(q); - } +q = astq.createBind(exp, var); stack.add(q); +} // ---- Definitions of each pattern element - final public void OptionalGraphPattern(Exp stack) throws ParseException { - Exp e; + final public +void OptionalGraphPattern(Exp stack) throws ParseException {Exp e; jj_consume_token(OPTIONAL); e = GroupGraphPattern(); - e= Optional.create(e); +e= Optional.create(e); stack.add(e); - } +} - final public void MinusGraphPattern(Exp stack) throws ParseException { - Exp e; + final public void MinusGraphPattern(Exp stack) throws ParseException {Exp e; jj_consume_token(MINUSP); e = GroupGraphPattern(); - e = Minus.create(e); +e = Minus.create(e); stack.add(e); - } +} - final public void ExistGraphPattern(Exp stack) throws ParseException { - Exp e; + final public void ExistGraphPattern(Exp stack) throws ParseException {Exp e; jj_consume_token(EXIST); e = GroupGraphPattern(); - e= Exist.create(e); +e= Exist.create(e); stack.add(e); - } +} - final public void ScopeGraphPattern(Exp stack) throws ParseException { - Exp e; + final public void ScopeGraphPattern(Exp stack) throws ParseException {Exp e; jj_consume_token(SCOPE); e = GroupGraphPattern(); - e= Scope.create(e); +e= Scope.create(e); stack.add(e); - } +} final public void GraphGraphPattern(Exp stack) throws ParseException { jj_consume_token(GRAPH); BasicGraphGraphPattern(stack); - } +} - final public void BasicGraphGraphPattern(Exp stack) throws ParseException { - Atom src; Exp e; Source st; + final public void BasicGraphGraphPattern(Exp stack) throws ParseException {Atom src; Exp e; Source st; boolean state = false, leaf = false, rec = false; //src = VarOrIRIref(stack) src = VarOrBlankNodeOrIRIref(stack); - handler.graphPattern(src); +handler.graphPattern(src); e = GroupGraphPattern(); - st = Source.create(src, e); +st = Source.create(src, e); if (state) { st.setState(true); if (leaf) st.setLeaf(true); @@ -3910,18 +4113,18 @@ final public void BasicGraphGraphPattern(Exp stack) throws ParseException { st.setRec(true); } stack.add(st); - handler.endGraphPattern(src); - } 
+handler.endGraphPattern(src); +} - final public void ServicePattern(Exp stack) throws ParseException { - Atom src; Exp e; Service st; boolean silent; ArrayList list; - silent = false; list = new ArrayList() ; + final public void ServicePattern(Exp stack) throws ParseException {Atom src; Exp e; Service st; boolean silent; ArrayList list; +silent = false; list = new ArrayList() ; jj_consume_token(SERVICE); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case SILENT: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case SILENT:{ jj_consume_token(SILENT); - silent = true; +silent = true; break; + } default: jj_la1[134] = jj_gen; ; @@ -3929,72 +4132,72 @@ final public void ServicePattern(Exp stack) throws ParseException { label_34: while (true) { src = VarOrIRIref(stack); - handler.enterService(astq, src); list.add(src); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { +handler.enterService(astq, src); list.add(src); + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: case VAR1: case VAR2: - case VAR3: + case VAR3:{ ; break; + } default: jj_la1[135] = jj_gen; break label_34; } } e = GroupGraphPattern(); - st = Service.create(list, e, silent); +st = Service.create(list, e, silent); stack.add(st); handler.leaveService(astq); - } +} // G (union G)* can be a single group pattern // or a group pattern as part of an union. - final public void GroupOrUnionGraphPattern(Exp stack) throws ParseException { - Exp temp, res; + final public void GroupOrUnionGraphPattern(Exp stack) throws ParseException {Exp temp, res; res = GroupGraphPattern(); label_35: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case UNION: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case UNION:{ ; break; + } default: jj_la1[136] = jj_gen; break label_35; } jj_consume_token(UNION); - temp = res; +temp = res; res = Union.create(); res.add(temp); temp = GroupGraphPattern(); - res.add(temp); +res.add(temp); } - stack.add(res); - } +stack.add(res); +} final public void Constraint(Exp stack) throws ParseException { jj_consume_token(FILTER); Filter(stack); - } +} - final public void Filter(Exp stack) throws ParseException { - Expression expression1; Exp filter; + final public void Filter(Exp stack) throws ParseException {Expression expression1; Exp filter; expression1 = FilterExp(stack); - if (expression1 != null ) { +if (expression1 != null ) { filter = astq.checkCreateFilter(expression1); stack.add(filter); } - } +} - final public Expression FilterExp(Exp stack) throws ParseException { - Expression exp; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case LPAREN: + final public Expression FilterExp(Exp stack) throws ParseException {Expression exp; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case LPAREN:{ exp = BrackettedExpression(stack); break; + } case NOT: case THROW: case STOP: @@ -4085,9 +4288,10 @@ final public Expression FilterExp(Exp stack) throws ParseException { case OBJECT: case IS_TRIPLE: case SPARQL_COMPARE: - case XPATH: + case XPATH:{ exp = BuiltInCall(stack); break; + } case TRY: case LET: case LETDYN: @@ -4098,36 +4302,38 @@ final public Expression FilterExp(Exp stack) throws ParseException { case IF: case FUNCTION: case LAMBDA: - case QUERY: + case QUERY:{ exp = FunctionCorese(stack); break; + } case Q_IRIref: case QNAME_NS: - case QNAME: + case QNAME:{ exp = FunctionCall(stack); break; + } default: jj_la1[137] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return exp;} +{if ("" != null) return exp;} throw new Error("Missing return statement in function"); - } +} 
// -------- Construct patterns - final public Exp ConstructTemplate() throws ParseException { - Exp stack; + final public +Exp ConstructTemplate() throws ParseException {Exp stack; jj_consume_token(LBRACE); - stack = BasicGraphPattern.create(); +stack = BasicGraphPattern.create(); ConstructTriples(stack); jj_consume_token(RBRACE); - {if (true) return stack;} +{if ("" != null) return stack;} throw new Error("Missing return statement in function"); - } +} final public void ConstructTriples(Exp stack) throws ParseException { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -4153,8 +4359,8 @@ final public void ConstructTriples(Exp stack) throws ParseException { case ANON: case LTLT: case AT: - case VAR3: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case VAR3:{ + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -4179,45 +4385,48 @@ final public void ConstructTriples(Exp stack) throws ParseException { case ANON: case LTLT: case AT: - case VAR3: + case VAR3:{ TriplesSameSubject(stack); break; - case GRAPH: + } + case GRAPH:{ GraphGraphPattern(stack); break; + } default: jj_la1[138] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case DOT: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case DOT:{ jj_consume_token(DOT); break; + } default: jj_la1[139] = jj_gen; ; } ConstructTriples(stack); break; + } default: jj_la1[140] = jj_gen; ; } - } +} - final public Exp ConstructTemplate2() throws ParseException { - Exp stack; + final public Exp ConstructTemplate2() throws ParseException {Exp stack; jj_consume_token(LBRACE); - stack = BasicGraphPattern.create(); +stack = BasicGraphPattern.create(); ConstructTriples2(stack); jj_consume_token(RBRACE); - {if (true) return stack;} +{if ("" != null) return stack;} throw new Error("Missing return statement in function"); - } +} final public void ConstructTriples2(Exp stack) throws ParseException { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -4242,83 +4451,84 @@ final public void ConstructTriples2(Exp stack) throws ParseException { case ANON: case LTLT: case AT: - case VAR3: + case VAR3:{ TriplesSameSubject(stack); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case DOT: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case DOT:{ jj_consume_token(DOT); break; + } default: jj_la1[141] = jj_gen; ; } ConstructTriples2(stack); break; + } default: jj_la1[142] = jj_gen; ; } - } +} // -------- Triple lists with property and object lists - final public void TriplesSameSubject(Exp stack) throws ParseException { - Expression expression1; + final public void TriplesSameSubject(Exp stack) throws ParseException {Expression expression1; if (jj_2_11(2)) { expression1 = VarOrTerm(stack); stack = PropertyListNotEmpty(expression1, stack); } else { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case ATLIST: case ATPATH: case LPAREN: case LBRACKET: - case AT: + case AT:{ expression1 = TriplesNode(stack); stack = PropertyList(expression1, stack); break; - case LTLT: + } + case LTLT:{ stack = RDFstar(stack); break; + } case TUPLE: - case TRIPLE: + case TRIPLE:{ stack = tuple(stack); break; + } default: jj_la1[143] = jj_gen; jj_consume_token(-1); throw new ParseException(); } } - } +} // << s p o >> q v . 
- final public Exp RDFstar(Exp stack) throws ParseException { - Atom ref; + final public Exp RDFstar(Exp stack) throws ParseException {Atom ref; ref = StarTriple(stack); stack = PropertyListNotEmpty(ref, stack); - {if (true) return stack;} +{if ("" != null) return stack;} throw new Error("Missing return statement in function"); - } +} // return Constant with: // dt=bnode triple reference isTriple() == true // when sparql: triple = triple(s p o) // when load: triple = null, edge created in graph directly - final public Atom StarTriple(Exp stack) throws ParseException { - Atom sub, obj, ref, pred; Triple t; + final public Atom StarTriple(Exp stack) throws ParseException {Atom sub, obj, ref, pred; Triple t; jj_consume_token(LTLT); sub = StarTerm(stack); pred = Verb(); obj = StarTerm(stack); - ref = handler.createNestedTripleStar(astq, stack, sub, pred, obj, null); +ref = handler.createNestedTripleStar(astq, stack, sub, pred, obj, null); jj_consume_token(GTGT); - {if (true) return ref;} +{if ("" != null) return ref;} throw new Error("Missing return statement in function"); - } +} - final public Atom StarTerm(Exp stack) throws ParseException { - Atom term; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + final public Atom StarTerm(Exp stack) throws ParseException {Atom term; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -4336,34 +4546,37 @@ final public Atom StarTerm(Exp stack) throws ParseException { case STRING_LITERAL_LONG2: case LPAREN: case ANON: - case VAR3: + case VAR3:{ //term = GraphNode(stack) term = VarOrTerm(stack); break; - case LTLT: + } + case LTLT:{ term = StarTriple(stack); break; + } default: jj_la1[144] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return term;} +{if ("" != null) return term;} throw new Error("Missing return statement in function"); - } +} // tuple(p s o v) triple(s p o v) - final public Exp tuple(Exp stack) throws ParseException { - Atom subject, object, term; Variable var; ArrayList list; Atom prop; Triple triple; + final public Exp tuple(Exp stack) throws ParseException {Atom subject, object, term; Variable var; ArrayList list; Atom prop; Triple triple; boolean arity = false, isTriple = true; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case TUPLE: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case TUPLE:{ jj_consume_token(TUPLE); - isTriple = false; +isTriple = false; break; - case TRIPLE: + } + case TRIPLE:{ jj_consume_token(TRIPLE); break; + } default: jj_la1[145] = jj_gen; jj_consume_token(-1); @@ -4373,11 +4586,11 @@ final public Exp tuple(Exp stack) throws ParseException { prop = Verb(); subject = GraphNode(stack); object = GraphNode(stack); - if (isTriple) { term = prop; prop = subject; subject = term; } - list = new ArrayList(); list.add(subject); list.add(object); +if (isTriple) { term = prop; prop = subject; subject = term; } +list = new ArrayList(); list.add(subject); list.add(object); label_36: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -4399,50 +4612,54 @@ final public Exp tuple(Exp stack) throws ParseException { case LBRACKET: case ANON: case AT: - case VAR3: + case VAR3:{ ; break; + } default: jj_la1[146] = jj_gen; break label_36; } term = GraphNode(stack); - list.add(term); +list.add(term); } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case BAR: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case BAR:{ jj_consume_token(BAR); var = Var(); - var.setMatchNodeList(true) ; 
list.add(var); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case BAR: +var.setMatchNodeList(true) ; list.add(var); + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case BAR:{ jj_consume_token(BAR); - var.setMatchCardinality(true) ; +var.setMatchCardinality(true) ; break; + } default: jj_la1[147] = jj_gen; ; } break; + } default: jj_la1[148] = jj_gen; ; } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case DOT: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case DOT:{ jj_consume_token(DOT); - arity = true; +arity = true; break; + } default: jj_la1[149] = jj_gen; ; } jj_consume_token(RPAREN); - triple = handler.createTriple(astq, prop, list, arity); +triple = handler.createTriple(astq, prop, list, arity); if (triple != null) { stack.add(triple); } - {if (true) return stack;} + {if ("" != null) return stack;} throw new Error("Missing return statement in function"); - } +} final public Exp PropertyList(Expression expression1, Exp stack) throws ParseException { if (jj_2_12(2)) { @@ -4450,12 +4667,11 @@ final public Exp PropertyList(Expression expression1, Exp stack) throws ParseExc } else { ; } - {if (true) return stack;} +{if ("" != null) return stack;} throw new Error("Missing return statement in function"); - } +} - final public Exp PropertyListNotEmpty(Expression e1, Exp stack) throws ParseException { - ExpressionList list; Atom p; Triple triple; int n; + final public Exp PropertyListNotEmpty(Expression e1, Exp stack) throws ParseException {ExpressionList list; Atom p; Triple triple; int n; stack = PropertyListNotEmptyElem(e1, stack); label_37: while (true) { @@ -4472,48 +4688,46 @@ final public Exp PropertyListNotEmpty(Expression e1, Exp stack) throws ParseExce } else { ; } - {if (true) return stack;} +{if ("" != null) return stack;} throw new Error("Missing return statement in function"); - } +} - final public Exp PropertyListNotEmptyElem(Expression e1, Exp stack) throws ParseException { - ExpressionList list; Atom p; Triple triple; int n; + final public Exp PropertyListNotEmptyElem(Expression e1, Exp stack) throws ParseException {ExpressionList list; Atom p; Triple triple; int n; p = Verb(); - n = stack.size(); +n = stack.size(); // ObjectList() may fill the stack beyond n in case of [prop ?val] // hence we store the index n to add the triple at n list = ObjectList(stack); - stack = handler.createTriples(astq, stack, e1, p, list, n); - {if (true) return stack ;} +stack = handler.createTriples(astq, stack, e1, p, list, n); + {if ("" != null) return stack ;} throw new Error("Missing return statement in function"); - } +} - final public ExpressionList ObjectList(Exp stack) throws ParseException { - Expression expression1; ExpressionList v; - v = new ExpressionList(); + final public ExpressionList ObjectList(Exp stack) throws ParseException {Expression expression1; ExpressionList v; +v = new ExpressionList(); expression1 = PropertyValue(stack); - v.add(expression1); +v.add(expression1); label_38: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case COMMA: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case COMMA:{ ; break; + } default: jj_la1[150] = jj_gen; break label_38; } jj_consume_token(COMMA); expression1 = PropertyValue(stack); - v.add(expression1); +v.add(expression1); } - {if (true) return v;} +{if ("" != null) return v;} throw new Error("Missing return statement in function"); - } +} - final public Atom PropertyValue(Exp stack) throws ParseException { - Atom value; Atom tripleRef = null; Exp newStack; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + final public Atom PropertyValue(Exp 
stack) throws ParseException {Atom value; Atom tripleRef = null; Exp newStack; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -4535,50 +4749,50 @@ final public Atom PropertyValue(Exp stack) throws ParseException { case LBRACKET: case ANON: case AT: - case VAR3: + case VAR3:{ value = GraphNode(stack); break; - case LTLT: + } + case LTLT:{ value = StarTriple(stack); break; + } default: jj_la1[151] = jj_gen; jj_consume_token(-1); throw new ParseException(); } if (jj_2_15(2)) { - newStack = new And().setStack(true); +newStack = new And().setStack(true); //tripleRef = AssertedAnnotation(stack) tripleRef = AssertedAnnotation(newStack); - value.setTripleReference(tripleRef); +value.setTripleReference(tripleRef); // value = o in s p o with o.reference = t and o.annotation = (t q v) value.setAnnotation(newStack); } else { ; } - {if (true) return value;} +{if ("" != null) return value;} throw new Error("Missing return statement in function"); - } +} // s p o {| q v |} // parse {| q v |} with t as subject // create t q v triples in a fresh stack - final public Atom AssertedAnnotation(Exp stack) throws ParseException { - Exp exp; Atom tripleRef = null; + final public Atom AssertedAnnotation(Exp stack) throws ParseException {Exp exp; Atom tripleRef = null; jj_consume_token(LBRACE); jj_consume_token(BAR); - tripleRef = handler.createTripleReference(astq); +tripleRef = handler.createTripleReference(astq); // annotation triples inserted in stack stack = PropertyListNotEmpty(tripleRef, stack); jj_consume_token(BAR); jj_consume_token(RBRACE); - {if (true) return tripleRef;} +{if ("" != null) return tripleRef;} throw new Error("Missing return statement in function"); - } +} - final public Atom Verb() throws ParseException { - Atom atom; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + final public Atom Verb() throws ParseException {Atom atom; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -4588,272 +4802,290 @@ final public Atom Verb() throws ParseException { case BEGIN_WITH: case LPAREN: case BANG: - case VAR3: + case VAR3:{ atom = PropertyVarOrIRIref(); break; - case KW_A: + } + case KW_A:{ jj_consume_token(KW_A); - atom = astq.createQName(RDFS.qrdftype); +atom = astq.createQName(RDFS.qrdftype); break; + } default: jj_la1[152] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return atom;} +{if ("" != null) return atom;} throw new Error("Missing return statement in function"); - } +} - final public Atom PropertyVarOrIRIref() throws ParseException { - Atom atom; Token t1; Variable var; Constant cst; String temp; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + final public Atom PropertyVarOrIRIref() throws ParseException {Atom atom; Token t1; Variable var; Constant cst; String temp; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: case XPATH: case BEGIN_WITH: case LPAREN: - case BANG: + case BANG:{ atom = StdProperty(); break; + } case VAR1: case VAR2: - case VAR3: + case VAR3:{ atom = Var(); break; + } default: jj_la1[153] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return atom;} +{if ("" != null) return atom;} throw new Error("Missing return statement in function"); - } +} - final public Atom StdProperty() throws ParseException { - Variable var; Constant cst; Expression exp; + final public Atom StdProperty() throws ParseException {Variable var; Constant cst; Expression exp; exp = RegExp(); - cst = astq.createProperty(exp); - switch 
((jj_ntk==-1)?jj_ntk():jj_ntk) { - case COLON2: +cst = astq.createProperty(exp); + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case COLON2:{ jj_consume_token(COLON2); var = Var(); - cst.setVar(var); +cst.setVar(var); break; + } default: jj_la1[154] = jj_gen; ; } - {if (true) return cst;} +{if ("" != null) return cst;} throw new Error("Missing return statement in function"); - } +} - final public Expression RegExp() throws ParseException { - Token t; String s; Expression exp; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case XPATH: + final public Expression RegExp() throws ParseException {Token t; String s; Expression exp; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case XPATH:{ t = jj_consume_token(XPATH); jj_consume_token(LPAREN); s = String(); jj_consume_token(RPAREN); - exp = astq.createFunction(t.image, astq.createConstant(s, null, null)); +exp = astq.createFunction(t.image, astq.createConstant(s, null, null)); break; + } case Q_IRIref: case QNAME_NS: case QNAME: case BEGIN_WITH: case LPAREN: - case BANG: + case BANG:{ exp = AddRegExp(); break; + } default: jj_la1[155] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return exp;} +{if ("" != null) return exp;} throw new Error("Missing return statement in function"); - } +} - final public Expression AddRegExp() throws ParseException { - Token t; Expression exp1, exp2; + final public Expression AddRegExp() throws ParseException {Token t; Expression exp1, exp2; exp1 = MultRegExp(); label_39: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case SC_OR: - case BAR: + case BAR:{ ; break; + } default: jj_la1[156] = jj_gen; break label_39; } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case BAR: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case BAR:{ t = jj_consume_token(BAR); exp2 = MultRegExp(); - exp1 = astq.createAltExpression(exp1, exp2); +exp1 = astq.createAltExpression(exp1, exp2); break; - case SC_OR: + } + case SC_OR:{ t = jj_consume_token(SC_OR); exp2 = MultRegExp(); - exp1 = astq.createParaExpression(exp1, exp2); +exp1 = astq.createParaExpression(exp1, exp2); break; + } default: jj_la1[157] = jj_gen; jj_consume_token(-1); throw new ParseException(); } } - {if (true) return exp1;} +{if ("" != null) return exp1;} throw new Error("Missing return statement in function"); - } +} - final public Expression MultRegExp() throws ParseException { - Token t; Expression exp1, exp2; + final public Expression MultRegExp() throws ParseException {Token t; Expression exp1, exp2; exp1 = UnaryRegExp(); label_40: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case SLASH: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case SLASH:{ ; break; + } default: jj_la1[158] = jj_gen; break label_40; } t = jj_consume_token(SLASH); exp2 = UnaryRegExp(); - exp1 = astq.createSeqExpression(exp1, exp2); +exp1 = astq.createSeqExpression(exp1, exp2); } - {if (true) return exp1;} +{if ("" != null) return exp1;} throw new Error("Missing return statement in function"); - } +} - final public Expression UnaryRegExp() throws ParseException { - Token t, t1, t2; Expression exp; Token ope; String s1, s2; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case BANG: + final public Expression UnaryRegExp() throws ParseException {Token t, t1, t2; Expression exp; Token ope; String s1, s2; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case BANG:{ t = jj_consume_token(BANG); exp = PrimaryRegExp(); - exp = astq.createOperator(t.image, exp); +exp = astq.createOperator(t.image, exp); break; - case 
BEGIN_WITH: + } + case BEGIN_WITH:{ t = jj_consume_token(BEGIN_WITH); exp = PrimaryRegExp(); - exp = astq.createOperator(t.image, exp); +exp = astq.createOperator(t.image, exp); break; + } case Q_IRIref: case QNAME_NS: case QNAME: - case LPAREN: + case LPAREN:{ exp = PrimaryRegExp(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case LBRACE: case PLUS: case STAR: - case QM: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case QM:{ + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case PLUS: case STAR: - case QM: + case QM:{ ope = OpeRegExp(); - exp = astq.createOperator(ope.image, exp); +exp = astq.createOperator(ope.image, exp); break; - case LBRACE: + } + case LBRACE:{ jj_consume_token(LBRACE); - s1 = null; s2 = null; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case INTEGER: +s1 = null; s2 = null; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case INTEGER:{ t1 = jj_consume_token(INTEGER); - s1 = t1.image; s2 = s1; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case COMMA: +s1 = t1.image; s2 = s1; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case COMMA:{ jj_consume_token(COMMA); - s2 = null; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case INTEGER: +s2 = null; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case INTEGER:{ t2 = jj_consume_token(INTEGER); - s2 = t2.image; +s2 = t2.image; break; + } default: jj_la1[159] = jj_gen; ; } break; + } default: jj_la1[160] = jj_gen; ; } break; - case COMMA: + } + case COMMA:{ jj_consume_token(COMMA); t2 = jj_consume_token(INTEGER); - s2 = t2.image; +s2 = t2.image; break; + } default: jj_la1[161] = jj_gen; jj_consume_token(-1); throw new ParseException(); } jj_consume_token(RBRACE); - exp = astq.createOperator(s1, s2, exp); +exp = astq.createOperator(s1, s2, exp); break; + } default: jj_la1[162] = jj_gen; jj_consume_token(-1); throw new ParseException(); } break; + } default: jj_la1[163] = jj_gen; ; } break; + } default: jj_la1[164] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return exp;} +{if ("" != null) return exp;} throw new Error("Missing return statement in function"); - } +} - final public Token OpeRegExp() throws ParseException { - Token ope; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case STAR: + final public Token OpeRegExp() throws ParseException {Token ope; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case STAR:{ ope = jj_consume_token(STAR); break; - case PLUS: + } + case PLUS:{ ope = jj_consume_token(PLUS); break; - case QM: + } + case QM:{ ope = jj_consume_token(QM); break; + } default: jj_la1[165] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return ope;} +{if ("" != null) return ope;} throw new Error("Missing return statement in function"); - } +} - final public Expression PrimaryRegExp() throws ParseException { - Token w; Expression exp, test, subject; Exp stack; String t; Constant cst; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + final public Expression PrimaryRegExp() throws ParseException {Token w; Expression exp, test, subject; Exp stack; String t; Constant cst; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: - case QNAME: + case QNAME:{ exp = IRIrefConst(); break; - case LPAREN: + } + case LPAREN:{ jj_consume_token(LPAREN); exp = AddRegExp(); jj_consume_token(RPAREN); break; + } default: jj_la1[166] = jj_gen; jj_consume_token(-1); @@ -4861,23 +5093,26 @@ final public Expression PrimaryRegExp() throws ParseException { } if (jj_2_16(2)) { jj_consume_token(AT); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case INTEGER: + 
switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case INTEGER:{ w = jj_consume_token(INTEGER); - exp.setWeight(w.image); +exp.setWeight(w.image); break; - case LBRACKET: - stack = BasicGraphPattern.create(); subject = astq.createVariable(THIS); + } + case LBRACKET:{ +stack = BasicGraphPattern.create(); subject = astq.createVariable(THIS); jj_consume_token(LBRACKET); stack = PropertyListNotEmpty(subject, stack); jj_consume_token(RBRACKET); - exp = astq.createRegexTest(exp, stack); +exp = astq.createRegexTest(exp, stack); break; - case LBRACE: + } + case LBRACE:{ // { ?this a foaf:Person } stack = GroupGraphPattern(); - exp = astq.createRegexTest(exp, stack); +exp = astq.createRegexTest(exp, stack); break; + } default: jj_la1[167] = jj_gen; jj_consume_token(-1); @@ -4886,112 +5121,125 @@ final public Expression PrimaryRegExp() throws ParseException { } else { ; } - {if (true) return exp;} +{if ("" != null) return exp;} throw new Error("Missing return statement in function"); - } +} - final public Token Soper() throws ParseException { - Token t; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case EQ: + final public Token Soper() throws ParseException {Token t; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case EQ:{ t = jj_consume_token(EQ); break; - case NE: + } + case NE:{ t = jj_consume_token(NE); break; - case LT: + } + case LT:{ t = jj_consume_token(LT); break; - case GT: + } + case GT:{ t = jj_consume_token(GT); break; - case LE: + } + case LE:{ t = jj_consume_token(LE); break; - case GE: + } + case GE:{ t = jj_consume_token(GE); break; - case EQ2: + } + case EQ2:{ t = jj_consume_token(EQ2); break; - case NE2: + } + case NE2:{ t = jj_consume_token(NE2); break; - case TILDE: + } + case TILDE:{ t = jj_consume_token(TILDE); break; - case BEGIN_WITH: + } + case BEGIN_WITH:{ t = jj_consume_token(BEGIN_WITH); break; + } default: jj_la1[168] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return t;} +{if ("" != null) return t;} throw new Error("Missing return statement in function"); - } +} - final public Token Toper() throws ParseException { - Token t; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case SPEC: + final public Token Toper() throws ParseException {Token t; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case SPEC:{ //t = | t = jj_consume_token(SPEC); break; - case SAME: + } + case SAME:{ t = jj_consume_token(SAME); break; - case GENERALISATION: + } + case GENERALISATION:{ t = jj_consume_token(GENERALISATION); break; - case STRICT_GENERALISATION: + } + case STRICT_GENERALISATION:{ t = jj_consume_token(STRICT_GENERALISATION); break; + } default: jj_la1[169] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return t;} +{if ("" != null) return t;} throw new Error("Missing return statement in function"); - } +} // -------- Triple expansions // Anything that can stand in a node slot and which is // a number of triples - final public Atom TriplesNode(Exp stack) throws ParseException { - Atom expression1; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + final public +Atom TriplesNode(Exp stack) throws ParseException {Atom expression1; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case ATLIST: case ATPATH: case LPAREN: - case AT: + case AT:{ expression1 = Collection(stack); break; - case LBRACKET: + } + case LBRACKET:{ expression1 = BlankNodePropertyList(stack); break; + } default: jj_la1[170] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return expression1;} +{if ("" != null) return expression1;} throw new Error("Missing return 
statement in function"); - } +} - final public Atom BlankNodePropertyList(Exp stack) throws ParseException { - Atom expression1; - // [ :p ?y ] + final public Atom BlankNodePropertyList(Exp stack) throws ParseException {Atom expression1; +// [ :p ?y ] expression1 = astq.newBlankNode(); jj_consume_token(LBRACKET); stack = PropertyListNotEmpty(expression1, stack); jj_consume_token(RBRACKET); - {if (true) return expression1;} +{if ("" != null) return expression1;} throw new Error("Missing return statement in function"); - } +} // ------- RDF collections /** @@ -5009,45 +5257,49 @@ final public Atom BlankNodePropertyList(Exp stack) throws ParseException { * _:b3 rdf:first _:b4 _:b3 rdf:rest rdf:nil _:b4 rdf:first 3 _:b4 rdf:rest rdf:nil * */ - final public Atom Collection(Exp stack) throws ParseException { - ArrayList list; + final public Atom Collection(Exp stack) throws ParseException {ArrayList list; Atom node, head; Exp exp; RDFList rlist; int arobase = ASTQuery.L_DEFAULT, save = ASTQuery.L_LIST; - list = new ArrayList(); +list = new ArrayList(); save = astq.getListType(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case ATLIST: case ATPATH: - case AT: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case ATLIST: + case AT:{ + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case ATLIST:{ jj_consume_token(ATLIST); - arobase = ASTQuery.L_LIST; astq.setListType(arobase); +arobase = ASTQuery.L_LIST; astq.setListType(arobase); break; + } case ATPATH: - case AT: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case AT: + case AT:{ + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case AT:{ jj_consume_token(AT); break; - case ATPATH: + } + case ATPATH:{ jj_consume_token(ATPATH); break; + } default: jj_la1[171] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - arobase = ASTQuery.L_PATH; astq.setListType(arobase); +arobase = ASTQuery.L_PATH; astq.setListType(arobase); break; + } default: jj_la1[172] = jj_gen; jj_consume_token(-1); throw new ParseException(); } break; + } default: jj_la1[173] = jj_gen; ; @@ -5055,7 +5307,7 @@ final public Atom Collection(Exp stack) throws ParseException { jj_consume_token(LPAREN); label_41: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -5077,19 +5329,21 @@ final public Atom Collection(Exp stack) throws ParseException { case LBRACKET: case ANON: case AT: - case VAR3: + case VAR3:{ node = GraphNode(stack); break; - case LTLT: + } + case LTLT:{ node = StarTriple(stack); break; + } default: jj_la1[174] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - list.add(node); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { +list.add(node); + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -5112,53 +5366,55 @@ final public Atom Collection(Exp stack) throws ParseException { case ANON: case LTLT: case AT: - case VAR3: + case VAR3:{ ; break; + } default: jj_la1[175] = jj_gen; break label_41; } } jj_consume_token(RPAREN); - head = handler.list(astq, stack, list, arobase); +head = handler.list(astq, stack, list, arobase); astq.setListType(save); - {if (true) return head;} + {if ("" != null) return head;} throw new Error("Missing return statement in function"); - } +} // -------- Nodes in a graph pattern or template - final public Atom GraphNode(Exp stack) throws ParseException { - Atom expression1; + final public +Atom GraphNode(Exp stack) throws ParseException {Atom expression1; if (jj_2_17(2)) { 
expression1 = VarOrTerm(stack); } else { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case ATLIST: case ATPATH: case LPAREN: case LBRACKET: - case AT: + case AT:{ expression1 = TriplesNode(stack); break; + } default: jj_la1[176] = jj_gen; jj_consume_token(-1); throw new ParseException(); } } - {if (true) return expression1;} +{if ("" != null) return expression1;} throw new Error("Missing return statement in function"); - } +} - final public Atom VarOrTerm(Exp stack) throws ParseException { - Atom expression1; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + final public Atom VarOrTerm(Exp stack) throws ParseException {Atom expression1; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case VAR1: case VAR2: - case VAR3: + case VAR3:{ expression1 = Var(); break; + } case Q_IRIref: case QNAME_NS: case QNAME: @@ -5173,96 +5429,101 @@ final public Atom VarOrTerm(Exp stack) throws ParseException { case STRING_LITERAL_LONG1: case STRING_LITERAL_LONG2: case LPAREN: - case ANON: + case ANON:{ expression1 = GraphTerm(stack); break; + } default: jj_la1[177] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return expression1;} +{if ("" != null) return expression1;} throw new Error("Missing return statement in function"); - } +} - final public Atom VarOrBlankNodeOrIRIref(Exp stack) throws ParseException { - Atom a; String s; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + final public Atom VarOrBlankNodeOrIRIref(Exp stack) throws ParseException {Atom a; String s; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case VAR1: case VAR2: - case VAR3: + case VAR3:{ a = Var(); - s = a.getName(); +s = a.getName(); break; + } case BLANK_NODE_LABEL: - case ANON: + case ANON:{ a = BlankNode(stack); - s = a.getName(); +s = a.getName(); break; + } case Q_IRIref: case QNAME_NS: - case QNAME: + case QNAME:{ a = IRIrefConst(); break; + } default: jj_la1[178] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return a;} +{if ("" != null) return a;} throw new Error("Missing return statement in function"); - } +} - final public Atom VarOrIRIref(Exp stack) throws ParseException { - Atom a; String s; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + final public Atom VarOrIRIref(Exp stack) throws ParseException {Atom a; String s; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case VAR1: case VAR2: - case VAR3: + case VAR3:{ a = Var(); break; + } case Q_IRIref: case QNAME_NS: - case QNAME: + case QNAME:{ a = IRIrefConst(); break; + } default: jj_la1[179] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return a;} +{if ("" != null) return a;} throw new Error("Missing return statement in function"); - } +} - final public Variable Var() throws ParseException { - Token t1; Variable res; String s; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case VAR1: + final public Variable Var() throws ParseException {Token t1; Variable res; String s; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case VAR1:{ t1 = jj_consume_token(VAR1); - s = t1.image; +s = t1.image; break; - case VAR2: + } + case VAR2:{ t1 = jj_consume_token(VAR2); - s = "?"+(t1.image).substring(1,(t1.image).length()); +s = "?"+(t1.image).substring(1,(t1.image).length()); break; - case VAR3: + } + case VAR3:{ t1 = jj_consume_token(VAR3); - s = "?"+(t1.image); handler.checkVariable(t1); +s = "?"+(t1.image); handler.checkVariable(t1); break; + } default: jj_la1[180] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - res = ASTQuery.createVariable(s, astq); - {if (true) return 
res;} +res = ASTQuery.createVariable(s, astq); +{if ("" != null) return res;} throw new Error("Missing return statement in function"); - } +} - final public Atom GraphTerm(Exp stack) throws ParseException { - Constant cst; String s; Token t1; Variable var; Atom a; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + final public Atom GraphTerm(Exp stack) throws ParseException {Constant cst; String s; Token t1; Variable var; Atom a; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -5275,131 +5536,139 @@ final public Atom GraphTerm(Exp stack) throws ParseException { case STRING_LITERAL2: case STRING_LITERAL_LONG1: case STRING_LITERAL_LONG2: - case LPAREN: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case Q_IRIref: + case LPAREN:{ + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case Q_IRIref:{ s = Q_IRI_ref(); - cst = astq.createURI(s); +cst = astq.createURI(s); break; - case QNAME: + } + case QNAME:{ t1 = jj_consume_token(QNAME); - cst = astq.createQName(t1.image); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case COLON2: +cst = astq.createQName(t1.image); + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case COLON2:{ jj_consume_token(COLON2); var = Var(); - cst.setVar(var); +cst.setVar(var); break; + } default: jj_la1[181] = jj_gen; ; } break; - case QNAME_NS: + } + case QNAME_NS:{ t1 = jj_consume_token(QNAME_NS); - cst = astq.createQName(t1.image); +cst = astq.createQName(t1.image); break; + } case STRING_LITERAL1: case STRING_LITERAL2: case STRING_LITERAL_LONG1: - case STRING_LITERAL_LONG2: + case STRING_LITERAL_LONG2:{ cst = RDFLiteral(); break; + } case INTEGER: case DECIMAL: - case DOUBLE: + case DOUBLE:{ cst = NumericLiteral(); break; + } case TRUE: - case FALSE: + case FALSE:{ cst = BooleanLiteral(); break; - case LPAREN: + } + case LPAREN:{ jj_consume_token(LPAREN); jj_consume_token(RPAREN); - cst = astq.createQName(RDFS.qrdfNil); +cst = astq.createQName(RDFS.qrdfNil); break; + } default: jj_la1[182] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - a = cst; +a = cst; break; + } case BLANK_NODE_LABEL: - case ANON: + case ANON:{ a = BlankNode(stack); break; + } default: jj_la1[183] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return a;} +{if ("" != null) return a;} throw new Error("Missing return statement in function"); - } +} // -------- Constraint syntax - final public Expression Expression(Exp stack) throws ParseException { - Expression expression1; + final public +Expression Expression(Exp stack) throws ParseException {Expression expression1; expression1 = ConditionalOrExpression(stack); - {if (true) return expression1;} +{if ("" != null) return expression1;} throw new Error("Missing return statement in function"); - } +} - final public Expression ConditionalOrExpression(Exp stack) throws ParseException { - Token t1; Expression expression1, expression2; + final public Expression ConditionalOrExpression(Exp stack) throws ParseException {Token t1; Expression expression1, expression2; expression1 = ConditionalAndExpression(stack); label_42: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case SC_OR: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case SC_OR:{ ; break; + } default: jj_la1[184] = jj_gen; break label_42; } t1 = jj_consume_token(SC_OR); expression2 = ConditionalAndExpression(stack); - expression1 = astq.createConditionalOrExpression(t1.image, expression1, expression2); +expression1 = astq.createConditionalOrExpression(t1.image, expression1, expression2); } - {if (true) return expression1;} 
+{if ("" != null) return expression1;} throw new Error("Missing return statement in function"); - } +} - final public Expression ConditionalAndExpression(Exp stack) throws ParseException { - Token t1; Expression expression1, expression2; + final public Expression ConditionalAndExpression(Exp stack) throws ParseException {Token t1; Expression expression1, expression2; expression1 = ValueLogical(stack); label_43: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case SC_AND: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case SC_AND:{ ; break; + } default: jj_la1[185] = jj_gen; break label_43; } t1 = jj_consume_token(SC_AND); expression2 = ValueLogical(stack); - expression1 = astq.createConditionalAndExpression(t1.image, expression1, expression2); +expression1 = astq.createConditionalAndExpression(t1.image, expression1, expression2); } - {if (true) return expression1;} +{if ("" != null) return expression1;} throw new Error("Missing return statement in function"); - } +} - final public Expression ValueLogical(Exp stack) throws ParseException { - Expression expression1; + final public Expression ValueLogical(Exp stack) throws ParseException {Expression expression1; expression1 = RelationalExpression(stack); - {if (true) return expression1;} +{if ("" != null) return expression1;} throw new Error("Missing return statement in function"); - } +} - final public Expression RelationalExpression(Exp stack) throws ParseException { - Token t1; Expression expression1, expression2; ExpressionList el; boolean not = false; + final public Expression RelationalExpression(Exp stack) throws ParseException {Token t1; Expression expression1, expression2; ExpressionList el; boolean not = false; expression1 = NumericExpression(stack); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case NOT: case IN: case EQ2: @@ -5411,8 +5680,8 @@ final public Expression RelationalExpression(Exp stack) throws ParseException { case GT: case LE: case GE: - case TILDE: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case TILDE:{ + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case EQ2: case NE2: case BEGIN_WITH: @@ -5422,137 +5691,146 @@ final public Expression RelationalExpression(Exp stack) throws ParseException { case GT: case LE: case GE: - case TILDE: + case TILDE:{ t1 = Soper(); expression2 = NumericExpression(stack); break; + } case NOT: - case IN: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case NOT: + case IN:{ + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case NOT:{ jj_consume_token(NOT); - not = true; +not = true; break; + } default: jj_la1[186] = jj_gen; ; } t1 = jj_consume_token(IN); el = ArgList(stack); - expression2 = astq.createList(el); +expression2 = astq.createList(el); break; + } default: jj_la1[187] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - expression1 = astq.createRelationalExpression(t1.image, expression1, expression2); +expression1 = astq.createRelationalExpression(t1.image, expression1, expression2); if (not){ expression1 = astq.negation(expression1); } break; + } default: jj_la1[188] = jj_gen; ; } - {if (true) return expression1;} +{if ("" != null) return expression1;} throw new Error("Missing return statement in function"); - } +} - final public Expression NumericExpression(Exp stack) throws ParseException { - Expression expression1; + final public Expression NumericExpression(Exp stack) throws ParseException {Expression expression1; expression1 = AdditiveExpression(stack); - {if (true) return expression1;} +{if ("" != null) return expression1;} 
throw new Error("Missing return statement in function"); - } +} - final public Expression AdditiveExpression(Exp stack) throws ParseException { - Token t1; Expression expression1, expression2; + final public Expression AdditiveExpression(Exp stack) throws ParseException {Token t1; Expression expression1, expression2; expression1 = MultiplicativeExpression(stack); label_44: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case PLUS: - case MINUS: + case MINUS:{ ; break; + } default: jj_la1[189] = jj_gen; break label_44; } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case PLUS: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case PLUS:{ t1 = jj_consume_token(PLUS); expression2 = MultiplicativeExpression(stack); - expression1 = astq.createAdditiveExpression(t1.image, expression1, expression2); +expression1 = astq.createAdditiveExpression(t1.image, expression1, expression2); break; - case MINUS: + } + case MINUS:{ t1 = jj_consume_token(MINUS); expression2 = MultiplicativeExpression(stack); - expression1 = astq.createAdditiveExpression(t1.image, expression1, expression2); +expression1 = astq.createAdditiveExpression(t1.image, expression1, expression2); break; + } default: jj_la1[190] = jj_gen; jj_consume_token(-1); throw new ParseException(); } } - {if (true) return expression1;} +{if ("" != null) return expression1;} throw new Error("Missing return statement in function"); - } +} - final public Expression MultiplicativeExpression(Exp stack) throws ParseException { - Token t1; Expression expression1, expression2; + final public Expression MultiplicativeExpression(Exp stack) throws ParseException {Token t1; Expression expression1, expression2; expression1 = UnaryExpression(stack); label_45: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case STAR: - case SLASH: + case SLASH:{ ; break; + } default: jj_la1[191] = jj_gen; break label_45; } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case STAR: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case STAR:{ t1 = jj_consume_token(STAR); expression2 = UnaryExpression(stack); break; - case SLASH: + } + case SLASH:{ t1 = jj_consume_token(SLASH); expression2 = UnaryExpression(stack); break; + } default: jj_la1[192] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - expression1 = astq.createMultiplicativeExpression(t1.image, expression1, expression2); +expression1 = astq.createMultiplicativeExpression(t1.image, expression1, expression2); } - {if (true) return expression1;} +{if ("" != null) return expression1;} throw new Error("Missing return statement in function"); - } +} - final public Expression UnaryExpression(Exp stack) throws ParseException { - Token t1; Expression expression1; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case BANG: + final public Expression UnaryExpression(Exp stack) throws ParseException {Token t1; Expression expression1; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case BANG:{ t1 = jj_consume_token(BANG); expression1 = PrimaryExpression(stack); - expression1 = astq.createUnaryExpression(t1.image, expression1); +expression1 = astq.createUnaryExpression(t1.image, expression1); break; - case PLUS: + } + case PLUS:{ t1 = jj_consume_token(PLUS); expression1 = PrimaryExpression(stack); - expression1 = astq.createUnaryExpression(t1.image, expression1); +expression1 = astq.createUnaryExpression(t1.image, expression1); break; - case MINUS: + } + case MINUS:{ t1 = jj_consume_token(MINUS); expression1 = PrimaryExpression(stack); - 
expression1 = astq.createUnaryExpression(t1.image, expression1); +expression1 = astq.createUnaryExpression(t1.image, expression1); break; + } case Q_IRIref: case QNAME_NS: case QNAME: @@ -5673,44 +5951,47 @@ final public Expression UnaryExpression(Exp stack) throws ParseException { case LPAREN: case ANON: case LTLT: - case VAR3: + case VAR3:{ expression1 = PrimaryExpression(stack); break; + } default: jj_la1[193] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return expression1;} +{if ("" != null) return expression1;} throw new Error("Missing return statement in function"); - } +} - final public Expression BuiltInCall(Exp stack) throws ParseException { - ExpressionList el; Expression exp1 = null, exp2; Variable var1; Token t1; Term term; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case BOUND: + final public Expression BuiltInCall(Exp stack) throws ParseException {ExpressionList el; Expression exp1 = null, exp2; Variable var1; Token t1; Term term; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case BOUND:{ t1 = jj_consume_token(BOUND); jj_consume_token(LPAREN); var1 = Var(); jj_consume_token(RPAREN); - term = astq.createFunction(t1.image, var1); +term = astq.createFunction(t1.image, var1); break; + } case ERROR: - case BNODE: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case BNODE: + case BNODE:{ + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case BNODE:{ t1 = jj_consume_token(BNODE); break; - case ERROR: + } + case ERROR:{ t1 = jj_consume_token(ERROR); break; + } default: jj_la1[194] = jj_gen; jj_consume_token(-1); throw new ParseException(); } jj_consume_token(LPAREN); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -5834,38 +6115,45 @@ final public Expression BuiltInCall(Exp stack) throws ParseException { case BANG: case PLUS: case MINUS: - case VAR3: + case VAR3:{ exp1 = Expression(stack); break; + } default: jj_la1[195] = jj_gen; ; } - term = astq.createFunction(t1.image); +term = astq.createFunction(t1.image); if (exp1 != null) { term.add(exp1); } jj_consume_token(RPAREN); break; + } case STOP: case UUID: case STRUUID: case RAND: - case NOW: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case UUID: + case NOW:{ + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case UUID:{ t1 = jj_consume_token(UUID); break; - case STRUUID: + } + case STRUUID:{ t1 = jj_consume_token(STRUUID); break; - case NOW: + } + case NOW:{ t1 = jj_consume_token(NOW); break; - case RAND: + } + case RAND:{ t1 = jj_consume_token(RAND); break; - case STOP: + } + case STOP:{ t1 = jj_consume_token(STOP); break; + } default: jj_la1[196] = jj_gen; jj_consume_token(-1); @@ -5873,8 +6161,9 @@ final public Expression BuiltInCall(Exp stack) throws ParseException { } jj_consume_token(LPAREN); jj_consume_token(RPAREN); - term = astq.createFunction(t1.image); +term = astq.createFunction(t1.image); break; + } case RETURN: case EVAL: case SELF: @@ -5916,134 +6205,176 @@ final public Expression BuiltInCall(Exp stack) throws ParseException { case SUBJECT: case PREDICATE: case OBJECT: - case IS_TRIPLE: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case STR: + case IS_TRIPLE:{ + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case STR:{ t1 = jj_consume_token(STR); break; - case URI: + } + case URI:{ t1 = jj_consume_token(URI); break; - case IRI: + } + case IRI:{ t1 = jj_consume_token(IRI); break; - case DTYPE: + } + case DTYPE:{ t1 = jj_consume_token(DTYPE); break; - case LANG: + } + case LANG:{ t1 = jj_consume_token(LANG); break; - case 
IS_IRI: + } + case IS_IRI:{ t1 = jj_consume_token(IS_IRI); break; - case IS_URI: + } + case IS_URI:{ t1 = jj_consume_token(IS_URI); break; - case IS_BLANK: + } + case IS_BLANK:{ t1 = jj_consume_token(IS_BLANK); break; - case IS_LITERAL: + } + case IS_LITERAL:{ t1 = jj_consume_token(IS_LITERAL); break; - case IS_NUMERIC: + } + case IS_NUMERIC:{ t1 = jj_consume_token(IS_NUMERIC); break; - case IS_EXTENSION: + } + case IS_EXTENSION:{ t1 = jj_consume_token(IS_EXTENSION); break; - case IS_SAFE: + } + case IS_SAFE:{ t1 = jj_consume_token(IS_SAFE); break; - case IS_UNDEFINED: + } + case IS_UNDEFINED:{ t1 = jj_consume_token(IS_UNDEFINED); break; - case IS_WELLFORMED: + } + case IS_WELLFORMED:{ t1 = jj_consume_token(IS_WELLFORMED); break; - case STRLEN: + } + case STRLEN:{ t1 = jj_consume_token(STRLEN); break; - case UCASE: + } + case UCASE:{ t1 = jj_consume_token(UCASE); break; - case LCASE: + } + case LCASE:{ t1 = jj_consume_token(LCASE); break; - case ENCODE_FOR_URI: + } + case ENCODE_FOR_URI:{ t1 = jj_consume_token(ENCODE_FOR_URI); break; - case MD5: + } + case MD5:{ t1 = jj_consume_token(MD5); break; - case SHA1: + } + case SHA1:{ t1 = jj_consume_token(SHA1); break; - case SHA256: + } + case SHA256:{ t1 = jj_consume_token(SHA256); break; - case SHA384: + } + case SHA384:{ t1 = jj_consume_token(SHA384); break; - case SHA512: + } + case SHA512:{ t1 = jj_consume_token(SHA512); break; - case ROUND: + } + case ROUND:{ t1 = jj_consume_token(ROUND); break; - case FLOOR: + } + case FLOOR:{ t1 = jj_consume_token(FLOOR); break; - case CEIL: + } + case CEIL:{ t1 = jj_consume_token(CEIL); break; - case ABS: + } + case ABS:{ t1 = jj_consume_token(ABS); break; - case YEAR: + } + case YEAR:{ t1 = jj_consume_token(YEAR); break; - case MONTH: + } + case MONTH:{ t1 = jj_consume_token(MONTH); break; - case DAY: + } + case DAY:{ t1 = jj_consume_token(DAY); break; - case HOURS: + } + case HOURS:{ t1 = jj_consume_token(HOURS); break; - case MINUTES: + } + case MINUTES:{ t1 = jj_consume_token(MINUTES); break; - case SECONDS: + } + case SECONDS:{ t1 = jj_consume_token(SECONDS); break; - case TIMEZONE: + } + case TIMEZONE:{ t1 = jj_consume_token(TIMEZONE); break; - case TZ: + } + case TZ:{ t1 = jj_consume_token(TZ); break; - case RETURN: + } + case RETURN:{ t1 = jj_consume_token(RETURN); break; - case SELF: + } + case SELF:{ t1 = jj_consume_token(SELF); break; - case EVAL: + } + case EVAL:{ t1 = jj_consume_token(EVAL); break; - case SUBJECT: + } + case SUBJECT:{ t1 = jj_consume_token(SUBJECT); break; - case OBJECT: + } + case OBJECT:{ t1 = jj_consume_token(OBJECT); break; - case PREDICATE: + } + case PREDICATE:{ t1 = jj_consume_token(PREDICATE); break; - case IS_TRIPLE: + } + case IS_TRIPLE:{ t1 = jj_consume_token(IS_TRIPLE); break; + } default: jj_la1[197] = jj_gen; jj_consume_token(-1); @@ -6052,8 +6383,9 @@ final public Expression BuiltInCall(Exp stack) throws ParseException { jj_consume_token(LPAREN); exp1 = Expression(stack); jj_consume_token(RPAREN); - term = astq.createFunction(t1.image, exp1); +term = astq.createFunction(t1.image, exp1); break; + } case AGGREGATE: case GROUP_CONCAT: case SUM: @@ -6061,40 +6393,49 @@ final public Expression BuiltInCall(Exp stack) throws ParseException { case AVG: case MIN: case MAX: - case COUNT: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case AGGREGATE: + case COUNT:{ + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case AGGREGATE:{ t1 = jj_consume_token(AGGREGATE); break; - case GROUP_CONCAT: + } + case GROUP_CONCAT:{ t1 = jj_consume_token(GROUP_CONCAT); break; - case COUNT: + 
} + case COUNT:{ t1 = jj_consume_token(COUNT); break; - case SUM: + } + case SUM:{ t1 = jj_consume_token(SUM); break; - case SAMPLE: + } + case SAMPLE:{ t1 = jj_consume_token(SAMPLE); break; - case AVG: + } + case AVG:{ t1 = jj_consume_token(AVG); break; - case MIN: + } + case MIN:{ t1 = jj_consume_token(MIN); break; - case MAX: + } + case MAX:{ t1 = jj_consume_token(MAX); break; + } default: jj_la1[198] = jj_gen; jj_consume_token(-1); throw new ParseException(); } el = ArgList(stack); - term = astq.createFunction( t1.image, el); +term = astq.createFunction( t1.image, el); break; + } case APPLY: case REDUCE: case SAMETERM: @@ -6108,50 +6449,64 @@ final public Expression BuiltInCall(Exp stack) throws ParseException { case STRAFTER: case POWER: case SPARQL_COMPARE: - case XPATH: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case LANGMATCHES: + case XPATH:{ + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case LANGMATCHES:{ t1 = jj_consume_token(LANGMATCHES); break; - case POWER: + } + case POWER:{ t1 = jj_consume_token(POWER); break; - case XPATH: + } + case XPATH:{ t1 = jj_consume_token(XPATH); break; - case SAMETERM: + } + case SAMETERM:{ t1 = jj_consume_token(SAMETERM); break; - case STRDT: + } + case STRDT:{ t1 = jj_consume_token(STRDT); break; - case STRLANG: + } + case STRLANG:{ t1 = jj_consume_token(STRLANG); break; - case CONTAINS: + } + case CONTAINS:{ t1 = jj_consume_token(CONTAINS); break; - case STRSTARTS: + } + case STRSTARTS:{ t1 = jj_consume_token(STRSTARTS); break; - case STRENDS: + } + case STRENDS:{ t1 = jj_consume_token(STRENDS); break; - case STRBEFORE: + } + case STRBEFORE:{ t1 = jj_consume_token(STRBEFORE); break; - case STRAFTER: + } + case STRAFTER:{ t1 = jj_consume_token(STRAFTER); break; - case REDUCE: + } + case REDUCE:{ t1 = jj_consume_token(REDUCE); break; - case APPLY: + } + case APPLY:{ t1 = jj_consume_token(APPLY); break; - case SPARQL_COMPARE: + } + case SPARQL_COMPARE:{ t1 = jj_consume_token(SPARQL_COMPARE); break; + } default: jj_la1[199] = jj_gen; jj_consume_token(-1); @@ -6162,8 +6517,9 @@ final public Expression BuiltInCall(Exp stack) throws ParseException { jj_consume_token(COMMA); exp2 = Expression(stack); jj_consume_token(RPAREN); - term = astq.createFunction(t1.image, exp1); term.add(exp2); +term = astq.createFunction(t1.image, exp1); term.add(exp2); break; + } case THROW: case UNNEST: case MAP: @@ -6180,167 +6536,194 @@ final public Expression BuiltInCall(Exp stack) throws ParseException { case CONCAT: case REPLACE: case REGEX: - case TRIPLE: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case UNNEST: + case TRIPLE:{ + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case UNNEST:{ t1 = jj_consume_token(UNNEST); break; - case REGEX: + } + case REGEX:{ t1 = jj_consume_token(REGEX); break; - case COALESCE: + } + case COALESCE:{ t1 = jj_consume_token(COALESCE); break; - case CONCAT: + } + case CONCAT:{ t1 = jj_consume_token(CONCAT); break; - case SUBSTR: + } + case SUBSTR:{ t1 = jj_consume_token(SUBSTR); break; - case REPLACE: + } + case REPLACE:{ t1 = jj_consume_token(REPLACE); break; - case METHOD: + } + case METHOD:{ t1 = jj_consume_token(METHOD); break; - case FUNCALL: + } + case FUNCALL:{ t1 = jj_consume_token(FUNCALL); break; - case MAP: + } + case MAP:{ t1 = jj_consume_token(MAP); break; - case MAPLIST: + } + case MAPLIST:{ t1 = jj_consume_token(MAPLIST); break; - case MAPMERGE: + } + case MAPMERGE:{ t1 = jj_consume_token(MAPMERGE); break; - case MAPFINDLIST: + } + case MAPFINDLIST:{ t1 = jj_consume_token(MAPFINDLIST); break; - case MAPFIND: + } + case 
MAPFIND:{ t1 = jj_consume_token(MAPFIND); break; - case MAPANY: + } + case MAPANY:{ t1 = jj_consume_token(MAPANY); break; - case MAPEVERY: + } + case MAPEVERY:{ t1 = jj_consume_token(MAPEVERY); break; - case THROW: + } + case THROW:{ t1 = jj_consume_token(THROW); break; - case TRIPLE: + } + case TRIPLE:{ t1 = jj_consume_token(TRIPLE); break; + } default: jj_la1[200] = jj_gen; jj_consume_token(-1); throw new ParseException(); } jj_consume_token(LPAREN); - term = astq.createFunction(t1.image); +term = astq.createFunction(t1.image); exp1 = Expression(stack); - term.add(exp1); +term.add(exp1); label_46: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case COMMA: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case COMMA:{ ; break; + } default: jj_la1[201] = jj_gen; break label_46; } jj_consume_token(COMMA); exp2 = Expression(stack); - term.add(exp2); +term.add(exp2); } jj_consume_token(RPAREN); break; + } case NOT: - case EXIST: + case EXIST:{ term = exists(); break; + } default: jj_la1[202] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return term;} +{if ("" != null) return term;} throw new Error("Missing return statement in function"); - } +} - final public Term exists() throws ParseException { - Exp exp; Term term; boolean neg; - neg = false; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case NOT: + final public Term exists() throws ParseException {Exp exp; Term term; boolean neg; +neg = false; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case NOT:{ jj_consume_token(NOT); - neg = true; +neg = true; break; + } default: jj_la1[203] = jj_gen; ; } jj_consume_token(EXIST); exp = GroupGraphPattern(); - term = astq.createExist(exp, neg); - {if (true) return term;} +term = astq.createExist(exp, neg); + {if ("" != null) return term;} throw new Error("Missing return statement in function"); - } +} - final public Term FunctionCorese(Exp stack) throws ParseException { - Token t; Term term = null; ExpressionList v; Constant cst; Exp sub; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case IF: + final public Term FunctionCorese(Exp stack) throws ParseException {Token t; Term term = null; ExpressionList v; Constant cst; Exp sub; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case IF:{ term = IfThenElse(stack); break; - case TRY: + } + case TRY:{ term = TryCatch(stack); break; + } case LET: - case LETDYN: + case LETDYN:{ term = Let(stack); break; + } case SET: - case STATIC: + case STATIC:{ term = Set(stack); break; + } case FOR: - case LOOP: + case LOOP:{ term = For(stack); break; + } case FUNCTION: - case LAMBDA: + case LAMBDA:{ term = Lambda(); break; - case QUERY: + } + case QUERY:{ term = QueryExp(); break; + } default: jj_la1[204] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return term;} +{if ("" != null) return term;} throw new Error("Missing return statement in function"); - } +} - final public Term QueryExp() throws ParseException { - Expression exp = null; Exp stack; Term term; Exp sub; Metadata meta = null; + final public Term QueryExp() throws ParseException {Expression exp = null; Exp stack; Term term; Exp sub; Metadata meta = null; jj_consume_token(QUERY); jj_consume_token(LPAREN); meta = Annotate(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case SELECT: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case SELECT:{ sub = SubQueryExp(meta); break; + } case CONSTRUCT: - case RULE: + case RULE:{ sub = SubConstruct(meta); - sub = BasicGraphPattern.create(sub); +sub = BasicGraphPattern.create(sub); break; + } case LOAD: case CLEAR: 
case DROP: @@ -6350,142 +6733,153 @@ final public Term QueryExp() throws ParseException { case COPY: case INSERT: case DELETE: - case WITH: + case WITH:{ sub = SubUpdate(meta); - sub = BasicGraphPattern.create(sub); +sub = BasicGraphPattern.create(sub); break; + } default: jj_la1[205] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case COMMA: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case COMMA:{ jj_consume_token(COMMA); - stack = BasicGraphPattern.create(); +stack = BasicGraphPattern.create(); exp = Expression(stack); break; + } default: jj_la1[206] = jj_gen; ; } jj_consume_token(RPAREN); - {if (true) return astq.term(sub, exp);} +{if ("" != null) return astq.term(sub, exp);} throw new Error("Missing return statement in function"); - } +} - final public Term IfThenElse(Exp stack) throws ParseException { - Expression test, e1, e2 = null; + final public Term IfThenElse(Exp stack) throws ParseException {Expression test, e1, e2 = null; jj_consume_token(IF); jj_consume_token(LPAREN); test = Expression(stack); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case COMMA: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case COMMA:{ jj_consume_token(COMMA); e1 = Expression(stack); jj_consume_token(COMMA); e2 = Expression(stack); jj_consume_token(RPAREN); break; - case RPAREN: + } + case RPAREN:{ jj_consume_token(RPAREN); e1 = Body(stack); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case ELSE: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case ELSE:{ jj_consume_token(ELSE); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case LBRACE: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case LBRACE:{ e2 = Body(stack); break; - case IF: + } + case IF:{ e2 = IfThenElse(stack); break; + } default: jj_la1[207] = jj_gen; jj_consume_token(-1); throw new ParseException(); } break; + } default: jj_la1[208] = jj_gen; ; } break; + } default: jj_la1[209] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return astq.ifThenElse(test, e1, e2);} +{if ("" != null) return astq.ifThenElse(test, e1, e2);} throw new Error("Missing return statement in function"); - } +} - final public Term For(Exp stack) throws ParseException { - boolean isLoop = false; Exp sub; Term t; Expression exp, body ; Variable var = null; + final public Term For(Exp stack) throws ParseException {boolean isLoop = false; Exp sub; Term t; Expression exp, body ; Variable var = null; ExpressionList arg, el = null; Metadata meta = null; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case FOR: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case FOR:{ jj_consume_token(FOR); break; - case LOOP: + } + case LOOP:{ jj_consume_token(LOOP); - isLoop = true; +isLoop = true; break; + } default: jj_la1[210] = jj_gen; jj_consume_token(-1); throw new ParseException(); } jj_consume_token(LPAREN); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case VAR1: case VAR2: case LPAREN: - case VAR3: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case VAR3:{ + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case VAR1: case VAR2: - case VAR3: + case VAR3:{ var = Var(); break; - case LPAREN: + } + case LPAREN:{ jj_consume_token(LPAREN); - el = new ExpressionList(); +el = new ExpressionList(); var = Var(); - el.add(var); +el.add(var); label_47: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case VAR1: case VAR2: case COMMA: - case VAR3: + case VAR3:{ ; break; + } default: jj_la1[211] = jj_gen; break label_47; } - switch 
((jj_ntk==-1)?jj_ntk():jj_ntk) { - case COMMA: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case COMMA:{ jj_consume_token(COMMA); break; + } default: jj_la1[212] = jj_gen; ; } var = Var(); - el.add(var); +el.add(var); } jj_consume_token(RPAREN); break; + } default: jj_la1[213] = jj_gen; jj_consume_token(-1); throw new ParseException(); } jj_consume_token(IN); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -6609,9 +7003,10 @@ final public Term For(Exp stack) throws ParseException { case BANG: case PLUS: case MINUS: - case VAR3: + case VAR3:{ exp = Expression(stack); break; + } case ATPATH: case LANGTAG: case SELECT: @@ -6628,22 +7023,25 @@ final public Term For(Exp stack) throws ParseException { case INSERT: case DELETE: case WITH: - case AT: + case AT:{ exp = LetForExp(); break; + } default: jj_la1[214] = jj_gen; jj_consume_token(-1); throw new ParseException(); } break; + } case ATPATH: case LANGTAG: - case SELECT: + case SELECT:{ meta = Annotate(); sub = SubQueryExp(meta); - el = new ExpressionList(); exp = astq.term(sub); +el = new ExpressionList(); exp = astq.term(sub); break; + } default: jj_la1[215] = jj_gen; jj_consume_token(-1); @@ -6651,27 +7049,29 @@ final public Term For(Exp stack) throws ParseException { } jj_consume_token(RPAREN); body = Body(stack); - {if (true) return astq.defineLoop(var, el, exp, body, isLoop);} +{if ("" != null) return astq.defineLoop(var, el, exp, body, isLoop);} throw new Error("Missing return statement in function"); - } +} - final public Expression LetForExp() throws ParseException { - Exp sub; Expression exp; Metadata meta = null; + final public Expression LetForExp() throws ParseException {Exp sub; Expression exp; Metadata meta = null; meta = Annotate(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case SELECT: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case SELECT:{ sub = SubQueryExp(meta); - exp = astq.term(sub); +exp = astq.term(sub); break; - case SERVICE: + } + case SERVICE:{ sub = ServiceExp(); - exp = astq.term(sub); +exp = astq.term(sub); break; + } case CONSTRUCT: - case RULE: + case RULE:{ sub = SubConstruct(meta); - exp = astq.term(BasicGraphPattern.create(sub)); +exp = astq.term(BasicGraphPattern.create(sub)); break; + } case LOAD: case CLEAR: case DROP: @@ -6681,37 +7081,37 @@ final public Expression LetForExp() throws ParseException { case COPY: case INSERT: case DELETE: - case WITH: + case WITH:{ sub = SubUpdate(meta); - exp = astq.term(BasicGraphPattern.create(sub)); +exp = astq.term(BasicGraphPattern.create(sub)); break; - case AT: + } + case AT:{ jj_consume_token(AT); exp = CstList(); break; + } default: jj_la1[216] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return exp;} +{if ("" != null) return exp;} throw new Error("Missing return statement in function"); - } +} - final public Constant CstList() throws ParseException { - IDatatype dt; Constant list; + final public Constant CstList() throws ParseException {IDatatype dt; Constant list; dt = ExpList(); - {if (true) return astq.createLDSList(dt);} +{if ("" != null) return astq.createLDSList(dt);} throw new Error("Missing return statement in function"); - } +} - final public IDatatype ExpList() throws ParseException { - IDatatype dt; ArrayList list; - list = new ArrayList() ; + final public IDatatype ExpList() throws ParseException {IDatatype dt; ArrayList list; +list = new ArrayList() ; jj_consume_token(LPAREN); label_48: while (true) { - switch 
((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -6724,14 +7124,15 @@ final public IDatatype ExpList() throws ParseException { case STRING_LITERAL2: case STRING_LITERAL_LONG1: case STRING_LITERAL_LONG2: - case LPAREN: + case LPAREN:{ ; break; + } default: jj_la1[217] = jj_gen; break label_48; } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -6743,65 +7144,71 @@ final public IDatatype ExpList() throws ParseException { case STRING_LITERAL1: case STRING_LITERAL2: case STRING_LITERAL_LONG1: - case STRING_LITERAL_LONG2: + case STRING_LITERAL_LONG2:{ dt = ListTerm(); break; - case LPAREN: + } + case LPAREN:{ dt = ExpList(); break; + } default: jj_la1[218] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - list.add(dt); +list.add(dt); } jj_consume_token(RPAREN); - {if (true) return DatatypeMap.createList(list);} +{if ("" != null) return DatatypeMap.createList(list);} throw new Error("Missing return statement in function"); - } +} - final public IDatatype ListTerm() throws ParseException { - String s; Token t1; Atom a; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case Q_IRIref: + final public IDatatype ListTerm() throws ParseException {String s; Token t1; Atom a; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case Q_IRIref:{ s = Q_IRI_ref(); - a = astq.createURI(s); +a = astq.createURI(s); break; - case QNAME: + } + case QNAME:{ t1 = jj_consume_token(QNAME); - a = astq.createQName(t1.image); +a = astq.createQName(t1.image); break; - case QNAME_NS: + } + case QNAME_NS:{ t1 = jj_consume_token(QNAME_NS); - a = astq.createQName(t1.image); +a = astq.createQName(t1.image); break; + } case STRING_LITERAL1: case STRING_LITERAL2: case STRING_LITERAL_LONG1: - case STRING_LITERAL_LONG2: + case STRING_LITERAL_LONG2:{ a = RDFLiteral(); break; + } case INTEGER: case DECIMAL: - case DOUBLE: + case DOUBLE:{ a = NumericLiteral(); break; + } case TRUE: - case FALSE: + case FALSE:{ a = BooleanLiteral(); break; + } default: jj_la1[219] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return a.getDatatypeValue();} +{if ("" != null) return a.getDatatypeValue();} throw new Error("Missing return statement in function"); - } +} - final public Term TryCatch(Exp stack) throws ParseException { - Variable var; Expression e1, e2; + final public Term TryCatch(Exp stack) throws ParseException {Variable var; Expression e1, e2; jj_consume_token(TRY); e1 = Body(stack); jj_consume_token(CATCH); @@ -6809,59 +7216,62 @@ final public Term TryCatch(Exp stack) throws ParseException { var = Var(); jj_consume_token(RPAREN); e2 = Body(stack); - {if (true) return astq.defTryCatch(e1, var, e2);} +{if ("" != null) return astq.defTryCatch(e1, var, e2);} throw new Error("Missing return statement in function"); - } +} - final public Term Let(Exp stack) throws ParseException { - boolean dynamic = false; Expression exp ; Expression arg; ExpressionList list; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case LET: + final public Term Let(Exp stack) throws ParseException {boolean dynamic = false; Expression exp ; Expression arg; ExpressionList list; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case LET:{ jj_consume_token(LET); break; - case LETDYN: + } + case LETDYN:{ jj_consume_token(LETDYN); - dynamic = true; +dynamic = true; break; + } default: jj_la1[220] = jj_gen; jj_consume_token(-1); throw new ParseException(); } jj_consume_token(LPAREN); - list = new 
ExpressionList(); +list = new ExpressionList(); arg = LetDef(stack); - list.add(arg); +list.add(arg); label_49: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case COMMA: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case COMMA:{ ; break; + } default: jj_la1[221] = jj_gen; break label_49; } jj_consume_token(COMMA); arg = LetDef(stack); - list.add(arg); +list.add(arg); } jj_consume_token(RPAREN); exp = Body(stack); - {if (true) return astq.let(list, exp, dynamic);} +{if ("" != null) return astq.let(list, exp, dynamic);} throw new Error("Missing return statement in function"); - } +} - final public Term Set(Exp stack) throws ParseException { - Expression exp ; Variable var; boolean stat = false; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case SET: + final public Term Set(Exp stack) throws ParseException {Expression exp ; Variable var; boolean stat = false; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case SET:{ jj_consume_token(SET); break; - case STATIC: + } + case STATIC:{ jj_consume_token(STATIC); - stat = true; +stat = true; break; + } default: jj_la1[222] = jj_gen; jj_consume_token(-1); @@ -6872,49 +7282,51 @@ final public Term Set(Exp stack) throws ParseException { jj_consume_token(EQ); exp = Expression(stack); jj_consume_token(RPAREN); - {if (true) return astq.set(var, exp, stat);} +{if ("" != null) return astq.set(var, exp, stat);} throw new Error("Missing return statement in function"); - } +} - final public Expression LetDef(Exp stack) throws ParseException { - Variable var = null; Expression exp ; ExpressionList el = null; Exp sub; Constant type = null; Metadata meta = null; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + final public Expression LetDef(Exp stack) throws ParseException {Variable var = null; Expression exp ; ExpressionList el = null; Exp sub; Constant type = null; Metadata meta = null; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: case VAR1: case VAR2: case LPAREN: - case VAR3: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case VAR3:{ + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: case VAR1: case VAR2: - case VAR3: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case VAR3:{ + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: - case QNAME: + case QNAME:{ type = IRIrefConst(); break; + } default: jj_la1[223] = jj_gen; ; } var = Var(); break; - case LPAREN: + } + case LPAREN:{ el = LetArgList(); break; + } default: jj_la1[224] = jj_gen; jj_consume_token(-1); throw new ParseException(); } jj_consume_token(EQ); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -7038,9 +7450,10 @@ final public Expression LetDef(Exp stack) throws ParseException { case BANG: case PLUS: case MINUS: - case VAR3: + case VAR3:{ exp = Expression(stack); break; + } case ATPATH: case LANGTAG: case SELECT: @@ -7057,85 +7470,86 @@ final public Expression LetDef(Exp stack) throws ParseException { case INSERT: case DELETE: case WITH: - case AT: + case AT:{ exp = LetForExp(); break; + } default: jj_la1[225] = jj_gen; jj_consume_token(-1); throw new ParseException(); } break; + } case ATPATH: case LANGTAG: - case SELECT: + case SELECT:{ // let (select where) meta = Annotate(); sub = SubQueryExp(meta); - el = new ExpressionList(); exp = astq.term(sub); +el = new ExpressionList(); exp = astq.term(sub); break; + } default: jj_la1[226] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return (el == 
null) ? astq.defLet(var, type, exp) : astq.defLet(el, exp);} +{if ("" != null) return (el == null) ? astq.defLet(var, type, exp) : astq.defLet(el, exp);} throw new Error("Missing return statement in function"); - } +} - final public Term FunctionCall(Exp stack) throws ParseException { - Term term1; ExpressionList v; Constant cst; + final public Term FunctionCall(Exp stack) throws ParseException {Term term1; ExpressionList v; Constant cst; cst = IRIrefConst(); v = ArgList(stack); - term1 = astq.createFunction(cst, v); - {if (true) return term1;} +term1 = astq.createFunction(cst, v); +{if ("" != null) return term1;} throw new Error("Missing return statement in function"); - } +} // The case of "q:name()" or just "q:name" // by expanding out FunctionCall() - final public Expression IRIrefOrFunction(Exp stack) throws ParseException { - Expression exp; + final public +Expression IRIrefOrFunction(Exp stack) throws ParseException {Expression exp; exp = StdIRIrefOrFunction(stack); - {if (true) return exp;} +{if ("" != null) return exp;} throw new Error("Missing return statement in function"); - } +} - final public Expression StdIRIrefOrFunction(Exp stack) throws ParseException { - String s1; ExpressionList v; Term term; Expression expression; Constant cst; + final public Expression StdIRIrefOrFunction(Exp stack) throws ParseException {String s1; ExpressionList v; Term term; Expression expression; Constant cst; cst = IRIrefConst(); - expression = cst; +expression = cst; if (jj_2_18(2)) { v = ArgList(stack); - term = astq.createFunction(cst, v); +term = astq.createFunction(cst, v); expression = term; } else { ; } - {if (true) return expression;} +{if ("" != null) return expression;} throw new Error("Missing return statement in function"); - } +} - final public Function Function(Metadata global, Metadata local) throws ParseException { - Exp stack; Token t = null; Function term; ExpressionList v; Constant type, name=null; Expression body; + final public Function Function(Metadata global, Metadata local) throws ParseException {Exp stack; Token t = null; Function term; ExpressionList v; Constant type, name=null; Expression body; jj_consume_token(FUNCTION); type = IRIrefConst(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: - case QNAME: + case QNAME:{ name = IRIrefConst(); break; + } default: jj_la1[227] = jj_gen; ; } - stack = BasicGraphPattern.create(); +stack = BasicGraphPattern.create(); handler.setMetadata(metadata(global, local)) ; handler.setFunction(true); v = FunArgList(); body = Body(stack); - // type is optional, name is mandatory +// type is optional, name is mandatory if (name == null) { name = type; type = null; @@ -7143,107 +7557,111 @@ final public Function Function(Metadata global, Metadata local) throws ParseExce term = astq.defineFunction(name, type, v, body, metadata(global, local) ); handler.cleanMetadata(); handler.setFunction(false); - {if (true) return term;} +{if ("" != null) return term;} throw new Error("Missing return statement in function"); - } +} - final public Function Lambda() throws ParseException { - Exp stack; Token t = null; Function term; ExpressionList v; Constant cst; Expression body; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case LAMBDA: + final public Function Lambda() throws ParseException {Exp stack; Token t = null; Function term; ExpressionList v; Constant cst; Expression body; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case LAMBDA:{ jj_consume_token(LAMBDA); break; - case FUNCTION: + 
} + case FUNCTION:{ jj_consume_token(FUNCTION); break; + } default: jj_la1[228] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - stack = BasicGraphPattern.create(); +stack = BasicGraphPattern.create(); v = LambdaArgList(); body = Body(stack); - term = gast.defineLambda(v, body, handler.getMetadata()); - {if (true) return term;} +term = gast.defineLambda(v, body, handler.getMetadata()); +{if ("" != null) return term;} throw new Error("Missing return statement in function"); - } +} - final public ExpressionList FunArgList() throws ParseException { - ExpressionList list; Variable var; Constant type; - list = new ExpressionList(); + final public ExpressionList FunArgList() throws ParseException {ExpressionList list; Variable var; Constant type; +list = new ExpressionList(); jj_consume_token(LPAREN); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: case VAR1: case VAR2: - case VAR3: - type = null; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case VAR3:{ +type = null; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: - case QNAME: + case QNAME:{ type = IRIrefConst(); break; + } default: jj_la1[229] = jj_gen; ; } var = Var(); - list.add(var); list.defType(var, type); +list.add(var); list.defType(var, type); label_50: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case COMMA: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case COMMA:{ ; break; + } default: jj_la1[230] = jj_gen; break label_50; } jj_consume_token(COMMA); - type = null; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { +type = null; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: - case QNAME: + case QNAME:{ type = IRIrefConst(); break; + } default: jj_la1[231] = jj_gen; ; } var = Var(); - list.add(var); list.defType(var, type); +list.add(var); list.defType(var, type); } break; + } default: jj_la1[232] = jj_gen; ; } jj_consume_token(RPAREN); - {if (true) return list;} +{if ("" != null) return list;} throw new Error("Missing return statement in function"); - } +} - final public ExpressionList LambdaArgList() throws ParseException { - ExpressionList glist, list; Variable var; Constant type; + final public ExpressionList LambdaArgList() throws ParseException {ExpressionList glist, list; Variable var; Constant type; jj_consume_token(LPAREN); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case LPAREN: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case LPAREN:{ jj_consume_token(LPAREN); - glist = new ExpressionList(); +glist = new ExpressionList(); list = LambdaVarList(); - glist.add(list); +glist.add(list); jj_consume_token(RPAREN); label_51: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case COMMA: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case COMMA:{ ; break; + } default: jj_la1[233] = jj_gen; break label_51; @@ -7251,75 +7669,78 @@ final public ExpressionList LambdaArgList() throws ParseException { jj_consume_token(COMMA); jj_consume_token(LPAREN); list = LambdaVarList(); - glist.add(list); +glist.add(list); jj_consume_token(RPAREN); } - list = glist; +list = glist; break; + } default: jj_la1[234] = jj_gen; list = LambdaVarList(); } jj_consume_token(RPAREN); - {if (true) return list;} +{if ("" != null) return list;} throw new Error("Missing return statement in function"); - } +} - final public ExpressionList LambdaVarList() throws ParseException { - ExpressionList list; Variable var; - list = new ExpressionList(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + final public 
ExpressionList LambdaVarList() throws ParseException {ExpressionList list; Variable var; +list = new ExpressionList(); + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case VAR1: case VAR2: - case VAR3: + case VAR3:{ var = Var(); - list.add(var); +list.add(var); label_52: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case COMMA: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case COMMA:{ ; break; + } default: jj_la1[235] = jj_gen; break label_52; } jj_consume_token(COMMA); var = Var(); - list.add(var); +list.add(var); } break; + } default: jj_la1[236] = jj_gen; ; } - {if (true) return list;} +{if ("" != null) return list;} throw new Error("Missing return statement in function"); - } +} - final public ExpressionList LetArgList() throws ParseException { - ExpressionList glist, list; Variable var; Constant type; + final public ExpressionList LetArgList() throws ParseException {ExpressionList glist, list; Variable var; Constant type; jj_consume_token(LPAREN); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case VAR1: case VAR2: case DOT: case BAR: - case VAR3: + case VAR3:{ list = VarList(); break; - case LPAREN: + } + case LPAREN:{ jj_consume_token(LPAREN); - glist = new ExpressionList(); +glist = new ExpressionList(); list = VarList(); - glist.add(list); +glist.add(list); jj_consume_token(RPAREN); label_53: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case COMMA: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case COMMA:{ ; break; + } default: jj_la1[237] = jj_gen; break label_53; @@ -7327,171 +7748,180 @@ final public ExpressionList LetArgList() throws ParseException { jj_consume_token(COMMA); jj_consume_token(LPAREN); list = VarList(); - glist.add(list); +glist.add(list); jj_consume_token(RPAREN); } - list = glist; +list = glist; break; + } default: jj_la1[238] = jj_gen; jj_consume_token(-1); throw new ParseException(); } jj_consume_token(RPAREN); - {if (true) return list;} +{if ("" != null) return list;} throw new Error("Missing return statement in function"); - } +} - final public ExpressionList VarList() throws ParseException { - ExpressionList list; - list = new ExpressionList(); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + final public ExpressionList VarList() throws ParseException {ExpressionList list; +list = new ExpressionList(); + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case VAR1: case VAR2: - case VAR3: + case VAR3:{ FstVar(list); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case BAR: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case BAR:{ RstVar(list); break; + } default: jj_la1[239] = jj_gen; ; } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case DOT: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case DOT:{ LstVar(list); break; + } default: jj_la1[240] = jj_gen; ; } break; - case BAR: + } + case BAR:{ RstVar(list); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case DOT: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case DOT:{ LstVar(list); break; + } default: jj_la1[241] = jj_gen; ; } break; - case DOT: + } + case DOT:{ LstVar(list); break; + } default: jj_la1[242] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return list;} +{if ("" != null) return list;} throw new Error("Missing return statement in function"); - } +} - final public ExpressionList FstVar(ExpressionList list) throws ParseException { - Variable var; + final public ExpressionList FstVar(ExpressionList list) throws ParseException {Variable var; var = Var(); - list.add(var); +list.add(var); label_54: while (true) { - switch 
((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case VAR1: case VAR2: case COMMA: - case VAR3: + case VAR3:{ ; break; + } default: jj_la1[243] = jj_gen; break label_54; } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case COMMA: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case COMMA:{ jj_consume_token(COMMA); break; + } default: jj_la1[244] = jj_gen; ; } var = Var(); - list.add(var); +list.add(var); } - {if (true) return list;} +{if ("" != null) return list;} throw new Error("Missing return statement in function"); - } +} - final public ExpressionList RstVar(ExpressionList list) throws ParseException { - Variable var; + final public ExpressionList RstVar(ExpressionList list) throws ParseException {Variable var; jj_consume_token(BAR); var = Var(); - list.add(var); list.setRest(true); - {if (true) return list;} +list.add(var); list.setRest(true); +{if ("" != null) return list;} throw new Error("Missing return statement in function"); - } +} - final public ExpressionList LstVar(ExpressionList list) throws ParseException { - Variable var; + final public ExpressionList LstVar(ExpressionList list) throws ParseException {Variable var; jj_consume_token(DOT); var = Var(); - list.add(var); list.setLast(true); +list.add(var); list.setLast(true); label_55: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case VAR1: case VAR2: - case VAR3: + case VAR3:{ ; break; + } default: jj_la1[245] = jj_gen; break label_55; } var = Var(); - list.add(var); +list.add(var); } - {if (true) return list;} +{if ("" != null) return list;} throw new Error("Missing return statement in function"); - } +} - final public Metadata Annotate() throws ParseException { - Token t, v; Metadata list = null; Constant s; + final public Metadata Annotate() throws ParseException {Token t, v; Metadata list = null; Constant s; label_56: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case ATPATH: - case LANGTAG: + case LANGTAG:{ ; break; + } default: jj_la1[246] = jj_gen; break label_56; } - s = null; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case LANGTAG: +s = null; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case LANGTAG:{ t = jj_consume_token(LANGTAG); break; - case ATPATH: + } + case ATPATH:{ t = jj_consume_token(ATPATH); break; + } default: jj_la1[247] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - if (list == null) { list = new Metadata(); } ; list.add(t.image); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case STAR: +if (list == null) { list = new Metadata(); } ; list.add(t.image); + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case STAR:{ jj_consume_token(STAR); - list.add(t.image, "*"); +list.add(t.image, "*"); break; + } default: jj_la1[250] = jj_gen; label_57: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -7504,14 +7934,15 @@ final public Metadata Annotate() throws ParseException { case STRING_LITERAL2: case STRING_LITERAL_LONG1: case STRING_LITERAL_LONG2: - case VAR3: + case VAR3:{ ; break; + } default: jj_la1[248] = jj_gen; break label_57; } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -7523,14 +7954,16 @@ final public Metadata Annotate() throws ParseException { case STRING_LITERAL1: case STRING_LITERAL2: case STRING_LITERAL_LONG1: - case STRING_LITERAL_LONG2: + case STRING_LITERAL_LONG2:{ s = 
Constant(); - list.add(t.image, s); +list.add(t.image, s); break; - case VAR3: + } + case VAR3:{ v = jj_consume_token(VAR3); - list.add(t.image, v.image); +list.add(t.image, v.image); break; + } default: jj_la1[249] = jj_gen; jj_consume_token(-1); @@ -7539,17 +7972,17 @@ final public Metadata Annotate() throws ParseException { } } } - {if (true) return list;} +{if ("" != null) return list;} throw new Error("Missing return statement in function"); - } +} - final public Expression Body(Exp stack) throws ParseException { - ExpressionList body = new ExpressionList(); Expression exp = null; + final public Expression Body(Exp stack) throws ParseException {ExpressionList body = new ExpressionList(); Expression exp = null; jj_consume_token(LBRACE); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case RBRACE: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case RBRACE:{ jj_consume_token(RBRACE); break; + } case Q_IRIref: case QNAME_NS: case QNAME: @@ -7678,8 +8111,8 @@ final public Expression Body(Exp stack) throws ParseException { case BANG: case PLUS: case MINUS: - case VAR3: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case VAR3:{ + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -7803,22 +8236,24 @@ final public Expression Body(Exp stack) throws ParseException { case BANG: case PLUS: case MINUS: - case VAR3: + case VAR3:{ exp = Expression(stack); break; + } case GROUP: case FORMAT: case BOX: case IBOX: - case SBOX: + case SBOX:{ exp = TemplateExpression(stack); break; + } default: jj_la1[251] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - body.add(exp); +body.add(exp); label_58: while (true) { if (jj_2_19(2)) { @@ -7827,7 +8262,7 @@ final public Expression Body(Exp stack) throws ParseException { break label_58; } jj_consume_token(SEMICOLON); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -7951,48 +8386,52 @@ final public Expression Body(Exp stack) throws ParseException { case BANG: case PLUS: case MINUS: - case VAR3: + case VAR3:{ exp = Expression(stack); break; + } case GROUP: case FORMAT: case BOX: case IBOX: - case SBOX: + case SBOX:{ exp = TemplateExpression(stack); break; + } default: jj_la1[252] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - body.add(exp); +body.add(exp); } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case SEMICOLON: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case SEMICOLON:{ jj_consume_token(SEMICOLON); break; + } default: jj_la1[253] = jj_gen; ; } jj_consume_token(RBRACE); break; + } default: jj_la1[254] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return astq.defineBody(body) ;} +{if ("" != null) return astq.defineBody(body) ;} throw new Error("Missing return statement in function"); - } +} - final public void Package(Metadata global) throws ParseException { - Function term; Metadata la; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case PACKAGE: + final public void Package(Metadata global) throws ParseException {Function term; Metadata la; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case PACKAGE:{ jj_consume_token(PACKAGE); break; + } default: jj_la1[255] = jj_gen; ; @@ -8002,35 +8441,36 @@ final public void Package(Metadata global) throws ParseException { while (true) { la = Annotate(); term = Function(global, la); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case ATPATH: case LANGTAG: - case FUNCTION: + case FUNCTION:{ ; break; + } default: 
jj_la1[256] = jj_gen; break label_59; } } jj_consume_token(RBRACE); - } +} - final public ExpressionList ArgList(Exp stack) throws ParseException { - ExpressionList v; Token t; Expression expression1; String str; - // Vector containing either Expression or Variable + final public ExpressionList ArgList(Exp stack) throws ParseException {ExpressionList v; Token t; Expression expression1; String str; +// Vector containing either Expression or Variable v = new ExpressionList(); jj_consume_token(LPAREN); - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case DISTINCT: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case DISTINCT:{ jj_consume_token(DISTINCT); - v.setDistinct(true); +v.setDistinct(true); break; + } default: jj_la1[257] = jj_gen; ; } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case Q_IRIref: case QNAME_NS: case QNAME: @@ -8155,11 +8595,12 @@ final public ExpressionList ArgList(Exp stack) throws ParseException { case PLUS: case MINUS: case STAR: - case VAR3: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case STAR: + case VAR3:{ + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case STAR:{ jj_consume_token(STAR); break; + } case Q_IRIref: case QNAME_NS: case QNAME: @@ -8283,73 +8724,75 @@ final public ExpressionList ArgList(Exp stack) throws ParseException { case BANG: case PLUS: case MINUS: - case VAR3: + case VAR3:{ expression1 = Arg(stack); - v.add(expression1); +v.add(expression1); label_60: while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case COMMA: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case COMMA:{ ; break; + } default: jj_la1[258] = jj_gen; break label_60; } jj_consume_token(COMMA); expression1 = Arg(stack); - v.add(expression1); +v.add(expression1); } break; + } default: jj_la1[259] = jj_gen; jj_consume_token(-1); throw new ParseException(); } break; + } default: jj_la1[260] = jj_gen; ; } - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case SEMICOLON: + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case SEMICOLON:{ jj_consume_token(SEMICOLON); jj_consume_token(SEPARATOR); jj_consume_token(EQ); str = String(); - v.setSeparator(str); +v.setSeparator(str); break; + } default: jj_la1[261] = jj_gen; ; } jj_consume_token(RPAREN); - {if (true) return v;} +{if ("" != null) return v;} throw new Error("Missing return statement in function"); - } +} - final public Expression Arg(Exp stack) throws ParseException { - Expression exp; + final public Expression Arg(Exp stack) throws ParseException {Expression exp; exp = Expression(stack); - {if (true) return exp;} +{if ("" != null) return exp;} throw new Error("Missing return statement in function"); - } +} - final public Expression BrackettedExpression(Exp stack) throws ParseException { - Expression expression1; + final public Expression BrackettedExpression(Exp stack) throws ParseException {Expression expression1; jj_consume_token(LPAREN); expression1 = Expression(stack); jj_consume_token(RPAREN); - {if (true) return expression1;} +{if ("" != null) return expression1;} throw new Error("Missing return statement in function"); - } +} - final public Expression PrimaryExpression(Exp stack) throws ParseException { - Exp exp; Expression expression1; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case LPAREN: + final public Expression PrimaryExpression(Exp stack) throws ParseException {Exp exp; Expression expression1; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case LPAREN:{ expression1 = BrackettedExpression(stack); break; + } case NOT: case THROW: case STOP: @@ -8440,9 +8883,10 @@ final public Expression 
PrimaryExpression(Exp stack) throws ParseException { case OBJECT: case IS_TRIPLE: case SPARQL_COMPARE: - case XPATH: + case XPATH:{ expression1 = BuiltInCall(stack); break; + } case TRY: case LET: case LETDYN: @@ -8453,648 +8897,485 @@ final public Expression PrimaryExpression(Exp stack) throws ParseException { case IF: case FUNCTION: case LAMBDA: - case QUERY: + case QUERY:{ expression1 = FunctionCorese(stack); break; + } case VAR1: case VAR2: - case VAR3: + case VAR3:{ expression1 = Var(); break; + } case STRING_LITERAL1: case STRING_LITERAL2: case STRING_LITERAL_LONG1: - case STRING_LITERAL_LONG2: + case STRING_LITERAL_LONG2:{ expression1 = RDFLiteral(); break; + } case INTEGER: case DECIMAL: - case DOUBLE: + case DOUBLE:{ expression1 = NumericLiteral(); break; + } case TRUE: - case FALSE: + case FALSE:{ expression1 = BooleanLiteral(); break; + } case BLANK_NODE_LABEL: - case ANON: + case ANON:{ expression1 = BlankNode(stack); break; + } case Q_IRIref: case QNAME_NS: - case QNAME: + case QNAME:{ expression1 = IRIrefOrFunction(stack); break; - case LTLT: - exp = BasicGraphPattern.create() ; + } + case LTLT:{ +exp = BasicGraphPattern.create() ; expression1 = StarTriple(exp); break; + } default: jj_la1[262] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return expression1;} +{if ("" != null) return expression1;} throw new Error("Missing return statement in function"); - } +} - final public Constant Constant() throws ParseException { - Constant c; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + final public Constant Constant() throws ParseException {Constant c; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case STRING_LITERAL1: case STRING_LITERAL2: case STRING_LITERAL_LONG1: - case STRING_LITERAL_LONG2: + case STRING_LITERAL_LONG2:{ c = StringLiteral(); break; + } case INTEGER: case DECIMAL: - case DOUBLE: + case DOUBLE:{ c = NumericLiteral(); break; + } case TRUE: - case FALSE: + case FALSE:{ c = BooleanLiteral(); break; + } case Q_IRIref: case QNAME_NS: - case QNAME: + case QNAME:{ c = IRIrefConst(); break; + } default: jj_la1[263] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return c;} +{if ("" != null) return c;} throw new Error("Missing return statement in function"); - } +} - final public Constant NumericLiteral() throws ParseException { - Token t; String datatype; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case INTEGER: + final public Constant NumericLiteral() throws ParseException {Token t; String datatype; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case INTEGER:{ t = jj_consume_token(INTEGER); - datatype = RDFS.qxsdInteger; +datatype = RDFS.qxsdInteger; break; - case DECIMAL: + } + case DECIMAL:{ t = jj_consume_token(DECIMAL); - datatype = RDFS.qxsdDecimal; +datatype = RDFS.qxsdDecimal; break; - case DOUBLE: + } + case DOUBLE:{ t = jj_consume_token(DOUBLE); - datatype = RDFS.qxsdDouble; +datatype = RDFS.qxsdDouble; break; + } default: jj_la1[264] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return astq.createConstant(t.image, datatype, null);} +{if ("" != null) return astq.createConstant(t.image, datatype, null);} throw new Error("Missing return statement in function"); - } +} - final public Constant RDFLiteral() throws ParseException { - Constant res; String iriref, s, lang; Token t; + final public Constant RDFLiteral() throws ParseException {Constant res; String iriref, s, lang; Token t; s = String(); - iriref = null; lang=null; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { +iriref = null; 
lang=null; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case LANGTAG: - case DATATYPE: - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case LANGTAG: + case DATATYPE:{ + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case LANGTAG:{ t = jj_consume_token(LANGTAG); - lang = t.image.substring(1,t.image.length()); +lang = t.image.substring(1,t.image.length()); break; - case DATATYPE: + } + case DATATYPE:{ t = jj_consume_token(DATATYPE); iriref = IRIref(); break; + } default: jj_la1[265] = jj_gen; jj_consume_token(-1); throw new ParseException(); } break; + } default: jj_la1[266] = jj_gen; ; } - res = astq.createConstantWithDatatype(s, iriref, lang); - {if (true) return res;} +res = astq.createConstantWithDatatype(s, iriref, lang); +{if ("" != null) return res;} throw new Error("Missing return statement in function"); - } +} - final public Constant StringLiteral() throws ParseException { - String s; + final public Constant StringLiteral() throws ParseException {String s; s = String(); - {if (true) return Constant.createString(s);} +{if ("" != null) return Constant.createString(s);} throw new Error("Missing return statement in function"); - } +} - final public Constant BooleanLiteral() throws ParseException { - Token t1, t2; Constant cst; String iriref, lang; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case TRUE: + final public Constant BooleanLiteral() throws ParseException {Token t1, t2; Constant cst; String iriref, lang; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case TRUE:{ t1 = jj_consume_token(TRUE); break; - case FALSE: + } + case FALSE:{ t1 = jj_consume_token(FALSE); break; + } default: jj_la1[267] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - iriref = RDFS.qxsdBoolean; lang=null; - cst = astq.createConstant(t1.image.toLowerCase(), iriref, lang); - {if (true) return cst;} +iriref = RDFS.qxsdBoolean; lang=null; +cst = astq.createConstant(t1.image.toLowerCase(), iriref, lang); + {if ("" != null) return cst;} throw new Error("Missing return statement in function"); - } +} - final public String String() throws ParseException { - Token t; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case STRING_LITERAL1: + final public String String() throws ParseException {Token t; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case STRING_LITERAL1:{ t = jj_consume_token(STRING_LITERAL1); break; - case STRING_LITERAL2: + } + case STRING_LITERAL2:{ t = jj_consume_token(STRING_LITERAL2); break; - case STRING_LITERAL_LONG1: + } + case STRING_LITERAL_LONG1:{ t = jj_consume_token(STRING_LITERAL_LONG1); break; - case STRING_LITERAL_LONG2: + } + case STRING_LITERAL_LONG2:{ t = jj_consume_token(STRING_LITERAL_LONG2); break; + } default: jj_la1[268] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return handler.remEscapes(t.image);} +final String SQ3 = "\"\"\""; + final String SSQ3 = "'''"; + + String str = t.image; + + // remove leading/trailing " or ' + int start = 1, end = str.length() - 1; + + if ((str.startsWith(SQ3) && str.endsWith(SQ3)) + || (str.startsWith(SSQ3) && str.endsWith(SSQ3))) { + // remove leading/trailing """ or ''' + start = 3; + end = str.length() - 3; + } + + str = str.substring(start, end); + + {if ("" != null) return handler.remEscapes(convertUnicodeSequences(str));} throw new Error("Missing return statement in function"); - } +} - final public String IRIref() throws ParseException { - String res; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case Q_IRIref: + final public String IRIref() throws ParseException {String res; + switch 
((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case Q_IRIref:{ res = Q_IRI_ref(); break; + } case QNAME_NS: - case QNAME: + case QNAME:{ res = QName(); break; + } default: jj_la1[269] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return res;} +{if ("" != null) return res;} throw new Error("Missing return statement in function"); - } +} - final public Constant IRIrefConst() throws ParseException { - String res; Constant cst; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case Q_IRIref: + final public Constant IRIrefConst() throws ParseException {String res; Constant cst; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case Q_IRIref:{ res = Q_IRI_ref(); - cst = astq.createURI(res); +cst = astq.createURI(res); break; + } case QNAME_NS: - case QNAME: + case QNAME:{ res = QName(); - cst = astq.createQName(res); +cst = astq.createQName(res); break; + } default: jj_la1[270] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return cst;} +{if ("" != null) return cst;} throw new Error("Missing return statement in function"); - } +} - final public String QName() throws ParseException { - Token t; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case QNAME: + final public String QName() throws ParseException {Token t; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case QNAME:{ t = jj_consume_token(QNAME); break; - case QNAME_NS: + } + case QNAME_NS:{ t = jj_consume_token(QNAME_NS); break; + } default: jj_la1[271] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return t.image;} +{if ("" != null) return t.image;} throw new Error("Missing return statement in function"); - } +} - final public Variable BlankNode(Exp stack) throws ParseException { - Token t; Variable v; - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case BLANK_NODE_LABEL: + final public Variable BlankNode(Exp stack) throws ParseException {Token t; Variable v; + switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { + case BLANK_NODE_LABEL:{ t = jj_consume_token(BLANK_NODE_LABEL); - // check if the blank node is already in the table +// check if the blank node is already in the table v = astq.newBlankNode(stack, t.image); handler.declareBlankNode(t); break; - case ANON: + } + case ANON:{ t = jj_consume_token(ANON); - // create a new blank node +// create a new blank node v = astq.newBlankNode(); handler.declareBlankNode(t); break; + } default: jj_la1[272] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - {if (true) return v;} +{if ("" != null) return v;} throw new Error("Missing return statement in function"); - } +} - final public String Q_IRI_ref() throws ParseException { - Token t; String s; + final public String Q_IRI_ref() throws ParseException {Token t; String s; t = jj_consume_token(Q_IRIref); - s = t.image; - s = s.substring(1,s.length()-1); +s = t.image; + s = s.substring(1,s.length()-1); // Remove "<" and ">" + s = convertUnicodeSequences(s); // Convert Unicode escape sequences s = astq.defURI(s); - {if (true) return s;} + {if ("" != null) return s;} throw new Error("Missing return statement in function"); - } +} - final private boolean jj_2_1(int xla) { + private boolean jj_2_1(int xla) + { jj_la = xla; jj_lastpos = jj_scanpos = token; - try { return !jj_3_1(); } + try { return (!jj_3_1()); } catch(LookaheadSuccess ls) { return true; } finally { jj_save(0, xla); } } - final private boolean jj_2_2(int xla) { + private boolean jj_2_2(int xla) + { jj_la = xla; jj_lastpos = jj_scanpos = token; - try { return !jj_3_2(); } + try { return (!jj_3_2()); } catch(LookaheadSuccess ls) 
{ return true; } finally { jj_save(1, xla); } } - final private boolean jj_2_3(int xla) { + private boolean jj_2_3(int xla) + { jj_la = xla; jj_lastpos = jj_scanpos = token; - try { return !jj_3_3(); } + try { return (!jj_3_3()); } catch(LookaheadSuccess ls) { return true; } finally { jj_save(2, xla); } } - final private boolean jj_2_4(int xla) { + private boolean jj_2_4(int xla) + { jj_la = xla; jj_lastpos = jj_scanpos = token; - try { return !jj_3_4(); } + try { return (!jj_3_4()); } catch(LookaheadSuccess ls) { return true; } finally { jj_save(3, xla); } } - final private boolean jj_2_5(int xla) { + private boolean jj_2_5(int xla) + { jj_la = xla; jj_lastpos = jj_scanpos = token; - try { return !jj_3_5(); } + try { return (!jj_3_5()); } catch(LookaheadSuccess ls) { return true; } finally { jj_save(4, xla); } } - final private boolean jj_2_6(int xla) { + private boolean jj_2_6(int xla) + { jj_la = xla; jj_lastpos = jj_scanpos = token; - try { return !jj_3_6(); } + try { return (!jj_3_6()); } catch(LookaheadSuccess ls) { return true; } finally { jj_save(5, xla); } } - final private boolean jj_2_7(int xla) { + private boolean jj_2_7(int xla) + { jj_la = xla; jj_lastpos = jj_scanpos = token; - try { return !jj_3_7(); } + try { return (!jj_3_7()); } catch(LookaheadSuccess ls) { return true; } finally { jj_save(6, xla); } } - final private boolean jj_2_8(int xla) { + private boolean jj_2_8(int xla) + { jj_la = xla; jj_lastpos = jj_scanpos = token; - try { return !jj_3_8(); } + try { return (!jj_3_8()); } catch(LookaheadSuccess ls) { return true; } finally { jj_save(7, xla); } } - final private boolean jj_2_9(int xla) { + private boolean jj_2_9(int xla) + { jj_la = xla; jj_lastpos = jj_scanpos = token; - try { return !jj_3_9(); } + try { return (!jj_3_9()); } catch(LookaheadSuccess ls) { return true; } finally { jj_save(8, xla); } } - final private boolean jj_2_10(int xla) { + private boolean jj_2_10(int xla) + { jj_la = xla; jj_lastpos = jj_scanpos = token; - try { return !jj_3_10(); } + try { return (!jj_3_10()); } catch(LookaheadSuccess ls) { return true; } finally { jj_save(9, xla); } } - final private boolean jj_2_11(int xla) { + private boolean jj_2_11(int xla) + { jj_la = xla; jj_lastpos = jj_scanpos = token; - try { return !jj_3_11(); } + try { return (!jj_3_11()); } catch(LookaheadSuccess ls) { return true; } finally { jj_save(10, xla); } } - final private boolean jj_2_12(int xla) { + private boolean jj_2_12(int xla) + { jj_la = xla; jj_lastpos = jj_scanpos = token; - try { return !jj_3_12(); } + try { return (!jj_3_12()); } catch(LookaheadSuccess ls) { return true; } finally { jj_save(11, xla); } } - final private boolean jj_2_13(int xla) { + private boolean jj_2_13(int xla) + { jj_la = xla; jj_lastpos = jj_scanpos = token; - try { return !jj_3_13(); } + try { return (!jj_3_13()); } catch(LookaheadSuccess ls) { return true; } finally { jj_save(12, xla); } } - final private boolean jj_2_14(int xla) { + private boolean jj_2_14(int xla) + { jj_la = xla; jj_lastpos = jj_scanpos = token; - try { return !jj_3_14(); } + try { return (!jj_3_14()); } catch(LookaheadSuccess ls) { return true; } finally { jj_save(13, xla); } } - final private boolean jj_2_15(int xla) { + private boolean jj_2_15(int xla) + { jj_la = xla; jj_lastpos = jj_scanpos = token; - try { return !jj_3_15(); } + try { return (!jj_3_15()); } catch(LookaheadSuccess ls) { return true; } finally { jj_save(14, xla); } } - final private boolean jj_2_16(int xla) { + private boolean jj_2_16(int xla) + { jj_la = xla; jj_lastpos = 
jj_scanpos = token; - try { return !jj_3_16(); } + try { return (!jj_3_16()); } catch(LookaheadSuccess ls) { return true; } finally { jj_save(15, xla); } } - final private boolean jj_2_17(int xla) { + private boolean jj_2_17(int xla) + { jj_la = xla; jj_lastpos = jj_scanpos = token; - try { return !jj_3_17(); } + try { return (!jj_3_17()); } catch(LookaheadSuccess ls) { return true; } finally { jj_save(16, xla); } } - final private boolean jj_2_18(int xla) { + private boolean jj_2_18(int xla) + { jj_la = xla; jj_lastpos = jj_scanpos = token; - try { return !jj_3_18(); } + try { return (!jj_3_18()); } catch(LookaheadSuccess ls) { return true; } finally { jj_save(17, xla); } } - final private boolean jj_2_19(int xla) { + private boolean jj_2_19(int xla) + { jj_la = xla; jj_lastpos = jj_scanpos = token; - try { return !jj_3_19(); } + try { return (!jj_3_19()); } catch(LookaheadSuccess ls) { return true; } finally { jj_save(18, xla); } } - final private boolean jj_3R_107() { - if (jj_3R_141()) return true; - return false; - } - - final private boolean jj_3R_106() { - if (jj_3R_140()) return true; - return false; - } - - final private boolean jj_3R_73() { - Token xsp; - xsp = jj_scanpos; - if (jj_3R_102()) { - jj_scanpos = xsp; - if (jj_3R_103()) { - jj_scanpos = xsp; - if (jj_3R_104()) { - jj_scanpos = xsp; - if (jj_3_9()) { - jj_scanpos = xsp; - if (jj_3R_105()) return true; - } - } - } - } - return false; - } - - final private boolean jj_3R_74() { - Token xsp; - xsp = jj_scanpos; - if (jj_3R_106()) { - jj_scanpos = xsp; - if (jj_3R_107()) { - jj_scanpos = xsp; - if (jj_3R_108()) { - jj_scanpos = xsp; - if (jj_3R_109()) { - jj_scanpos = xsp; - if (jj_3R_110()) { - jj_scanpos = xsp; - if (jj_3R_111()) { - jj_scanpos = xsp; - if (jj_3R_112()) return true; - } - } - } - } - } - } - return false; - } - - final private boolean jj_3R_166() { - if (jj_scan_token(LBRACE)) return true; - return false; - } - - final private boolean jj_3R_121() { - if (jj_3R_157()) return true; - return false; - } - - final private boolean jj_3R_185() { - if (jj_scan_token(NOT)) return true; - return false; - } - - final private boolean jj_3R_182() { - if (jj_scan_token(SELECT)) return true; - return false; - } - - final private boolean jj_3R_86() { - if (jj_3R_120()) return true; - return false; - } - - final private boolean jj_3_8() { - if (jj_3R_73()) return true; - return false; - } - - final private boolean jj_3R_88() { - if (jj_3R_122()) return true; - return false; - } - - final private boolean jj_3R_164() { - Token xsp; - xsp = jj_scanpos; - if (jj_3R_185()) jj_scanpos = xsp; - if (jj_scan_token(EXIST)) return true; - if (jj_3R_95()) return true; - return false; - } - - final private boolean jj_3R_87() { - if (jj_3R_121()) return true; - return false; - } - - final private boolean jj_3_11() { - if (jj_3R_76()) return true; - if (jj_3R_77()) return true; - return false; - } - - final private boolean jj_3R_61() { - Token xsp; - xsp = jj_scanpos; - if (jj_3_11()) { - jj_scanpos = xsp; - if (jj_3R_86()) { - jj_scanpos = xsp; - if (jj_3R_87()) { - jj_scanpos = xsp; - if (jj_3R_88()) return true; - } - } - } - return false; - } - - final private boolean jj_3R_82() { - if (jj_3R_95()) return true; - return false; - } - - final private boolean jj_3R_137() { - if (jj_3R_164()) return true; - return false; - } - - final private boolean jj_3R_72() { - if (jj_3R_74()) return true; - return false; - } - - final private boolean jj_3R_71() { - if (jj_3R_101()) return true; - return false; - } - - final private boolean 
jj_3R_70() { - if (jj_3R_100()) return true; - return false; - } - - final private boolean jj_3R_69() { - if (jj_3R_67()) return true; - return false; - } - - final private boolean jj_3R_68() { - if (jj_scan_token(LPAREN)) return true; - if (jj_3R_99()) return true; - return false; - } - - final private boolean jj_3R_81() { - if (jj_scan_token(LBRACKET)) return true; - return false; - } - - final private boolean jj_3R_136() { - Token xsp; - xsp = jj_scanpos; - if (jj_scan_token(85)) { - jj_scanpos = xsp; - if (jj_scan_token(134)) { - jj_scanpos = xsp; - if (jj_scan_token(100)) { - jj_scanpos = xsp; - if (jj_scan_token(123)) { - jj_scanpos = xsp; - if (jj_scan_token(117)) { - jj_scanpos = xsp; - if (jj_scan_token(124)) { - jj_scanpos = xsp; - if (jj_scan_token(95)) { - jj_scanpos = xsp; - if (jj_scan_token(93)) { - jj_scanpos = xsp; - if (jj_scan_token(86)) { - jj_scanpos = xsp; - if (jj_scan_token(87)) { - jj_scanpos = xsp; - if (jj_scan_token(90)) { - jj_scanpos = xsp; - if (jj_scan_token(89)) { - jj_scanpos = xsp; - if (jj_scan_token(88)) { - jj_scanpos = xsp; - if (jj_scan_token(92)) { - jj_scanpos = xsp; - if (jj_scan_token(91)) { - jj_scanpos = xsp; - if (jj_scan_token(52)) { - jj_scanpos = xsp; - if (jj_scan_token(183)) return true; - } - } - } - } - } - } - } - } - } - } - } - } - } - } - } - } - if (jj_scan_token(LPAREN)) return true; - return false; - } - - final private boolean jj_3_7() { + private boolean jj_3_7() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_68()) { + if (jj_3R_GroupClause_1144_11_68()) { jj_scanpos = xsp; - if (jj_3R_69()) { + if (jj_3R_GroupClause_1145_11_69()) { jj_scanpos = xsp; - if (jj_3R_70()) { + if (jj_3R_GroupClause_1146_11_70()) { jj_scanpos = xsp; - if (jj_3R_71()) { + if (jj_3R_GroupClause_1147_11_71()) { jj_scanpos = xsp; - if (jj_3R_72()) return true; + if (jj_3R_GroupClause_1148_11_72()) return true; } } } @@ -9102,26 +9383,29 @@ final private boolean jj_3_7() { return false; } - final private boolean jj_3R_80() { + private boolean jj_3R_PrimaryRegExp_1896_17_80() + { if (jj_scan_token(INTEGER)) return true; return false; } - final private boolean jj_3_16() { + private boolean jj_3_16() + { if (jj_scan_token(AT)) return true; Token xsp; xsp = jj_scanpos; - if (jj_3R_80()) { + if (jj_3R_PrimaryRegExp_1896_17_80()) { jj_scanpos = xsp; - if (jj_3R_81()) { + if (jj_3R_PrimaryRegExp_1902_19_81()) { jj_scanpos = xsp; - if (jj_3R_82()) return true; + if (jj_3R_PrimaryRegExp_1915_19_82()) return true; } } return false; } - final private boolean jj_3R_135() { + private boolean jj_3R_BuiltInCall_2284_5_135() + { Token xsp; xsp = jj_scanpos; if (jj_scan_token(112)) { @@ -9168,18 +9452,21 @@ final private boolean jj_3R_135() { return false; } - final private boolean jj_3R_281() { + private boolean jj_3R_PrimaryRegExp_1887_17_281() + { if (jj_scan_token(LPAREN)) return true; - if (jj_3R_251()) return true; + if (jj_3R_AddRegExp_1784_5_251()) return true; return false; } - final private boolean jj_3R_280() { - if (jj_3R_91()) return true; + private boolean jj_3R_PrimaryRegExp_1883_17_280() + { + if (jj_3R_IRIrefConst_2896_4_91()) return true; return false; } - final private boolean jj_3R_134() { + private boolean jj_3R_BuiltInCall_2276_5_134() + { Token xsp; xsp = jj_scanpos; if (jj_scan_token(84)) { @@ -9204,23 +9491,25 @@ final private boolean jj_3R_134() { } } } - if (jj_3R_83()) return true; + if (jj_3R_ArgList_2738_3_83()) return true; return false; } - final private boolean jj_3R_277() { + private boolean jj_3R_PrimaryRegExp_1882_9_277() + { Token xsp; 
xsp = jj_scanpos; - if (jj_3R_280()) { + if (jj_3R_PrimaryRegExp_1883_17_280()) { jj_scanpos = xsp; - if (jj_3R_281()) return true; + if (jj_3R_PrimaryRegExp_1887_17_281()) return true; } xsp = jj_scanpos; if (jj_3_16()) jj_scanpos = xsp; return false; } - final private boolean jj_3R_176() { + private boolean jj_3R_box_729_7_176() + { Token xsp; xsp = jj_scanpos; if (jj_scan_token(167)) { @@ -9233,7 +9522,8 @@ final private boolean jj_3R_176() { return false; } - final private boolean jj_3R_133() { + private boolean jj_3R_BuiltInCall_2251_5_133() + { Token xsp; xsp = jj_scanpos; if (jj_scan_token(102)) { @@ -9364,7 +9654,8 @@ final private boolean jj_3R_133() { return false; } - final private boolean jj_3R_285() { + private boolean jj_3R_OpeRegExp_1870_1_285() + { Token xsp; xsp = jj_scanpos; if (jj_scan_token(241)) { @@ -9377,12 +9668,14 @@ final private boolean jj_3R_285() { return false; } - final private boolean jj_3R_178() { + private boolean jj_3R_format_710_7_178() + { if (jj_scan_token(FORMAT)) return true; return false; } - final private boolean jj_3R_132() { + private boolean jj_3R_BuiltInCall_2241_5_132() + { Token xsp; xsp = jj_scanpos; if (jj_scan_token(108)) { @@ -9402,27 +9695,32 @@ final private boolean jj_3R_132() { return false; } - final private boolean jj_3_1() { - if (jj_3R_61()) return true; + private boolean jj_3_1() + { + if (jj_3R_TriplesSameSubject_1548_3_61()) return true; return false; } - final private boolean jj_3R_284() { + private boolean jj_3R_UnaryRegExp_1843_17_284() + { if (jj_scan_token(LBRACE)) return true; return false; } - final private boolean jj_3R_218() { + private boolean jj_3R_Constraint_1471_3_218() + { if (jj_scan_token(FILTER)) return true; return false; } - final private boolean jj_3R_170() { + private boolean jj_3R_LambdaArgList_2616_4_170() + { if (jj_scan_token(LPAREN)) return true; return false; } - final private boolean jj_3R_131() { + private boolean jj_3R_BuiltInCall_2226_5_131() + { Token xsp; xsp = jj_scanpos; if (jj_scan_token(105)) { @@ -9433,58 +9731,64 @@ final private boolean jj_3R_131() { return false; } - final private boolean jj_3R_75() { - if (jj_3R_95()) return true; + private boolean jj_3R_GroupOrUnionGraphPattern_1455_9_75() + { + if (jj_3R_GroupGraphPattern_1223_3_95()) return true; return false; } - final private boolean jj_3R_283() { - if (jj_3R_285()) return true; + private boolean jj_3R_UnaryRegExp_1838_8_283() + { + if (jj_3R_OpeRegExp_1870_1_285()) return true; return false; } - final private boolean jj_3R_279() { + private boolean jj_3R_UnaryRegExp_1838_8_279() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_283()) { + if (jj_3R_UnaryRegExp_1838_8_283()) { jj_scanpos = xsp; - if (jj_3R_284()) return true; + if (jj_3R_UnaryRegExp_1843_17_284()) return true; } return false; } - final private boolean jj_3R_266() { - if (jj_3R_277()) return true; + private boolean jj_3R_UnaryRegExp_1836_9_266() + { + if (jj_3R_PrimaryRegExp_1882_9_277()) return true; Token xsp; xsp = jj_scanpos; - if (jj_3R_279()) jj_scanpos = xsp; + if (jj_3R_UnaryRegExp_1838_8_279()) jj_scanpos = xsp; return false; } - final private boolean jj_3R_130() { + private boolean jj_3R_BuiltInCall_2221_5_130() + { if (jj_scan_token(BOUND)) return true; if (jj_scan_token(LPAREN)) return true; return false; } - final private boolean jj_3R_100() { + private boolean jj_3R_BuiltInCall_2219_3_100() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_130()) { + if (jj_3R_BuiltInCall_2221_5_130()) { jj_scanpos = xsp; - if (jj_3R_131()) { + if 
(jj_3R_BuiltInCall_2226_5_131()) { jj_scanpos = xsp; - if (jj_3R_132()) { + if (jj_3R_BuiltInCall_2241_5_132()) { jj_scanpos = xsp; - if (jj_3R_133()) { + if (jj_3R_BuiltInCall_2251_5_133()) { jj_scanpos = xsp; - if (jj_3R_134()) { + if (jj_3R_BuiltInCall_2276_5_134()) { jj_scanpos = xsp; - if (jj_3R_135()) { + if (jj_3R_BuiltInCall_2284_5_135()) { jj_scanpos = xsp; - if (jj_3R_136()) { + if (jj_3R_BuiltInCall_2298_5_136()) { jj_scanpos = xsp; - if (jj_3R_137()) return true; + if (jj_3R_BuiltInCall_2309_7_137()) return true; } } } @@ -9495,511 +9799,589 @@ final private boolean jj_3R_100() { return false; } - final private boolean jj_3R_265() { + private boolean jj_3R_UnaryRegExp_1831_5_265() + { if (jj_scan_token(BEGIN_WITH)) return true; - if (jj_3R_277()) return true; + if (jj_3R_PrimaryRegExp_1882_9_277()) return true; return false; } - final private boolean jj_3R_257() { - if (jj_3R_260()) return true; + private boolean jj_3R_UnaryExpression_2212_7_257() + { + if (jj_3R_PrimaryExpression_2795_3_260()) return true; return false; } - final private boolean jj_3R_264() { + private boolean jj_3R_UnaryRegExp_1826_9_264() + { if (jj_scan_token(BANG)) return true; - if (jj_3R_277()) return true; + if (jj_3R_PrimaryRegExp_1882_9_277()) return true; return false; } - final private boolean jj_3R_256() { + private boolean jj_3R_UnaryExpression_2210_7_256() + { if (jj_scan_token(MINUS)) return true; return false; } - final private boolean jj_3R_177() { + private boolean jj_3R_group_683_2_177() + { if (jj_scan_token(GROUP)) return true; return false; } - final private boolean jj_3R_255() { + private boolean jj_3R_UnaryExpression_2208_7_255() + { if (jj_scan_token(PLUS)) return true; return false; } - final private boolean jj_3R_154() { - if (jj_3R_178()) return true; + private boolean jj_3R_TemplateExpression_674_9_154() + { + if (jj_3R_format_710_7_178()) return true; return false; } - final private boolean jj_3R_254() { + private boolean jj_3R_UnaryExpression_2206_7_254() + { if (jj_scan_token(BANG)) return true; return false; } - final private boolean jj_3R_153() { - if (jj_3R_177()) return true; + private boolean jj_3R_TemplateExpression_672_9_153() + { + if (jj_3R_group_683_2_177()) return true; return false; } - final private boolean jj_3R_245() { + private boolean jj_3R_ServicePattern_1436_9_245() + { if (jj_scan_token(SERVICE)) return true; return false; } - final private boolean jj_3R_152() { - if (jj_3R_176()) return true; + private boolean jj_3R_TemplateExpression_670_9_152() + { + if (jj_3R_box_729_7_176()) return true; return false; } - final private boolean jj_3R_259() { + private boolean jj_3R_UnaryRegExp_1825_1_259() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_264()) { + if (jj_3R_UnaryRegExp_1826_9_264()) { jj_scanpos = xsp; - if (jj_3R_265()) { + if (jj_3R_UnaryRegExp_1831_5_265()) { jj_scanpos = xsp; - if (jj_3R_266()) return true; + if (jj_3R_UnaryRegExp_1836_9_266()) return true; } } return false; } - final private boolean jj_3R_252() { + private boolean jj_3R_UnaryExpression_2206_3_252() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_254()) { + if (jj_3R_UnaryExpression_2206_7_254()) { jj_scanpos = xsp; - if (jj_3R_255()) { + if (jj_3R_UnaryExpression_2208_7_255()) { jj_scanpos = xsp; - if (jj_3R_256()) { + if (jj_3R_UnaryExpression_2210_7_256()) { jj_scanpos = xsp; - if (jj_3R_257()) return true; + if (jj_3R_UnaryExpression_2212_7_257()) return true; } } } return false; } - final private boolean jj_3R_119() { + private boolean jj_3R_TemplateExpression_670_5_119() + { Token 
xsp; xsp = jj_scanpos; - if (jj_3R_152()) { + if (jj_3R_TemplateExpression_670_9_152()) { jj_scanpos = xsp; - if (jj_3R_153()) { + if (jj_3R_TemplateExpression_672_9_153()) { jj_scanpos = xsp; - if (jj_3R_154()) return true; + if (jj_3R_TemplateExpression_674_9_154()) return true; } } return false; } - final private boolean jj_3R_145() { + private boolean jj_3R_Lambda_2587_3_145() + { Token xsp; xsp = jj_scanpos; if (jj_scan_token(191)) { jj_scanpos = xsp; if (jj_scan_token(190)) return true; } - if (jj_3R_170()) return true; + if (jj_3R_LambdaArgList_2616_4_170()) return true; return false; } - final private boolean jj_3R_261() { + private boolean jj_3R_MultRegExp_1812_11_261() + { if (jj_scan_token(SLASH)) return true; return false; } - final private boolean jj_3R_253() { - if (jj_3R_259()) return true; + private boolean jj_3R_MultRegExp_1808_9_253() + { + if (jj_3R_UnaryRegExp_1825_1_259()) return true; Token xsp; while (true) { xsp = jj_scanpos; - if (jj_3R_261()) { jj_scanpos = xsp; break; } + if (jj_3R_MultRegExp_1812_11_261()) { jj_scanpos = xsp; break; } } return false; } - final private boolean jj_3R_250() { - if (jj_3R_252()) return true; + private boolean jj_3R_MultiplicativeExpression_2193_3_250() + { + if (jj_3R_UnaryExpression_2206_3_252()) return true; return false; } - final private boolean jj_3R_263() { + private boolean jj_3R_AddRegExp_1793_17_263() + { if (jj_scan_token(SC_OR)) return true; return false; } - final private boolean jj_3R_236() { - if (jj_3R_250()) return true; + private boolean jj_3R_AdditiveExpression_2181_3_236() + { + if (jj_3R_MultiplicativeExpression_2193_3_250()) return true; return false; } - final private boolean jj_3R_244() { + private boolean jj_3R_GraphGraphPattern_1407_9_244() + { if (jj_scan_token(GRAPH)) return true; return false; } - final private boolean jj_3R_262() { + private boolean jj_3R_AddRegExp_1787_9_262() + { if (jj_scan_token(BAR)) return true; return false; } - final private boolean jj_3R_258() { + private boolean jj_3R_AddRegExp_1787_9_258() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_262()) { + if (jj_3R_AddRegExp_1787_9_262()) { jj_scanpos = xsp; - if (jj_3R_263()) return true; + if (jj_3R_AddRegExp_1793_17_263()) return true; } return false; } - final private boolean jj_3R_220() { - if (jj_3R_236()) return true; + private boolean jj_3R_NumericExpression_2175_3_220() + { + if (jj_3R_AdditiveExpression_2181_3_236()) return true; return false; } - final private boolean jj_3R_249() { + private boolean jj_3R_Values_1019_9_249() + { if (jj_scan_token(VALUES)) return true; return false; } - final private boolean jj_3R_251() { - if (jj_3R_253()) return true; + private boolean jj_3R_Q_IRI_ref_2935_5_159() + { + if (jj_scan_token(Q_IRIref)) return true; + return false; + } + + private boolean jj_3R_AddRegExp_1784_5_251() + { + if (jj_3R_MultRegExp_1808_9_253()) return true; Token xsp; while (true) { xsp = jj_scanpos; - if (jj_3R_258()) { jj_scanpos = xsp; break; } + if (jj_3R_AddRegExp_1787_9_258()) { jj_scanpos = xsp; break; } } return false; } - final private boolean jj_3_18() { - if (jj_3R_83()) return true; + private boolean jj_3_18() + { + if (jj_3R_ArgList_2738_3_83()) return true; return false; } - final private boolean jj_3R_246() { + private boolean jj_3R_ScopeGraphPattern_1394_10_246() + { if (jj_scan_token(SCOPE)) return true; return false; } - final private boolean jj_3R_282() { - if (jj_3R_91()) return true; + private boolean jj_3R_StdIRIrefOrFunction_2548_3_282() + { + if (jj_3R_IRIrefConst_2896_4_91()) return true; 
return false; } - final private boolean jj_3R_241() { - if (jj_3R_251()) return true; + private boolean jj_3R_RegExp_1774_11_241() + { + if (jj_3R_AddRegExp_1784_5_251()) return true; return false; } - final private boolean jj_3R_240() { + private boolean jj_3R_RegExp_1770_12_240() + { if (jj_scan_token(XPATH)) return true; if (jj_scan_token(LPAREN)) return true; return false; } - final private boolean jj_3R_278() { - if (jj_3R_282()) return true; + private boolean jj_3R_BlankNode_2920_11_211() + { + if (jj_scan_token(ANON)) return true; + return false; + } + + private boolean jj_3R_IRIrefOrFunction_2541_6_278() + { + if (jj_3R_StdIRIrefOrFunction_2548_3_282()) return true; return false; } - final private boolean jj_3R_247() { + private boolean jj_3R_ExistGraphPattern_1384_10_247() + { if (jj_scan_token(EXIST)) return true; return false; } - final private boolean jj_3R_159() { - if (jj_scan_token(Q_IRIref)) return true; + private boolean jj_3R_BlankNode_2913_9_210() + { + if (jj_scan_token(BLANK_NODE_LABEL)) return true; return false; } - final private boolean jj_3R_205() { - if (jj_3R_220()) return true; + private boolean jj_3R_RelationalExpression_2153_3_205() + { + if (jj_3R_NumericExpression_2175_3_220()) return true; return false; } - final private boolean jj_3R_92() { - if (jj_3R_91()) return true; + private boolean jj_3R_DatasetClause_993_13_92() + { + if (jj_3R_IRIrefConst_2896_4_91()) return true; return false; } - final private boolean jj_3R_94() { + private boolean jj_3R_WhereClause_1000_5_94() + { if (jj_scan_token(WHERE)) return true; return false; } - final private boolean jj_3R_227() { + private boolean jj_3R_RegExp_1769_1_227() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_240()) { + if (jj_3R_RegExp_1770_12_240()) { jj_scanpos = xsp; - if (jj_3R_241()) return true; + if (jj_3R_RegExp_1774_11_241()) return true; } return false; } - final private boolean jj_3R_66() { + private boolean jj_3R_WhereClause_1000_4_66() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_94()) jj_scanpos = xsp; - if (jj_3R_95()) return true; + if (jj_3R_WhereClause_1000_5_94()) jj_scanpos = xsp; + if (jj_3R_GroupGraphPattern_1223_3_95()) return true; return false; } - final private boolean jj_3R_184() { - if (jj_3R_205()) return true; + private boolean jj_3R_ValueLogical_2147_3_184() + { + if (jj_3R_RelationalExpression_2153_3_205()) return true; return false; } - final private boolean jj_3R_243() { + private boolean jj_3R_MinusGraphPattern_1374_9_243() + { if (jj_scan_token(MINUSP)) return true; return false; } - final private boolean jj_3R_93() { + private boolean jj_3R_BlankNode_2912_3_193() + { + Token xsp; + xsp = jj_scanpos; + if (jj_3R_BlankNode_2913_9_210()) { + jj_scanpos = xsp; + if (jj_3R_BlankNode_2920_11_211()) return true; + } + return false; + } + + private boolean jj_3R_DatasetClause_994_5_93() + { if (jj_scan_token(NAMED)) return true; return false; } - final private boolean jj_3R_101() { - if (jj_3R_91()) return true; - if (jj_3R_83()) return true; + private boolean jj_3R_FunctionCall_2528_3_101() + { + if (jj_3R_IRIrefConst_2896_4_91()) return true; + if (jj_3R_ArgList_2738_3_83()) return true; return false; } - final private boolean jj_3R_239() { + private boolean jj_3R_StdProperty_1754_9_239() + { if (jj_scan_token(COLON2)) return true; return false; } - final private boolean jj_3R_211() { - if (jj_scan_token(ANON)) return true; + private boolean jj_3R_DatasetClause_991_3_65() + { + if (jj_scan_token(FROM)) return true; + Token xsp; + xsp = jj_scanpos; + if 
(jj_3R_DatasetClause_993_13_92()) { + jj_scanpos = xsp; + if (jj_3R_DatasetClause_994_5_93()) return true; + } return false; } - final private boolean jj_3R_65() { - if (jj_scan_token(FROM)) return true; + private boolean jj_3R_QName_2906_3_160() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_92()) { + if (jj_scan_token(10)) { jj_scanpos = xsp; - if (jj_3R_93()) return true; + if (jj_scan_token(9)) return true; } return false; } - final private boolean jj_3R_213() { - if (jj_3R_227()) return true; + private boolean jj_3R_StdProperty_1748_11_213() + { + if (jj_3R_RegExp_1769_1_227()) return true; Token xsp; xsp = jj_scanpos; - if (jj_3R_239()) jj_scanpos = xsp; + if (jj_3R_StdProperty_1754_9_239()) jj_scanpos = xsp; return false; } - final private boolean jj_3R_242() { + private boolean jj_3R_OptionalGraphPattern_1364_9_242() + { if (jj_scan_token(OPTIONAL)) return true; return false; } - final private boolean jj_3R_163() { - if (jj_3R_184()) return true; + private boolean jj_3R_ConditionalAndExpression_2136_3_163() + { + if (jj_3R_ValueLogical_2147_3_184()) return true; return false; } - final private boolean jj_3R_210() { - if (jj_scan_token(BLANK_NODE_LABEL)) return true; + private boolean jj_3R_IRIrefConst_2897_6_126() + { + if (jj_3R_QName_2906_3_160()) return true; + return false; + } + + private boolean jj_3R_IRIrefConst_2896_6_125() + { + if (jj_3R_Q_IRI_ref_2935_5_159()) return true; return false; } - final private boolean jj_3R_193() { + private boolean jj_3R_IRIrefConst_2896_4_91() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_210()) { + if (jj_3R_IRIrefConst_2896_6_125()) { jj_scanpos = xsp; - if (jj_3R_211()) return true; + if (jj_3R_IRIrefConst_2897_6_126()) return true; } return false; } - final private boolean jj_3R_129() { - if (jj_3R_163()) return true; + private boolean jj_3R_ConditionalOrExpression_2125_3_129() + { + if (jj_3R_ConditionalAndExpression_2136_3_163()) return true; return false; } - final private boolean jj_3R_168() { + private boolean jj_3R_Set_2491_18_168() + { if (jj_scan_token(STATIC)) return true; return false; } - final private boolean jj_3R_160() { - Token xsp; - xsp = jj_scanpos; - if (jj_scan_token(10)) { - jj_scanpos = xsp; - if (jj_scan_token(9)) return true; - } - return false; - } - - final private boolean jj_3R_197() { - if (jj_3R_67()) return true; + private boolean jj_3R_PropertyVarOrIRIref_1733_7_197() + { + if (jj_3R_Var_2082_3_67()) return true; return false; } - final private boolean jj_3R_99() { - if (jj_3R_129()) return true; + private boolean jj_3R_Expression_2119_3_99() + { + if (jj_3R_ConditionalOrExpression_2125_3_129()) return true; return false; } - final private boolean jj_3R_196() { - if (jj_3R_213()) return true; + private boolean jj_3R_PropertyVarOrIRIref_1731_7_196() + { + if (jj_3R_StdProperty_1748_11_213()) return true; return false; } - final private boolean jj_3R_172() { - if (jj_3R_193()) return true; + private boolean jj_3R_GraphTerm_2110_11_172() + { + if (jj_3R_BlankNode_2912_3_193()) return true; return false; } - final private boolean jj_3R_192() { + private boolean jj_3R_GraphTerm_2107_11_192() + { if (jj_scan_token(LPAREN)) return true; if (jj_scan_token(RPAREN)) return true; return false; } - final private boolean jj_3R_126() { - if (jj_3R_160()) return true; - return false; - } - - final private boolean jj_3R_191() { - if (jj_3R_209()) return true; + private boolean jj_3R_GraphTerm_2106_11_191() + { + if (jj_3R_BooleanLiteral_2848_3_209()) return true; return false; } - final private boolean jj_3R_143() { + private 
boolean jj_3R_Set_2491_9_143() + { Token xsp; xsp = jj_scanpos; if (jj_scan_token(56)) { jj_scanpos = xsp; - if (jj_3R_168()) return true; + if (jj_3R_Set_2491_18_168()) return true; } if (jj_scan_token(LPAREN)) return true; return false; } - final private boolean jj_3R_125() { - if (jj_3R_159()) return true; - return false; - } - - final private boolean jj_3R_206() { + private boolean jj_3R_GraphTerm_2099_17_206() + { if (jj_scan_token(COLON2)) return true; return false; } - final private boolean jj_3R_190() { - if (jj_3R_208()) return true; + private boolean jj_3R_GraphTerm_2105_11_190() + { + if (jj_3R_NumericLiteral_2822_4_208()) return true; return false; } - final private boolean jj_3R_174() { + private boolean jj_3R_PropertyVarOrIRIref_1730_3_174() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_196()) { + if (jj_3R_PropertyVarOrIRIref_1731_7_196()) { jj_scanpos = xsp; - if (jj_3R_197()) return true; + if (jj_3R_PropertyVarOrIRIref_1733_7_197()) return true; } return false; } - final private boolean jj_3R_189() { - if (jj_3R_207()) return true; - return false; - } - - final private boolean jj_3R_91() { - Token xsp; - xsp = jj_scanpos; - if (jj_3R_125()) { - jj_scanpos = xsp; - if (jj_3R_126()) return true; - } + private boolean jj_3R_GraphTerm_2104_11_189() + { + if (jj_3R_RDFLiteral_2830_3_207()) return true; return false; } - final private boolean jj_3R_188() { + private boolean jj_3R_GraphTerm_2103_11_188() + { if (jj_scan_token(QNAME_NS)) return true; return false; } - final private boolean jj_3_6() { - if (jj_3R_67()) return true; + private boolean jj_3_6() + { + if (jj_3R_Var_2082_3_67()) return true; return false; } - final private boolean jj_3R_167() { + private boolean jj_3R_Let_2476_19_167() + { if (jj_scan_token(LETDYN)) return true; return false; } - final private boolean jj_3R_150() { + private boolean jj_3R_Verb_1721_7_150() + { if (jj_scan_token(KW_A)) return true; return false; } - final private boolean jj_3R_149() { - if (jj_3R_174()) return true; + private boolean jj_3R_Verb_1720_5_149() + { + if (jj_3R_PropertyVarOrIRIref_1730_3_174()) return true; return false; } - final private boolean jj_3R_187() { + private boolean jj_3R_GraphTerm_2096_11_187() + { if (jj_scan_token(QNAME)) return true; Token xsp; xsp = jj_scanpos; - if (jj_3R_206()) jj_scanpos = xsp; + if (jj_3R_GraphTerm_2099_17_206()) jj_scanpos = xsp; return false; } - final private boolean jj_3R_248() { + private boolean jj_3R_Bind_1330_9_248() + { if (jj_scan_token(BIND)) return true; return false; } - final private boolean jj_3R_115() { + private boolean jj_3R_Verb_1719_3_115() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_149()) { + if (jj_3R_Verb_1720_5_149()) { jj_scanpos = xsp; - if (jj_3R_150()) return true; + if (jj_3R_Verb_1721_7_150()) return true; } return false; } - final private boolean jj_3R_186() { - if (jj_3R_159()) return true; + private boolean jj_3R_GraphTerm_2095_9_186() + { + if (jj_3R_Q_IRI_ref_2935_5_159()) return true; return false; } - final private boolean jj_3R_171() { + private boolean jj_3R_GraphTerm_2094_9_171() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_186()) { + if (jj_3R_GraphTerm_2095_9_186()) { jj_scanpos = xsp; - if (jj_3R_187()) { + if (jj_3R_GraphTerm_2096_11_187()) { jj_scanpos = xsp; - if (jj_3R_188()) { + if (jj_3R_GraphTerm_2103_11_188()) { jj_scanpos = xsp; - if (jj_3R_189()) { + if (jj_3R_GraphTerm_2104_11_189()) { jj_scanpos = xsp; - if (jj_3R_190()) { + if (jj_3R_GraphTerm_2105_11_190()) { jj_scanpos = xsp; - if (jj_3R_191()) { + if 
(jj_3R_GraphTerm_2106_11_191()) { jj_scanpos = xsp; - if (jj_3R_192()) return true; + if (jj_3R_GraphTerm_2107_11_192()) return true; } } } @@ -10009,18 +10391,20 @@ final private boolean jj_3R_171() { return false; } - final private boolean jj_3R_142() { + private boolean jj_3R_Let_2476_9_142() + { Token xsp; xsp = jj_scanpos; if (jj_scan_token(54)) { jj_scanpos = xsp; - if (jj_3R_167()) return true; + if (jj_3R_Let_2476_19_167()) return true; } if (jj_scan_token(LPAREN)) return true; return false; } - final private boolean jj_3R_221() { + private boolean jj_3R_String_2859_3_221() + { Token xsp; xsp = jj_scanpos; if (jj_scan_token(210)) { @@ -10036,44 +10420,51 @@ final private boolean jj_3R_221() { return false; } - final private boolean jj_3R_147() { + private boolean jj_3R_GraphTerm_2093_3_147() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_171()) { + if (jj_3R_GraphTerm_2094_9_171()) { jj_scanpos = xsp; - if (jj_3R_172()) return true; + if (jj_3R_GraphTerm_2110_11_172()) return true; } return false; } - final private boolean jj_3R_79() { + private boolean jj_3R_AssertedAnnotation_1703_5_79() + { if (jj_scan_token(LBRACE)) return true; if (jj_scan_token(BAR)) return true; return false; } - final private boolean jj_3R_98() { + private boolean jj_3R_Var_2084_7_98() + { if (jj_scan_token(VAR3)) return true; return false; } - final private boolean jj_3R_97() { + private boolean jj_3R_Var_2083_7_97() + { if (jj_scan_token(VAR2)) return true; return false; } - final private boolean jj_3R_96() { + private boolean jj_3R_Var_2082_7_96() + { if (jj_scan_token(VAR1)) return true; return false; } - final private boolean jj_3R_141() { + private boolean jj_3R_TryCatch_2467_5_141() + { if (jj_scan_token(TRY)) return true; - if (jj_3R_166()) return true; + if (jj_3R_Body_2715_2_166()) return true; return false; } - final private boolean jj_3R_209() { + private boolean jj_3R_BooleanLiteral_2848_3_209() + { Token xsp; xsp = jj_scanpos; if (jj_scan_token(161)) { @@ -10083,210 +10474,240 @@ final private boolean jj_3R_209() { return false; } - final private boolean jj_3R_67() { + private boolean jj_3R_Var_2082_3_67() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_96()) { + if (jj_3R_Var_2082_7_96()) { jj_scanpos = xsp; - if (jj_3R_97()) { + if (jj_3R_Var_2083_7_97()) { jj_scanpos = xsp; - if (jj_3R_98()) return true; + if (jj_3R_Var_2084_7_98()) return true; } } return false; } - final private boolean jj_3R_114() { - if (jj_3R_147()) return true; + private boolean jj_3R_VarOrTerm_2056_27_114() + { + if (jj_3R_GraphTerm_2093_3_147()) return true; return false; } - final private boolean jj_3R_238() { + private boolean jj_3R_RDFLiteral_2834_7_238() + { if (jj_scan_token(DATATYPE)) return true; return false; } - final private boolean jj_3R_237() { + private boolean jj_3R_RDFLiteral_2833_5_237() + { if (jj_scan_token(LANGTAG)) return true; return false; } - final private boolean jj_3R_222() { + private boolean jj_3R_RDFLiteral_2833_5_222() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_237()) { + if (jj_3R_RDFLiteral_2833_5_237()) { jj_scanpos = xsp; - if (jj_3R_238()) return true; + if (jj_3R_RDFLiteral_2834_7_238()) return true; } return false; } - final private boolean jj_3R_195() { - if (jj_3R_157()) return true; + private boolean jj_3R_PropertyValue_1680_9_195() + { + if (jj_3R_StarTriple_1574_5_157()) return true; return false; } - final private boolean jj_3_15() { - if (jj_3R_79()) return true; + private boolean jj_3_15() + { + if (jj_3R_AssertedAnnotation_1703_5_79()) return true; return false; } - final 
private boolean jj_3R_194() { - if (jj_3R_212()) return true; + private boolean jj_3R_PropertyValue_1679_9_194() + { + if (jj_3R_GraphNode_2046_3_212()) return true; return false; } - final private boolean jj_3R_207() { - if (jj_3R_221()) return true; + private boolean jj_3R_RDFLiteral_2830_3_207() + { + if (jj_3R_String_2859_3_221()) return true; Token xsp; xsp = jj_scanpos; - if (jj_3R_222()) jj_scanpos = xsp; + if (jj_3R_RDFLiteral_2833_5_222()) jj_scanpos = xsp; return false; } - final private boolean jj_3R_173() { + private boolean jj_3R_PropertyValue_1678_5_173() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_194()) { + if (jj_3R_PropertyValue_1679_9_194()) { jj_scanpos = xsp; - if (jj_3R_195()) return true; + if (jj_3R_PropertyValue_1680_9_195()) return true; } return false; } - final private boolean jj_3R_225() { + private boolean jj_3R_NumericLiteral_2824_6_225() + { if (jj_scan_token(DOUBLE)) return true; return false; } - final private boolean jj_3R_224() { + private boolean jj_3R_NumericLiteral_2823_6_224() + { if (jj_scan_token(DECIMAL)) return true; return false; } - final private boolean jj_3R_223() { + private boolean jj_3R_NumericLiteral_2822_6_223() + { if (jj_scan_token(INTEGER)) return true; return false; } - final private boolean jj_3R_113() { - if (jj_3R_67()) return true; + private boolean jj_3R_VarOrTerm_2056_5_113() + { + if (jj_3R_Var_2082_3_67()) return true; return false; } - final private boolean jj_3R_235() { - if (jj_3R_249()) return true; + private boolean jj_3R_GraphPatternNotTriples_1283_11_235() + { + if (jj_3R_Values_1019_9_249()) return true; return false; } - final private boolean jj_3R_226() { - if (jj_3R_120()) return true; + private boolean jj_3R_GraphNode_2049_11_226() + { + if (jj_3R_TriplesNode_1959_3_120()) return true; return false; } - final private boolean jj_3R_208() { + private boolean jj_3R_NumericLiteral_2822_4_208() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_223()) { + if (jj_3R_NumericLiteral_2822_6_223()) { jj_scanpos = xsp; - if (jj_3R_224()) { + if (jj_3R_NumericLiteral_2823_6_224()) { jj_scanpos = xsp; - if (jj_3R_225()) return true; + if (jj_3R_NumericLiteral_2824_6_225()) return true; } } return false; } - final private boolean jj_3R_234() { - if (jj_3R_248()) return true; + private boolean jj_3R_GraphPatternNotTriples_1282_11_234() + { + if (jj_3R_Bind_1330_9_248()) return true; return false; } - final private boolean jj_3R_76() { + private boolean jj_3R_VarOrTerm_2056_3_76() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_113()) { + if (jj_3R_VarOrTerm_2056_5_113()) { jj_scanpos = xsp; - if (jj_3R_114()) return true; + if (jj_3R_VarOrTerm_2056_27_114()) return true; } return false; } - final private boolean jj_3R_233() { - if (jj_3R_247()) return true; + private boolean jj_3R_GraphPatternNotTriples_1281_11_233() + { + if (jj_3R_ExistGraphPattern_1384_10_247()) return true; return false; } - final private boolean jj_3R_232() { - if (jj_3R_246()) return true; + private boolean jj_3R_GraphPatternNotTriples_1280_11_232() + { + if (jj_3R_ScopeGraphPattern_1394_10_246()) return true; return false; } - final private boolean jj_3R_231() { - if (jj_3R_245()) return true; + private boolean jj_3R_GraphPatternNotTriples_1279_11_231() + { + if (jj_3R_ServicePattern_1436_9_245()) return true; return false; } - final private boolean jj_3R_161() { - if (jj_3R_182()) return true; + private boolean jj_3R_SubQuery_1288_2_161() + { + if (jj_3R_SelectQuery_793_5_182()) return true; return false; } - final private boolean jj_3R_230() { - if 
(jj_3R_244()) return true; + private boolean jj_3R_GraphPatternNotTriples_1278_11_230() + { + if (jj_3R_GraphGraphPattern_1407_9_244()) return true; return false; } - final private boolean jj_3_10() { - if (jj_3R_75()) return true; + private boolean jj_3_10() + { + if (jj_3R_GroupOrUnionGraphPattern_1455_9_75()) return true; return false; } - final private boolean jj_3_17() { - if (jj_3R_76()) return true; + private boolean jj_3_17() + { + if (jj_3R_VarOrTerm_2056_3_76()) return true; return false; } - final private boolean jj_3R_229() { - if (jj_3R_243()) return true; + private boolean jj_3R_GraphPatternNotTriples_1276_7_229() + { + if (jj_3R_MinusGraphPattern_1374_9_243()) return true; return false; } - final private boolean jj_3R_212() { + private boolean jj_3R_GraphNode_2046_3_212() + { Token xsp; xsp = jj_scanpos; if (jj_3_17()) { jj_scanpos = xsp; - if (jj_3R_226()) return true; + if (jj_3R_GraphNode_2049_11_226()) return true; } return false; } - final private boolean jj_3R_228() { - if (jj_3R_242()) return true; + private boolean jj_3R_GraphPatternNotTriples_1275_7_228() + { + if (jj_3R_OptionalGraphPattern_1364_9_242()) return true; return false; } - final private boolean jj_3R_219() { + private boolean jj_3R_GraphPatternNotTriples_1275_7_219() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_228()) { + if (jj_3R_GraphPatternNotTriples_1275_7_228()) { jj_scanpos = xsp; - if (jj_3R_229()) { + if (jj_3R_GraphPatternNotTriples_1276_7_229()) { jj_scanpos = xsp; if (jj_3_10()) { jj_scanpos = xsp; - if (jj_3R_230()) { + if (jj_3R_GraphPatternNotTriples_1278_11_230()) { jj_scanpos = xsp; - if (jj_3R_231()) { + if (jj_3R_GraphPatternNotTriples_1279_11_231()) { jj_scanpos = xsp; - if (jj_3R_232()) { + if (jj_3R_GraphPatternNotTriples_1280_11_232()) { jj_scanpos = xsp; - if (jj_3R_233()) { + if (jj_3R_GraphPatternNotTriples_1281_11_233()) { jj_scanpos = xsp; - if (jj_3R_234()) { + if (jj_3R_GraphPatternNotTriples_1282_11_234()) { jj_scanpos = xsp; - if (jj_3R_235()) return true; + if (jj_3R_GraphPatternNotTriples_1283_11_235()) return true; } } } @@ -10298,129 +10719,149 @@ final private boolean jj_3R_219() { return false; } - final private boolean jj_3R_148() { - if (jj_3R_173()) return true; + private boolean jj_3R_ObjectList_1660_3_148() + { + if (jj_3R_PropertyValue_1678_5_173()) return true; return false; } - final private boolean jj_3R_276() { - if (jj_3R_157()) return true; + private boolean jj_3R_PrimaryExpression_2805_6_276() + { + if (jj_3R_StarTriple_1574_5_157()) return true; return false; } - final private boolean jj_3R_275() { - if (jj_3R_278()) return true; + private boolean jj_3R_PrimaryExpression_2804_6_275() + { + if (jj_3R_IRIrefOrFunction_2541_6_278()) return true; return false; } - final private boolean jj_3R_124() { + private boolean jj_3R_DeleteInsertData_487_25_124() + { if (jj_scan_token(WHERE)) return true; return false; } - final private boolean jj_3R_274() { - if (jj_3R_193()) return true; + private boolean jj_3R_PrimaryExpression_2803_6_274() + { + if (jj_3R_BlankNode_2912_3_193()) return true; return false; } - final private boolean jj_3R_273() { - if (jj_3R_209()) return true; + private boolean jj_3R_PrimaryExpression_2802_6_273() + { + if (jj_3R_BooleanLiteral_2848_3_209()) return true; return false; } - final private boolean jj_3R_217() { - if (jj_3R_157()) return true; + private boolean jj_3R_Collection_2024_18_217() + { + if (jj_3R_StarTriple_1574_5_157()) return true; return false; } - final private boolean jj_3R_272() { - if (jj_3R_208()) return true; + 
private boolean jj_3R_PrimaryExpression_2801_6_272() + { + if (jj_3R_NumericLiteral_2822_4_208()) return true; return false; } - final private boolean jj_3R_216() { - if (jj_3R_212()) return true; + private boolean jj_3R_Collection_2023_18_216() + { + if (jj_3R_GraphNode_2046_3_212()) return true; return false; } - final private boolean jj_3R_271() { - if (jj_3R_207()) return true; + private boolean jj_3R_PrimaryExpression_2800_6_271() + { + if (jj_3R_RDFLiteral_2830_3_207()) return true; return false; } - final private boolean jj_3R_199() { + private boolean jj_3R_Collection_2023_17_199() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_216()) { + if (jj_3R_Collection_2023_18_216()) { jj_scanpos = xsp; - if (jj_3R_217()) return true; + if (jj_3R_Collection_2024_18_217()) return true; } return false; } - final private boolean jj_3R_270() { - if (jj_3R_67()) return true; + private boolean jj_3R_PrimaryExpression_2799_6_270() + { + if (jj_3R_Var_2082_3_67()) return true; return false; } - final private boolean jj_3R_269() { - if (jj_3R_74()) return true; + private boolean jj_3R_PrimaryExpression_2798_6_269() + { + if (jj_3R_FunctionCorese_2331_3_74()) return true; return false; } - final private boolean jj_3R_268() { - if (jj_3R_100()) return true; + private boolean jj_3R_PrimaryExpression_2797_6_268() + { + if (jj_3R_BuiltInCall_2219_3_100()) return true; return false; } - final private boolean jj_3R_123() { + private boolean jj_3R_DeleteInsertData_480_25_123() + { if (jj_scan_token(DATA)) return true; return false; } - final private boolean jj_3R_267() { - if (jj_3R_139()) return true; + private boolean jj_3R_PrimaryExpression_2796_6_267() + { + if (jj_3R_BrackettedExpression_2787_9_139()) return true; return false; } - final private boolean jj_3R_78() { - if (jj_3R_115()) return true; - if (jj_3R_148()) return true; + private boolean jj_3R_PropertyListNotEmptyElem_1647_5_78() + { + if (jj_3R_Verb_1719_3_115()) return true; + if (jj_3R_ObjectList_1660_3_148()) return true; return false; } - final private boolean jj_3R_64() { - if (jj_3R_91()) return true; + private boolean jj_3R_DescribeQuery_858_27_64() + { + if (jj_3R_IRIrefConst_2896_4_91()) return true; return false; } - final private boolean jj_3R_63() { - if (jj_3R_67()) return true; + private boolean jj_3R_DescribeQuery_857_27_63() + { + if (jj_3R_Var_2082_3_67()) return true; return false; } - final private boolean jj_3R_260() { + private boolean jj_3R_PrimaryExpression_2795_3_260() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_267()) { + if (jj_3R_PrimaryExpression_2796_6_267()) { jj_scanpos = xsp; - if (jj_3R_268()) { + if (jj_3R_PrimaryExpression_2797_6_268()) { jj_scanpos = xsp; - if (jj_3R_269()) { + if (jj_3R_PrimaryExpression_2798_6_269()) { jj_scanpos = xsp; - if (jj_3R_270()) { + if (jj_3R_PrimaryExpression_2799_6_270()) { jj_scanpos = xsp; - if (jj_3R_271()) { + if (jj_3R_PrimaryExpression_2800_6_271()) { jj_scanpos = xsp; - if (jj_3R_272()) { + if (jj_3R_PrimaryExpression_2801_6_272()) { jj_scanpos = xsp; - if (jj_3R_273()) { + if (jj_3R_PrimaryExpression_2802_6_273()) { jj_scanpos = xsp; - if (jj_3R_274()) { + if (jj_3R_PrimaryExpression_2803_6_274()) { jj_scanpos = xsp; - if (jj_3R_275()) { + if (jj_3R_PrimaryExpression_2804_6_275()) { jj_scanpos = xsp; - if (jj_3R_276()) return true; + if (jj_3R_PrimaryExpression_2805_6_276()) return true; } } } @@ -10433,7 +10874,8 @@ final private boolean jj_3R_260() { return false; } - final private boolean jj_3R_215() { + private boolean jj_3R_Collection_2015_17_215() + { Token xsp; 
xsp = jj_scanpos; if (jj_scan_token(246)) { @@ -10443,227 +10885,259 @@ final private boolean jj_3R_215() { return false; } - final private boolean jj_3R_139() { + private boolean jj_3R_BrackettedExpression_2787_9_139() + { if (jj_scan_token(LPAREN)) return true; - if (jj_3R_99()) return true; + if (jj_3R_Expression_2119_3_99()) return true; return false; } - final private boolean jj_3R_214() { + private boolean jj_3R_Collection_2011_17_214() + { if (jj_scan_token(ATLIST)) return true; return false; } - final private boolean jj_3R_198() { + private boolean jj_3R_Collection_2011_17_198() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_214()) { + if (jj_3R_Collection_2011_17_214()) { jj_scanpos = xsp; - if (jj_3R_215()) return true; + if (jj_3R_Collection_2015_17_215()) return true; } return false; } - final private boolean jj_3R_90() { + private boolean jj_3R_DeleteInsertData_478_17_90() + { if (jj_scan_token(DELETE)) return true; Token xsp; xsp = jj_scanpos; - if (jj_3R_123()) { + if (jj_3R_DeleteInsertData_480_25_123()) { jj_scanpos = xsp; - if (jj_3R_124()) return true; + if (jj_3R_DeleteInsertData_487_25_124()) return true; } return false; } - final private boolean jj_3_14() { + private boolean jj_3_14() + { if (jj_scan_token(SEMICOLON)) return true; return false; } - final private boolean jj_3_13() { + private boolean jj_3_13() + { if (jj_scan_token(SEMICOLON)) return true; - if (jj_3R_78()) return true; + if (jj_3R_PropertyListNotEmptyElem_1647_5_78()) return true; return false; } - final private boolean jj_3R_169() { + private boolean jj_3R_For_2388_14_169() + { if (jj_scan_token(LOOP)) return true; return false; } - final private boolean jj_3R_151() { - if (jj_3R_175()) return true; + private boolean jj_3R_ArgList_2754_31_151() + { + if (jj_3R_Arg_2771_9_175()) return true; return false; } - final private boolean jj_3_5() { - if (jj_3R_66()) return true; + private boolean jj_3_5() + { + if (jj_3R_WhereClause_1000_4_66()) return true; return false; } - final private boolean jj_3R_204() { - if (jj_3R_219()) return true; + private boolean jj_3R_GraphPattern_1242_9_204() + { + if (jj_3R_GraphPatternNotTriples_1275_7_219()) return true; return false; } - final private boolean jj_3_4() { - if (jj_3R_65()) return true; + private boolean jj_3_4() + { + if (jj_3R_DatasetClause_991_3_65()) return true; return false; } - final private boolean jj_3R_77() { - if (jj_3R_78()) return true; + private boolean jj_3R_PropertyListNotEmpty_1631_3_77() + { + if (jj_3R_PropertyListNotEmptyElem_1647_5_78()) return true; return false; } - final private boolean jj_3R_89() { + private boolean jj_3R_DeleteInsertData_467_17_89() + { if (jj_scan_token(INSERT)) return true; if (jj_scan_token(DATA)) return true; return false; } - final private boolean jj_3R_203() { - if (jj_3R_218()) return true; + private boolean jj_3R_GraphPattern_1240_9_203() + { + if (jj_3R_Constraint_1471_3_218()) return true; return false; } - final private boolean jj_3_3() { + private boolean jj_3_3() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_63()) { + if (jj_3R_DescribeQuery_857_27_63()) { jj_scanpos = xsp; - if (jj_3R_64()) return true; + if (jj_3R_DescribeQuery_858_27_64()) return true; } return false; } - final private boolean jj_3R_175() { - if (jj_3R_99()) return true; + private boolean jj_3R_Arg_2771_9_175() + { + if (jj_3R_Expression_2119_3_99()) return true; return false; } - final private boolean jj_3R_202() { - if (jj_3R_61()) return true; + private boolean jj_3R_GraphPattern_1238_9_202() + { + if 
(jj_3R_TriplesSameSubject_1548_3_61()) return true; return false; } - final private boolean jj_3R_183() { + private boolean jj_3R_GraphPattern_1237_9_183() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_202()) { + if (jj_3R_GraphPattern_1238_9_202()) { jj_scanpos = xsp; - if (jj_3R_203()) { + if (jj_3R_GraphPattern_1240_9_203()) { jj_scanpos = xsp; - if (jj_3R_204()) return true; + if (jj_3R_GraphPattern_1242_9_204()) return true; } } return false; } - final private boolean jj_3_12() { - if (jj_3R_77()) return true; + private boolean jj_3_12() + { + if (jj_3R_PropertyListNotEmpty_1631_3_77()) return true; return false; } - final private boolean jj_3R_179() { + private boolean jj_3R_Collection_2003_9_179() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_198()) jj_scanpos = xsp; + if (jj_3R_Collection_2011_17_198()) jj_scanpos = xsp; if (jj_scan_token(LPAREN)) return true; - if (jj_3R_199()) return true; + if (jj_3R_Collection_2023_17_199()) return true; while (true) { xsp = jj_scanpos; - if (jj_3R_199()) { jj_scanpos = xsp; break; } + if (jj_3R_Collection_2023_17_199()) { jj_scanpos = xsp; break; } } return false; } - final private boolean jj_3R_144() { + private boolean jj_3R_For_2388_5_144() + { Token xsp; xsp = jj_scanpos; if (jj_scan_token(58)) { jj_scanpos = xsp; - if (jj_3R_169()) return true; + if (jj_3R_For_2388_14_169()) return true; } if (jj_scan_token(LPAREN)) return true; return false; } - final private boolean jj_3R_117() { + private boolean jj_3R_ArgList_2750_25_117() + { Token xsp; xsp = jj_scanpos; if (jj_scan_token(241)) { jj_scanpos = xsp; - if (jj_3R_151()) return true; + if (jj_3R_ArgList_2754_31_151()) return true; } return false; } - final private boolean jj_3R_62() { + private boolean jj_3R_DeleteInsertData_466_9_62() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_89()) { + if (jj_3R_DeleteInsertData_467_17_89()) { jj_scanpos = xsp; - if (jj_3R_90()) return true; + if (jj_3R_DeleteInsertData_478_17_90()) return true; } return false; } - final private boolean jj_3R_128() { - if (jj_3R_162()) return true; + private boolean jj_3R_GroupGraphPattern_1227_11_128() + { + if (jj_3R_GraphPattern_1236_1_162()) return true; return false; } - final private boolean jj_3R_127() { - if (jj_3R_161()) return true; + private boolean jj_3R_GroupGraphPattern_1226_11_127() + { + if (jj_3R_SubQuery_1288_2_161()) return true; return false; } - final private boolean jj_3R_162() { + private boolean jj_3R_GraphPattern_1236_1_162() + { Token xsp; while (true) { xsp = jj_scanpos; - if (jj_3R_183()) { jj_scanpos = xsp; break; } + if (jj_3R_GraphPattern_1237_9_183()) { jj_scanpos = xsp; break; } } return false; } - final private boolean jj_3R_118() { + private boolean jj_3R_ArgList_2760_7_118() + { if (jj_scan_token(SEMICOLON)) return true; return false; } - final private boolean jj_3R_116() { + private boolean jj_3R_ArgList_2747_18_116() + { if (jj_scan_token(DISTINCT)) return true; return false; } - final private boolean jj_3_2() { - if (jj_3R_62()) return true; + private boolean jj_3_2() + { + if (jj_3R_DeleteInsertData_466_9_62()) return true; return false; } - final private boolean jj_3R_158() { + private boolean jj_3R_tuple_1599_11_158() + { if (jj_scan_token(TUPLE)) return true; return false; } - final private boolean jj_3R_95() { + private boolean jj_3R_GroupGraphPattern_1223_3_95() + { if (jj_scan_token(LBRACE)) return true; Token xsp; xsp = jj_scanpos; - if (jj_3R_127()) { + if (jj_3R_GroupGraphPattern_1226_11_127()) { jj_scanpos = xsp; - if (jj_3R_128()) return true; + if 
(jj_3R_GroupGraphPattern_1227_11_128()) return true; } if (jj_scan_token(RBRACE)) return true; return false; } - final private boolean jj_3R_122() { + private boolean jj_3R_tuple_1599_9_122() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_158()) { + if (jj_3R_tuple_1599_11_158()) { jj_scanpos = xsp; if (jj_scan_token(183)) return true; } @@ -10671,180 +11145,454 @@ final private boolean jj_3R_122() { return false; } - final private boolean jj_3R_85() { - if (jj_3R_119()) return true; + private boolean jj_3R_Body_2718_36_85() + { + if (jj_3R_TemplateExpression_670_5_119()) return true; return false; } - final private boolean jj_3R_140() { + private boolean jj_3R_IfThenElse_2365_5_140() + { if (jj_scan_token(IF)) return true; if (jj_scan_token(LPAREN)) return true; return false; } - final private boolean jj_3R_201() { - if (jj_3R_157()) return true; + private boolean jj_3R_StarTerm_1589_9_201() + { + if (jj_3R_StarTriple_1574_5_157()) return true; return false; } - final private boolean jj_3R_165() { + private boolean jj_3R_OrderCondition_1188_27_165() + { if (jj_scan_token(DESC)) return true; return false; } - final private boolean jj_3R_200() { - if (jj_3R_76()) return true; + private boolean jj_3R_StarTerm_1588_9_200() + { + if (jj_3R_VarOrTerm_2056_3_76()) return true; return false; } - final private boolean jj_3R_83() { + private boolean jj_3R_ArgList_2738_3_83() + { if (jj_scan_token(LPAREN)) return true; Token xsp; xsp = jj_scanpos; - if (jj_3R_116()) jj_scanpos = xsp; + if (jj_3R_ArgList_2747_18_116()) jj_scanpos = xsp; xsp = jj_scanpos; - if (jj_3R_117()) jj_scanpos = xsp; + if (jj_3R_ArgList_2750_25_117()) jj_scanpos = xsp; xsp = jj_scanpos; - if (jj_3R_118()) jj_scanpos = xsp; + if (jj_3R_ArgList_2760_7_118()) jj_scanpos = xsp; if (jj_scan_token(RPAREN)) return true; return false; } - final private boolean jj_3R_181() { + private boolean jj_3R_StarTerm_1586_5_181() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_200()) { + if (jj_3R_StarTerm_1588_9_200()) { jj_scanpos = xsp; - if (jj_3R_201()) return true; + if (jj_3R_StarTerm_1589_9_201()) return true; } return false; } - final private boolean jj_3R_138() { + private boolean jj_3R_OrderCondition_1188_19_138() + { Token xsp; xsp = jj_scanpos; if (jj_scan_token(38)) { jj_scanpos = xsp; - if (jj_3R_165()) return true; + if (jj_3R_OrderCondition_1188_27_165()) return true; } return false; } - final private boolean jj_3R_156() { - if (jj_3R_180()) return true; + private boolean jj_3R_TriplesNode_1961_11_156() + { + if (jj_3R_BlankNodePropertyList_1968_3_180()) return true; return false; } - final private boolean jj_3R_155() { - if (jj_3R_179()) return true; + private boolean jj_3R_TriplesNode_1960_11_155() + { + if (jj_3R_Collection_2003_9_179()) return true; return false; } - final private boolean jj_3R_180() { + private boolean jj_3R_BlankNodePropertyList_1968_3_180() + { if (jj_scan_token(LBRACKET)) return true; - if (jj_3R_77()) return true; + if (jj_3R_PropertyListNotEmpty_1631_3_77()) return true; return false; } - final private boolean jj_3R_146() { + private boolean jj_3R_QueryExp_2346_4_146() + { if (jj_scan_token(QUERY)) return true; if (jj_scan_token(LPAREN)) return true; return false; } - final private boolean jj_3_9() { - if (jj_3R_74()) return true; + private boolean jj_3_9() + { + if (jj_3R_FunctionCorese_2331_3_74()) return true; return false; } - final private boolean jj_3R_104() { - if (jj_3R_101()) return true; + private boolean jj_3R_OrderCondition_1184_15_104() + { + if (jj_3R_FunctionCall_2528_3_101()) return true; 
return false; } - final private boolean jj_3R_102() { - if (jj_3R_67()) return true; + private boolean jj_3R_OrderCondition_1182_17_102() + { + if (jj_3R_Var_2082_3_67()) return true; return false; } - final private boolean jj_3R_103() { - if (jj_3R_100()) return true; + private boolean jj_3R_OrderCondition_1183_15_103() + { + if (jj_3R_BuiltInCall_2219_3_100()) return true; return false; } - final private boolean jj_3R_105() { + private boolean jj_3R_OrderCondition_1187_9_105() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_138()) jj_scanpos = xsp; - if (jj_3R_139()) return true; + if (jj_3R_OrderCondition_1188_19_138()) jj_scanpos = xsp; + if (jj_3R_BrackettedExpression_2787_9_139()) return true; return false; } - final private boolean jj_3R_157() { + private boolean jj_3R_StarTriple_1574_5_157() + { if (jj_scan_token(LTLT)) return true; - if (jj_3R_181()) return true; + if (jj_3R_StarTerm_1586_5_181()) return true; return false; } - final private boolean jj_3R_120() { + private boolean jj_3R_TriplesNode_1959_3_120() + { Token xsp; xsp = jj_scanpos; - if (jj_3R_155()) { + if (jj_3R_TriplesNode_1960_11_155()) { jj_scanpos = xsp; - if (jj_3R_156()) return true; + if (jj_3R_TriplesNode_1961_11_156()) return true; } return false; } - final private boolean jj_3R_84() { - if (jj_3R_99()) return true; + private boolean jj_3R_Body_2718_10_84() + { + if (jj_3R_Expression_2119_3_99()) return true; return false; } - final private boolean jj_3_19() { + private boolean jj_3_19() + { if (jj_scan_token(SEMICOLON)) return true; Token xsp; xsp = jj_scanpos; - if (jj_3R_84()) { + if (jj_3R_Body_2718_10_84()) { + jj_scanpos = xsp; + if (jj_3R_Body_2718_36_85()) return true; + } + return false; + } + + private boolean jj_3R_FunctionCorese_2338_5_112() + { + if (jj_3R_QueryExp_2346_4_146()) return true; + return false; + } + + private boolean jj_3R_FunctionCorese_2337_5_111() + { + if (jj_3R_Lambda_2587_3_145()) return true; + return false; + } + + private boolean jj_3R_FunctionCorese_2336_5_110() + { + if (jj_3R_For_2388_5_144()) return true; + return false; + } + + private boolean jj_3R_FunctionCorese_2335_5_109() + { + if (jj_3R_Set_2491_9_143()) return true; + return false; + } + + private boolean jj_3R_FunctionCorese_2334_5_108() + { + if (jj_3R_Let_2476_9_142()) return true; + return false; + } + + private boolean jj_3R_FunctionCorese_2333_5_107() + { + if (jj_3R_TryCatch_2467_5_141()) return true; + return false; + } + + private boolean jj_3R_FunctionCorese_2332_5_106() + { + if (jj_3R_IfThenElse_2365_5_140()) return true; + return false; + } + + private boolean jj_3R_OrderCondition_1177_9_73() + { + Token xsp; + xsp = jj_scanpos; + if (jj_3R_OrderCondition_1182_17_102()) { + jj_scanpos = xsp; + if (jj_3R_OrderCondition_1183_15_103()) { + jj_scanpos = xsp; + if (jj_3R_OrderCondition_1184_15_104()) { + jj_scanpos = xsp; + if (jj_3_9()) { + jj_scanpos = xsp; + if (jj_3R_OrderCondition_1187_9_105()) return true; + } + } + } + } + return false; + } + + private boolean jj_3R_FunctionCorese_2331_3_74() + { + Token xsp; + xsp = jj_scanpos; + if (jj_3R_FunctionCorese_2332_5_106()) { + jj_scanpos = xsp; + if (jj_3R_FunctionCorese_2333_5_107()) { + jj_scanpos = xsp; + if (jj_3R_FunctionCorese_2334_5_108()) { + jj_scanpos = xsp; + if (jj_3R_FunctionCorese_2335_5_109()) { + jj_scanpos = xsp; + if (jj_3R_FunctionCorese_2336_5_110()) { + jj_scanpos = xsp; + if (jj_3R_FunctionCorese_2337_5_111()) { + jj_scanpos = xsp; + if (jj_3R_FunctionCorese_2338_5_112()) return true; + } + } + } + } + } + } + return false; + } + + 
private boolean jj_3R_Body_2715_2_166() + { + if (jj_scan_token(LBRACE)) return true; + return false; + } + + private boolean jj_3R_RDFstar_1562_5_121() + { + if (jj_3R_StarTriple_1574_5_157()) return true; + return false; + } + + private boolean jj_3R_exists_2319_11_185() + { + if (jj_scan_token(NOT)) return true; + return false; + } + + private boolean jj_3R_SelectQuery_793_5_182() + { + if (jj_scan_token(SELECT)) return true; + return false; + } + + private boolean jj_3R_TriplesSameSubject_1552_9_86() + { + if (jj_3R_TriplesNode_1959_3_120()) return true; + return false; + } + + private boolean jj_3_8() + { + if (jj_3R_OrderCondition_1177_9_73()) return true; + return false; + } + + private boolean jj_3R_TriplesSameSubject_1555_5_88() + { + if (jj_3R_tuple_1599_9_122()) return true; + return false; + } + + private boolean jj_3R_exists_2317_9_164() + { + Token xsp; + xsp = jj_scanpos; + if (jj_3R_exists_2319_11_185()) jj_scanpos = xsp; + if (jj_scan_token(EXIST)) return true; + if (jj_3R_GroupGraphPattern_1223_3_95()) return true; + return false; + } + + private boolean jj_3R_TriplesSameSubject_1554_5_87() + { + if (jj_3R_RDFstar_1562_5_121()) return true; + return false; + } + + private boolean jj_3_11() + { + if (jj_3R_VarOrTerm_2056_3_76()) return true; + if (jj_3R_PropertyListNotEmpty_1631_3_77()) return true; + return false; + } + + private boolean jj_3R_TriplesSameSubject_1548_3_61() + { + Token xsp; + xsp = jj_scanpos; + if (jj_3_11()) { + jj_scanpos = xsp; + if (jj_3R_TriplesSameSubject_1552_9_86()) { + jj_scanpos = xsp; + if (jj_3R_TriplesSameSubject_1554_5_87()) { jj_scanpos = xsp; - if (jj_3R_85()) return true; + if (jj_3R_TriplesSameSubject_1555_5_88()) return true; } + } + } + return false; + } + + private boolean jj_3R_PrimaryRegExp_1915_19_82() + { + if (jj_3R_GroupGraphPattern_1223_3_95()) return true; + return false; + } + + private boolean jj_3R_BuiltInCall_2309_7_137() + { + if (jj_3R_exists_2317_9_164()) return true; + return false; + } + + private boolean jj_3R_GroupClause_1148_11_72() + { + if (jj_3R_FunctionCorese_2331_3_74()) return true; + return false; + } + + private boolean jj_3R_GroupClause_1147_11_71() + { + if (jj_3R_FunctionCall_2528_3_101()) return true; return false; } - final private boolean jj_3R_112() { - if (jj_3R_146()) return true; + private boolean jj_3R_GroupClause_1146_11_70() + { + if (jj_3R_BuiltInCall_2219_3_100()) return true; return false; } - final private boolean jj_3R_111() { - if (jj_3R_145()) return true; + private boolean jj_3R_GroupClause_1145_11_69() + { + if (jj_3R_Var_2082_3_67()) return true; return false; } - final private boolean jj_3R_110() { - if (jj_3R_144()) return true; + private boolean jj_3R_GroupClause_1144_11_68() + { + if (jj_scan_token(LPAREN)) return true; + if (jj_3R_Expression_2119_3_99()) return true; return false; } - final private boolean jj_3R_109() { - if (jj_3R_143()) return true; + private boolean jj_3R_PrimaryRegExp_1902_19_81() + { + if (jj_scan_token(LBRACKET)) return true; return false; } - final private boolean jj_3R_108() { - if (jj_3R_142()) return true; + private boolean jj_3R_BuiltInCall_2298_5_136() + { + Token xsp; + xsp = jj_scanpos; + if (jj_scan_token(85)) { + jj_scanpos = xsp; + if (jj_scan_token(134)) { + jj_scanpos = xsp; + if (jj_scan_token(100)) { + jj_scanpos = xsp; + if (jj_scan_token(123)) { + jj_scanpos = xsp; + if (jj_scan_token(117)) { + jj_scanpos = xsp; + if (jj_scan_token(124)) { + jj_scanpos = xsp; + if (jj_scan_token(95)) { + jj_scanpos = xsp; + if (jj_scan_token(93)) { + 
jj_scanpos = xsp; + if (jj_scan_token(86)) { + jj_scanpos = xsp; + if (jj_scan_token(87)) { + jj_scanpos = xsp; + if (jj_scan_token(90)) { + jj_scanpos = xsp; + if (jj_scan_token(89)) { + jj_scanpos = xsp; + if (jj_scan_token(88)) { + jj_scanpos = xsp; + if (jj_scan_token(92)) { + jj_scanpos = xsp; + if (jj_scan_token(91)) { + jj_scanpos = xsp; + if (jj_scan_token(52)) { + jj_scanpos = xsp; + if (jj_scan_token(183)) return true; + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } + if (jj_scan_token(LPAREN)) return true; return false; } + /** Generated Token Manager. */ public SparqlCoreseTokenManager token_source; - JavaCharStream jj_input_stream; - public Token token, jj_nt; + SimpleCharStream jj_input_stream; + /** Current token. */ + public Token token; + /** Next token. */ + public Token jj_nt; private int jj_ntk; private Token jj_scanpos, jj_lastpos; private int jj_la; - public boolean lookingAhead = false; - private boolean jj_semLA; private int jj_gen; final private int[] jj_la1 = new int[273]; static private int[] jj_la1_0; @@ -10857,333 +11605,380 @@ final private boolean jj_3R_108() { static private int[] jj_la1_7; static private int[] jj_la1_8; static { - jj_la1_0(); - jj_la1_1(); - jj_la1_2(); - jj_la1_3(); - jj_la1_4(); - jj_la1_5(); - jj_la1_6(); - jj_la1_7(); - jj_la1_8(); - } - private static void jj_la1_0() { - jj_la1_0 = new int[] {0x3f000000,0x0,0x3f000000,0x3f060000,0x60000,0x0,0x404000,0x808000,0xf00,0xf00,0xf00,0xc3ff00,0x0,0x0,0xc0c000,0x3f00,0xc0c000,0xc0c000,0x404000,0x0,0x808000,0x0,0x0,0xc0c000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x700,0x0,0x700,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x700,0x0,0x33f00,0x0,0x33f00,0x0,0x700,0x0,0x0,0x0,0x3f00,0x0,0x3f00,0x0,0x40000000,0x3f00,0x0,0x3f00,0x0,0x3f00,0x0,0x0,0x0,0x3f00,0x0,0x8000000,0x3000,0x1000000,0x0,0x0,0x0,0x3700,0x3700,0x0,0x0,0x0,0x40000000,0x0,0x40000000,0x3000,0x0,0x3000,0x0,0x3000,0x0,0x3000,0x700,0x0,0x0,0x0,0x0,0x0,0x0,0x3000,0x0,0x700,0x0,0x700,0x0,0x3000,0x700,0x700,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x3700,0x0,0x0,0x3700,0x0,0x1000000,0x33f00,0x33f00,0x0,0x33f00,0x0,0x0,0x0,0x33f00,0x0,0x0,0x0,0x3000,0x0,0x3700,0x0,0x700,0x33f00,0x0,0x33f00,0x0,0x33f00,0x30000,0x3f00,0x0,0x33f00,0x0,0x0,0x0,0x0,0x33f00,0x203700,0x3700,0x0,0x700,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x700,0x0,0x700,0x0,0x0,0x0,0x30000,0x20000,0x30000,0x30000,0x33f00,0x33f00,0x30000,0x3f00,0x3f00,0x3700,0x3000,0x0,0x700,0xf00,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x3f00,0x0,0x3f00,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xd000000,0x0,0x0,0x0,0x0,0x0,0x3000,0x0,0x3000,0xd063f00,0x1063000,0xd000000,0x700,0x700,0x700,0x0,0x0,0x0,0x700,0x3700,0xd063f00,0x1063700,0x700,0x0,0x700,0x0,0x700,0x3700,0x0,0x0,0x0,0x3000,0x0,0x3000,0x0,0x0,0x0,0x3000,0x3000,0x0,0x3000,0x60000,0x60000,0x700,0x700,0x0,0x3f00,0x3f00,0x0,0x3f00,0x0,0x60000,0x40000000,0x0,0x3f00,0x3f00,0x0,0x3f00,0x700,0x0,0x40000,0x40000,0x0,0x0,0x700,0x700,0x600,0x800,}; - } - private static void jj_la1_1() { - jj_la1_1 = new int[] 
{0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x800,0x0,0x800,0x0,0x800,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x900,0x0,0x0,0x0,0x800,0x800,0x800,0x800,0x0,0x0,0x400,0x0,0x0,0x0,0x0,0x0,0x100,0x0,0x0,0x800,0x0,0x200,0x0,0x0,0x0,0x0,0x9ff50000,0x0,0x9ff50000,0x0,0x0,0x9ff50000,0x0,0x9ff50000,0x0,0x9ff50000,0x0,0x0,0x0,0x9ff50000,0x200,0x0,0x0,0x0,0x200,0x200,0x600,0x0,0x0,0x200,0x0,0x0,0x1,0x0,0x1,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x100,0x400,0x20,0x8,0x6,0x0,0x0,0x0,0x0,0x0,0x80310000,0x0,0x80310000,0x0,0x0,0x0,0x4,0x2,0x6,0x8,0x0,0x0,0x0,0x8,0x0,0x9ff50000,0xc0,0xc0,0x80310000,0xc0,0x0,0x8002b800,0x8002b800,0x0,0x0,0x0,0x0,0x0,0x0,0xa000,0x80021800,0x0,0x0,0x0,0x0,0x4000,0x9ff50000,0x800,0x0,0x800,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x10000,0x10000,0x10000,0x0,0x0,0x0,0x0,0x9ff50000,0x0,0x9ff50000,0x200000,0x0,0x0,0x0,0x100000,0x0,0x80310000,0x10000,0x1fc40000,0x0,0x0,0x10000000,0x40000000,0x0,0xc000000,0x0,0x0,0x0,0x9ff51000,0x0,0x1000,0x0,0x0,0x0,0xc00000,0x0,0x3000000,0x0,0x0,0x9ff51000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x9ff50000,0x9ff50000,0x0,0x9ff50000,0x0,0x0,0x0,0x0,0x9ff50000,0x9ff50000,0x0,0x9ff50000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,}; - } - private static void jj_la1_2() { - jj_la1_2 = new int[] {0xfbe,0x0,0xfbe,0xfbe,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xfbe,0x0,0x10000,0x8000,0xc,0x10000,0x6000,0x10000,0x1a0,0x10000,0x0,0x2000,0x0,0x2000,0x1be,0xe00,0x20000,0x600,0x800,0x200,0x600,0x1000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xfffc0000,0x0,0xfffc0000,0x0,0x0,0xfffc0000,0x0,0xfffc0000,0x0,0xfffc0000,0x0,0x0,0x0,0xfffc0000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xfffc0000,0x0,0xfffc0000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xfffc0000,0x0,0x0,0xfffc0000,0x0,0x0,0x1,0x1,0x0,0x0,0x0,0x1,0x0,0x0,0x0,0x0,0x0,0x0,0x10000,0x0,0x0,0xfffc0000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xfffc0000,0x40000,0xfffc0000,0x0,0x40080000,0x100000,0x0,0xbfe00000,0x0,0xfffc0000,0x0,0x0,0xfbe,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xfffc0fbe,0x0,0xfbe,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xfffc0fbe,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xfffc0000,0xfffc0000,0x0,0xfffc0000,0x0,0x0,0x0,0x0,0xfffc0000,0xfffc0000,0x0,0xfffc0000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,}; - } - private static void jj_la1_3() { - jj_la1_3 = new int[] 
{0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0x0,0xffffffff,0x0,0x0,0xffffffff,0x0,0xffffffff,0x0,0xffffffff,0x0,0x0,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0x0,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0x200,0xffffffff,0x3000,0xe4d0cc44,0x0,0x30f01a3,0x18200010,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0xffffffff,0x0,0xffffffff,0x0,0x0,0x0,0x0,0xffffffff,0xffffffff,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,}; - } - private static void jj_la1_4() { - jj_la1_4 = new int[] {0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0x0,0xffffffff,0x0,0x0,0xffffffff,0x0,0xffffffff,0x0,0xffffffff,0x0,0x0,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0x0,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0x0,0xffffffff,0x2080,0x7ffd73f,0xf8000000,0x800,0x40,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0xffffffff,0x0,0xffffffff,0x0,0x0,0x0,0x0,0xffffffff,0xffffffff,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,}; - } - private static void jj_la1_5() { - jj_la1_5 = new int[] 
{0x0,0x40000000,0x40000000,0x40000000,0x40000000,0x40000000,0x0,0x0,0x6,0x6,0x6,0xc00006,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x10,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x800,0xc00006,0x0,0xc00006,0x0,0x0,0x0,0x0,0x0,0xff8043e7,0x0,0xff8043e7,0x3e0,0x0,0xff8043e7,0x0,0xff8043e7,0x380,0xff8043e7,0x0,0x0,0x0,0xff8043e7,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x800,0x8,0x0,0x20000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x420,0x0,0x800,0x10,0x0,0x0,0x2006,0x3f804001,0x2006,0x3f804001,0x0,0x2006,0x0,0x0,0x0,0x0,0x0,0x400,0x20,0x400,0x420,0x10000,0xff804001,0x0,0x0,0x3f804001,0x0,0x0,0xc01806,0xc01806,0x0,0xc00006,0x0,0x0,0x0,0xc00006,0x0,0x1800,0x0,0x0,0x0,0x0,0x0,0xff804001,0xc00006,0x0,0xc00006,0x0,0xc00006,0xc00000,0x6,0xc00000,0x6,0x0,0x0,0x0,0x0,0x6,0x20000000,0x20000000,0x200000,0x20000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x6,0x6,0x0,0x6,0x0,0x0,0x0,0x200000,0x6,0x6,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xff804007,0x0,0xff804007,0x0,0xf000000,0x4001,0x30000000,0x800000,0x0,0x3f804001,0x0,0xc0000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xff804007,0x0,0x0,0x6,0x6,0x6,0x0,0x0,0x0,0x0,0x0,0xff804007,0x0,0x0,0xc0000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x6,0x6,0x0,0xff8043e7,0xff8043e7,0x0,0xff8043e7,0x0,0x40000000,0x0,0x0,0xff804007,0xff804007,0x0,0xff804007,0x6,0x0,0x0,0x0,0x6,0x0,0x0,0x0,0x0,0x0,}; - } - private static void jj_la1_6() { - jj_la1_6 = new int[] {0x0,0x4000002,0x4000002,0x4000002,0x4000002,0x4000002,0x0,0x0,0x413c3800,0x413c3800,0x413c3800,0x513c3800,0x0,0x0,0x0,0x40000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x80000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x513c3800,0x0,0x513c3800,0x0,0x0,0x0,0x1000000,0x10000000,0x413c3801,0x80000000,0x413c3801,0x0,0x0,0x413c3801,0x80000000,0x413c3801,0x0,0x413c3801,0x1000000,0x13c0000,0x1000000,0x413c3801,0x0,0x0,0x1000000,0x0,0x0,0x0,0x4000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x1000000,0x1000000,0x1000000,0x1000000,0x1000000,0x0,0x1000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x1000000,0x3c3800,0x0,0x3c3800,0x0,0x1000000,0x3c3800,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x1000001,0x0,0x0,0x0,0x1000000,0x0,0x553c3800,0x553c3800,0x0,0x513c3800,0x0,0x0,0x0,0x513c3800,0x0,0x0,0x0,0x1000000,0x0,0x0,0x0,0x1000001,0x513c3800,0x0,0x513c3800,0x0,0x513c3800,0x11000000,0x413c3800,0x0,0x513c3800,0x0,0x0,0x0,0x0,0x513c3800,0x1000020,0x1000020,0x0,0x1000020,0x0,0x0,0x0,0x800,0x0,0x800,0x4000000,0x4000000,0x1000020,0x0,0x1000000,0x14000800,0x38,0x3c0,0x11000000,0x0,0x0,0x0,0x513c3800,0x513c3800,0x11000000,0x413c3800,0x40000000,0x0,0x0,0x0,0x13c3800,0x413c3800,0x0,0x0,0x0,0x3c,0x3c,0x0,0x0,0x0,0x0,0x413c3801,0x0,0x413c3801,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x1,0x0,0x0,0x4000000,0x0,0x2000000,0x0,0x0,0x0,0x1000000,0x413c3801,0x1000000,0x0,0x13c3800,0x13c3800,0x3c3800,0x0,0x0,0x0,0x0,0x1000000,0x413c3801,0x1000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x1000000,0x0,0x0,0x0,0x1000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x3c3800,0x3c3800,0x0,0x413c3801,0x413c3801,0x80000000,0x493c3801,0x2,0x0,0x0,0x0,0x413c3801,0x413c3801,0x80000000,0x413c3801,0x3c3800,0x3800,0x0,0x0,0x0,0x3c0000,0x0,0x0,0x0,0x40000000,}; - } - private static void jj_la1_7() { - jj_la1_7 = new int[] 
{0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x10,0x0,0x10,0xc00010,0x2,0x0,0x0,0x800000,0x0,0x0,0x0,0x2,0x0,0x2,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xc00010,0x0,0xc00010,0x0,0x0,0x1,0x0,0x0,0xc00010,0x0,0xc00010,0x0,0x0,0xc00010,0x0,0xc00010,0x0,0xc00010,0x0,0x0,0x0,0xc00010,0x0,0x0,0x820000,0x0,0x0,0x0,0x0,0x800000,0x820000,0x0,0x0,0x0,0x0,0x0,0x0,0x820000,0x0,0x800000,0x0,0x820000,0x1,0x800000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x800000,0x0,0x10,0x0,0x10,0x0,0x800000,0x10,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x800000,0x0,0x0,0x800000,0x0,0x0,0xc00010,0xc00010,0x2,0xc00010,0x2,0x0,0x2,0xc00010,0x0,0x0,0x1,0x800000,0x0,0x800000,0x0,0x0,0xc00010,0x2,0xc00010,0x2,0xc00010,0x400010,0x800010,0x0,0xc00000,0x100000,0x100000,0x2,0x1,0xc00010,0x800400,0x800400,0x0,0x400,0x102000,0x102000,0x40000,0x0,0x1,0x1,0xa8000,0xa8000,0x400,0xa8000,0x0,0x0,0xbcc,0x0,0x400000,0x400000,0x400000,0x400000,0xc00010,0xc00010,0x400000,0x800000,0x800000,0x800000,0x800000,0x0,0x0,0x0,0x2000,0x4000,0x0,0xbcc,0xbcc,0x18000,0x18000,0x60000,0x60000,0x818410,0x0,0x818410,0x0,0x0,0x0,0x0,0x0,0x1,0x0,0x0,0x0,0x0,0x1,0x0,0x0,0x1,0x0,0x800001,0x1,0x800000,0xc18410,0x800000,0x400000,0x0,0x0,0x0,0x0,0x1,0x0,0x0,0x800000,0xc18410,0x800000,0x0,0x0,0x0,0x1,0x0,0x800000,0x1,0x0,0x1,0x800000,0x1,0x900002,0x100000,0x2,0x2,0x900002,0x800001,0x1,0x800000,0x0,0x0,0x800000,0x800000,0x20000,0x818410,0x818410,0x0,0x818410,0x0,0x0,0x0,0x1,0x838410,0x838410,0x0,0x800010,0x0,0x0,0x200000,0x200000,0x0,0x0,0x0,0x0,0x0,0x0,}; - } - private static void jj_la1_8() { - jj_la1_8 = new int[] {0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,}; - } + jj_la1_init_0(); + jj_la1_init_1(); + jj_la1_init_2(); + jj_la1_init_3(); + jj_la1_init_4(); + jj_la1_init_5(); + jj_la1_init_6(); + jj_la1_init_7(); + jj_la1_init_8(); + } + private static void jj_la1_init_0() { + jj_la1_0 = new int[] 
{0x3f000000,0x0,0x3f000000,0x3f060000,0x60000,0x0,0x404000,0x808000,0xf00,0xf00,0xf00,0xc3ff00,0x0,0x0,0xc0c000,0x3f00,0xc0c000,0xc0c000,0x404000,0x0,0x808000,0x0,0x0,0xc0c000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x700,0x0,0x700,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x700,0x0,0x33f00,0x0,0x33f00,0x0,0x700,0x0,0x0,0x0,0x3f00,0x0,0x3f00,0x0,0x40000000,0x3f00,0x0,0x3f00,0x0,0x3f00,0x0,0x0,0x0,0x3f00,0x0,0x8000000,0x3000,0x1000000,0x0,0x0,0x0,0x3700,0x3700,0x0,0x0,0x0,0x40000000,0x0,0x40000000,0x3000,0x0,0x3000,0x0,0x3000,0x0,0x3000,0x700,0x0,0x0,0x0,0x0,0x0,0x0,0x3000,0x0,0x700,0x0,0x700,0x0,0x3000,0x700,0x700,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x3700,0x0,0x0,0x3700,0x0,0x1000000,0x33f00,0x33f00,0x0,0x33f00,0x0,0x0,0x0,0x33f00,0x0,0x0,0x0,0x3000,0x0,0x3700,0x0,0x700,0x33f00,0x0,0x33f00,0x0,0x33f00,0x30000,0x3f00,0x0,0x33f00,0x0,0x0,0x0,0x0,0x33f00,0x203700,0x3700,0x0,0x700,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x700,0x0,0x700,0x0,0x0,0x0,0x30000,0x20000,0x30000,0x30000,0x33f00,0x33f00,0x30000,0x3f00,0x3f00,0x3700,0x3000,0x0,0x700,0xf00,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x3f00,0x0,0x3f00,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xd000000,0x0,0x0,0x0,0x0,0x0,0x3000,0x0,0x3000,0xd063f00,0x1063000,0xd000000,0x700,0x700,0x700,0x0,0x0,0x0,0x700,0x3700,0xd063f00,0x1063700,0x700,0x0,0x700,0x0,0x700,0x3700,0x0,0x0,0x0,0x3000,0x0,0x3000,0x0,0x0,0x0,0x3000,0x3000,0x0,0x3000,0x60000,0x60000,0x700,0x700,0x0,0x3f00,0x3f00,0x0,0x3f00,0x0,0x60000,0x40000000,0x0,0x3f00,0x3f00,0x0,0x3f00,0x700,0x0,0x40000,0x40000,0x0,0x0,0x700,0x700,0x600,0x800,}; + } + private static void jj_la1_init_1() { + jj_la1_1 = new int[] {0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x800,0x0,0x800,0x0,0x800,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x900,0x0,0x0,0x0,0x800,0x800,0x800,0x800,0x0,0x0,0x400,0x0,0x0,0x0,0x0,0x0,0x100,0x0,0x0,0x800,0x0,0x200,0x0,0x0,0x0,0x0,0x9ff50000,0x0,0x9ff50000,0x0,0x0,0x9ff50000,0x0,0x9ff50000,0x0,0x9ff50000,0x0,0x0,0x0,0x9ff50000,0x200,0x0,0x0,0x0,0x200,0x200,0x600,0x0,0x0,0x200,0x0,0x0,0x1,0x0,0x1,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x100,0x400,0x20,0x8,0x6,0x0,0x0,0x0,0x0,0x0,0x80310000,0x0,0x80310000,0x0,0x0,0x0,0x4,0x2,0x6,0x8,0x0,0x0,0x0,0x8,0x0,0x9ff50000,0xc0,0xc0,0x80310000,0xc0,0x0,0x8002b800,0x8002b800,0x0,0x0,0x0,0x0,0x0,0x0,0xa000,0x80021800,0x0,0x0,0x0,0x0,0x4000,0x9ff50000,0x800,0x0,0x800,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x10000,0x10000,0x10000,0x0,0x0,0x0,0x0,0x9ff50000,0x0,0x9ff50000,0x200000,0x0,0x0,0x0,0x100000,0x0,0x80310000,0x10000,0x1fc40000,0x0,0x0,0x10000000,0x40000000,0x0,0xc000000,0x0,0x0,0x0,0x9ff51000,0x0,0x1000,0x0,0x0,0x0,0xc00000,0x0,0x3000000,0x0,0x0,0x9ff51000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x9ff50000,0x9ff50000,0x0,0x9ff50000,0x0,0x0,0x0,0x0,0x9ff50000,0x9ff50000,0x0,0x9ff50000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,}; + } + private static void jj_la1_init_2() { + jj_la1_2 = new int[] 
{0xfbe,0x0,0xfbe,0xfbe,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xfbe,0x0,0x10000,0x8000,0xc,0x10000,0x6000,0x10000,0x1a0,0x10000,0x0,0x2000,0x0,0x2000,0x1be,0xe00,0x20000,0x600,0x800,0x200,0x600,0x1000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xfffc0000,0x0,0xfffc0000,0x0,0x0,0xfffc0000,0x0,0xfffc0000,0x0,0xfffc0000,0x0,0x0,0x0,0xfffc0000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xfffc0000,0x0,0xfffc0000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xfffc0000,0x0,0x0,0xfffc0000,0x0,0x0,0x1,0x1,0x0,0x0,0x0,0x1,0x0,0x0,0x0,0x0,0x0,0x0,0x10000,0x0,0x0,0xfffc0000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xfffc0000,0x40000,0xfffc0000,0x0,0x40080000,0x100000,0x0,0xbfe00000,0x0,0xfffc0000,0x0,0x0,0xfbe,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xfffc0fbe,0x0,0xfbe,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xfffc0fbe,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xfffc0000,0xfffc0000,0x0,0xfffc0000,0x0,0x0,0x0,0x0,0xfffc0000,0xfffc0000,0x0,0xfffc0000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,}; + } + private static void jj_la1_init_3() { + jj_la1_3 = new int[] {0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0x0,0xffffffff,0x0,0x0,0xffffffff,0x0,0xffffffff,0x0,0xffffffff,0x0,0x0,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0x0,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0x200,0xffffffff,0x3000,0xe4d0cc44,0x0,0x30f01a3,0x18200010,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0xffffffff,0x0,0xffffffff,0x0,0x0,0x0,0x0,0xffffffff,0xffffffff,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,}; + } + private static void jj_la1_init_4() { + jj_la1_4 = new int[] 
{0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0x0,0xffffffff,0x0,0x0,0xffffffff,0x0,0xffffffff,0x0,0xffffffff,0x0,0x0,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0x0,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0x0,0xffffffff,0x2080,0x7ffd73f,0xf8000000,0x800,0x40,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffffffff,0xffffffff,0x0,0xffffffff,0x0,0x0,0x0,0x0,0xffffffff,0xffffffff,0x0,0xffffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,}; + } + private static void jj_la1_init_5() { + jj_la1_5 = new int[] {0x0,0x40000000,0x40000000,0x40000000,0x40000000,0x40000000,0x0,0x0,0x6,0x6,0x6,0xc00006,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x10,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x800,0xc00006,0x0,0xc00006,0x0,0x0,0x0,0x0,0x0,0xff8043e7,0x0,0xff8043e7,0x3e0,0x0,0xff8043e7,0x0,0xff8043e7,0x380,0xff8043e7,0x0,0x0,0x0,0xff8043e7,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x800,0x8,0x0,0x20000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x420,0x0,0x800,0x10,0x0,0x0,0x2006,0x3f804001,0x2006,0x3f804001,0x0,0x2006,0x0,0x0,0x0,0x0,0x0,0x400,0x20,0x400,0x420,0x10000,0xff804001,0x0,0x0,0x3f804001,0x0,0x0,0xc01806,0xc01806,0x0,0xc00006,0x0,0x0,0x0,0xc00006,0x0,0x1800,0x0,0x0,0x0,0x0,0x0,0xff804001,0xc00006,0x0,0xc00006,0x0,0xc00006,0xc00000,0x6,0xc00000,0x6,0x0,0x0,0x0,0x0,0x6,0x20000000,0x20000000,0x200000,0x20000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x6,0x6,0x0,0x6,0x0,0x0,0x0,0x200000,0x6,0x6,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xff804007,0x0,0xff804007,0x0,0xf000000,0x4001,0x30000000,0x800000,0x0,0x3f804001,0x0,0xc0000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xff804007,0x0,0x0,0x6,0x6,0x6,0x0,0x0,0x0,0x0,0x0,0xff804007,0x0,0x0,0xc0000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x6,0x6,0x0,0xff8043e7,0xff8043e7,0x0,0xff8043e7,0x0,0x40000000,0x0,0x0,0xff804007,0xff804007,0x0,0xff804007,0x6,0x0,0x0,0x0,0x6,0x0,0x0,0x0,0x0,0x0,}; + } + private static void jj_la1_init_6() { + jj_la1_6 = new int[] 
{0x0,0x4000002,0x4000002,0x4000002,0x4000002,0x4000002,0x0,0x0,0x413c3800,0x413c3800,0x413c3800,0x513c3800,0x0,0x0,0x0,0x40000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x80000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x513c3800,0x0,0x513c3800,0x0,0x0,0x0,0x1000000,0x10000000,0x413c3801,0x80000000,0x413c3801,0x0,0x0,0x413c3801,0x80000000,0x413c3801,0x0,0x413c3801,0x1000000,0x13c0000,0x1000000,0x413c3801,0x0,0x0,0x1000000,0x0,0x0,0x0,0x4000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x1000000,0x1000000,0x1000000,0x1000000,0x1000000,0x0,0x1000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x1000000,0x3c3800,0x0,0x3c3800,0x0,0x1000000,0x3c3800,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x1000001,0x0,0x0,0x0,0x1000000,0x0,0x553c3800,0x553c3800,0x0,0x513c3800,0x0,0x0,0x0,0x513c3800,0x0,0x0,0x0,0x1000000,0x0,0x0,0x0,0x1000001,0x513c3800,0x0,0x513c3800,0x0,0x513c3800,0x11000000,0x413c3800,0x0,0x513c3800,0x0,0x0,0x0,0x0,0x513c3800,0x1000020,0x1000020,0x0,0x1000020,0x0,0x0,0x0,0x800,0x0,0x800,0x4000000,0x4000000,0x1000020,0x0,0x1000000,0x14000800,0x38,0x3c0,0x11000000,0x0,0x0,0x0,0x513c3800,0x513c3800,0x11000000,0x413c3800,0x40000000,0x0,0x0,0x0,0x13c3800,0x413c3800,0x0,0x0,0x0,0x3c,0x3c,0x0,0x0,0x0,0x0,0x413c3801,0x0,0x413c3801,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x1,0x0,0x0,0x4000000,0x0,0x2000000,0x0,0x0,0x0,0x1000000,0x413c3801,0x1000000,0x0,0x13c3800,0x13c3800,0x3c3800,0x0,0x0,0x0,0x0,0x1000000,0x413c3801,0x1000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x1000000,0x0,0x0,0x0,0x1000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x3c3800,0x3c3800,0x0,0x413c3801,0x413c3801,0x80000000,0x493c3801,0x2,0x0,0x0,0x0,0x413c3801,0x413c3801,0x80000000,0x413c3801,0x3c3800,0x3800,0x0,0x0,0x0,0x3c0000,0x0,0x0,0x0,0x40000000,}; + } + private static void jj_la1_init_7() { + jj_la1_7 = new int[] {0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x10,0x0,0x10,0xc00010,0x2,0x0,0x0,0x800000,0x0,0x0,0x0,0x2,0x0,0x2,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xc00010,0x0,0xc00010,0x0,0x0,0x1,0x0,0x0,0xc00010,0x0,0xc00010,0x0,0x0,0xc00010,0x0,0xc00010,0x0,0xc00010,0x0,0x0,0x0,0xc00010,0x0,0x0,0x820000,0x0,0x0,0x0,0x0,0x800000,0x820000,0x0,0x0,0x0,0x0,0x0,0x0,0x820000,0x0,0x800000,0x0,0x820000,0x1,0x800000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x800000,0x0,0x10,0x0,0x10,0x0,0x800000,0x10,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x800000,0x0,0x0,0x800000,0x0,0x0,0xc00010,0xc00010,0x2,0xc00010,0x2,0x0,0x2,0xc00010,0x0,0x0,0x1,0x800000,0x0,0x800000,0x0,0x0,0xc00010,0x2,0xc00010,0x2,0xc00010,0x400010,0x800010,0x0,0xc00000,0x100000,0x100000,0x2,0x1,0xc00010,0x800400,0x800400,0x0,0x400,0x102000,0x102000,0x40000,0x0,0x1,0x1,0xa8000,0xa8000,0x400,0xa8000,0x0,0x0,0xbcc,0x0,0x400000,0x400000,0x400000,0x400000,0xc00010,0xc00010,0x400000,0x800000,0x800000,0x800000,0x800000,0x0,0x0,0x0,0x2000,0x4000,0x0,0xbcc,0xbcc,0x18000,0x18000,0x60000,0x60000,0x818410,0x0,0x818410,0x0,0x0,0x0,0x0,0x0,0x1,0x0,0x0,0x0,0x0,0x1,0x0,0x0,0x1,0x0,0x800001,0x1,0x800000,0xc18410,0x800000,0x400000,0x0,0x0,0x0,0x0,0x1,0x0,0x0,0x800000,0xc18410,0x800000,0x0,0x0,0x0,0x1,0x0,0x800000,0x1,0x0,0x1,0x800000,0x1,0x900002,0x100000,0x2,0x2,0x900002,0x800001,0x1,0x800000,0x0,0x0,0x800000,0x800000,0x20000,0x818410,0x818410,0x0,0x818410,0x0,0x0,0x0,0x1,0x838410,0x838410,0x0,0x800010,0x0,0x0,0x200000,0x200000,0x0,0x0,0x0,0x0,0x0,0x0,}; + } + private static void jj_la1_init_8() { + jj_la1_8 = new int[] 
{0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,}; + } final private JJCalls[] jj_2_rtns = new JJCalls[19]; private boolean jj_rescan = false; private int jj_gc = 0; + /** Constructor with InputStream. */ public SparqlCorese(java.io.InputStream stream) { - this(stream, null); + this(stream, null); } + /** Constructor with InputStream and supplied encoding */ public SparqlCorese(java.io.InputStream stream, String encoding) { - try { jj_input_stream = new JavaCharStream(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); } - token_source = new SparqlCoreseTokenManager(jj_input_stream); - token = new Token(); - jj_ntk = -1; - jj_gen = 0; - for (int i = 0; i < 273; i++) jj_la1[i] = -1; - for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); + try { jj_input_stream = new SimpleCharStream(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); } + token_source = new SparqlCoreseTokenManager(jj_input_stream); + token = new Token(); + jj_ntk = -1; + jj_gen = 0; + for (int i = 0; i < 273; i++) jj_la1[i] = -1; + for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); } + /** Reinitialise. */ public void ReInit(java.io.InputStream stream) { - ReInit(stream, null); + ReInit(stream, null); } + /** Reinitialise. */ public void ReInit(java.io.InputStream stream, String encoding) { - try { jj_input_stream.ReInit(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); } - token_source.ReInit(jj_input_stream); - token = new Token(); - jj_ntk = -1; - jj_gen = 0; - for (int i = 0; i < 273; i++) jj_la1[i] = -1; - for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); + try { jj_input_stream.ReInit(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); } + token_source.ReInit(jj_input_stream); + token = new Token(); + jj_ntk = -1; + jj_gen = 0; + for (int i = 0; i < 273; i++) jj_la1[i] = -1; + for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); } + /** Constructor. 
*/ public SparqlCorese(java.io.Reader stream) { - jj_input_stream = new JavaCharStream(stream, 1, 1); - token_source = new SparqlCoreseTokenManager(jj_input_stream); - token = new Token(); - jj_ntk = -1; - jj_gen = 0; - for (int i = 0; i < 273; i++) jj_la1[i] = -1; - for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); + jj_input_stream = new SimpleCharStream(stream, 1, 1); + token_source = new SparqlCoreseTokenManager(jj_input_stream); + token = new Token(); + jj_ntk = -1; + jj_gen = 0; + for (int i = 0; i < 273; i++) jj_la1[i] = -1; + for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); } + /** Reinitialise. */ public void ReInit(java.io.Reader stream) { - jj_input_stream.ReInit(stream, 1, 1); - token_source.ReInit(jj_input_stream); - token = new Token(); - jj_ntk = -1; - jj_gen = 0; - for (int i = 0; i < 273; i++) jj_la1[i] = -1; - for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); - } - + if (jj_input_stream == null) { + jj_input_stream = new SimpleCharStream(stream, 1, 1); + } else { + jj_input_stream.ReInit(stream, 1, 1); + } + if (token_source == null) { + token_source = new SparqlCoreseTokenManager(jj_input_stream); + } + + token_source.ReInit(jj_input_stream); + token = new Token(); + jj_ntk = -1; + jj_gen = 0; + for (int i = 0; i < 273; i++) jj_la1[i] = -1; + for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); + } + + /** Constructor with generated Token Manager. */ public SparqlCorese(SparqlCoreseTokenManager tm) { - token_source = tm; - token = new Token(); - jj_ntk = -1; - jj_gen = 0; - for (int i = 0; i < 273; i++) jj_la1[i] = -1; - for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); + token_source = tm; + token = new Token(); + jj_ntk = -1; + jj_gen = 0; + for (int i = 0; i < 273; i++) jj_la1[i] = -1; + for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); } + /** Reinitialise. 
*/ public void ReInit(SparqlCoreseTokenManager tm) { - token_source = tm; - token = new Token(); - jj_ntk = -1; - jj_gen = 0; - for (int i = 0; i < 273; i++) jj_la1[i] = -1; - for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); - } - - final private Token jj_consume_token(int kind) throws ParseException { - Token oldToken; - if ((oldToken = token).next != null) token = token.next; - else token = token.next = token_source.getNextToken(); - jj_ntk = -1; - if (token.kind == kind) { - jj_gen++; - if (++jj_gc > 100) { - jj_gc = 0; - for (int i = 0; i < jj_2_rtns.length; i++) { - JJCalls c = jj_2_rtns[i]; - while (c != null) { - if (c.gen < jj_gen) c.first = null; - c = c.next; - } - } - } - return token; - } - token = oldToken; - jj_kind = kind; - throw generateParseException(); - } - - static private final class LookaheadSuccess extends java.lang.Error { } - final private LookaheadSuccess jj_ls = new LookaheadSuccess(); - final private boolean jj_scan_token(int kind) { - if (jj_scanpos == jj_lastpos) { - jj_la--; - if (jj_scanpos.next == null) { - jj_lastpos = jj_scanpos = jj_scanpos.next = token_source.getNextToken(); - } else { - jj_lastpos = jj_scanpos = jj_scanpos.next; - } - } else { - jj_scanpos = jj_scanpos.next; - } - if (jj_rescan) { - int i = 0; Token tok = token; - while (tok != null && tok != jj_scanpos) { i++; tok = tok.next; } - if (tok != null) jj_add_error_token(kind, i); - } - if (jj_scanpos.kind != kind) return true; - if (jj_la == 0 && jj_scanpos == jj_lastpos) throw jj_ls; - return false; - } - + token_source = tm; + token = new Token(); + jj_ntk = -1; + jj_gen = 0; + for (int i = 0; i < 273; i++) jj_la1[i] = -1; + for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); + } + + private Token jj_consume_token(int kind) throws ParseException { + Token oldToken; + if ((oldToken = token).next != null) token = token.next; + else token = token.next = token_source.getNextToken(); + jj_ntk = -1; + if (token.kind == kind) { + jj_gen++; + if (++jj_gc > 100) { + jj_gc = 0; + for (int i = 0; i < jj_2_rtns.length; i++) { + JJCalls c = jj_2_rtns[i]; + while (c != null) { + if (c.gen < jj_gen) c.first = null; + c = c.next; + } + } + } + return token; + } + token = oldToken; + jj_kind = kind; + throw generateParseException(); + } + + @SuppressWarnings("serial") + static private final class LookaheadSuccess extends java.lang.Error { + @Override + public Throwable fillInStackTrace() { + return this; + } + } + static private final LookaheadSuccess jj_ls = new LookaheadSuccess(); + private boolean jj_scan_token(int kind) { + if (jj_scanpos == jj_lastpos) { + jj_la--; + if (jj_scanpos.next == null) { + jj_lastpos = jj_scanpos = jj_scanpos.next = token_source.getNextToken(); + } else { + jj_lastpos = jj_scanpos = jj_scanpos.next; + } + } else { + jj_scanpos = jj_scanpos.next; + } + if (jj_rescan) { + int i = 0; Token tok = token; + while (tok != null && tok != jj_scanpos) { i++; tok = tok.next; } + if (tok != null) jj_add_error_token(kind, i); + } + if (jj_scanpos.kind != kind) return true; + if (jj_la == 0 && jj_scanpos == jj_lastpos) throw jj_ls; + return false; + } + + +/** Get the next Token. */ final public Token getNextToken() { - if (token.next != null) token = token.next; - else token = token.next = token_source.getNextToken(); - jj_ntk = -1; - jj_gen++; - return token; + if (token.next != null) token = token.next; + else token = token.next = token_source.getNextToken(); + jj_ntk = -1; + jj_gen++; + return token; } +/** Get the specific Token. 
*/ final public Token getToken(int index) { - Token t = lookingAhead ? jj_scanpos : token; - for (int i = 0; i < index; i++) { - if (t.next != null) t = t.next; - else t = t.next = token_source.getNextToken(); - } - return t; + Token t = token; + for (int i = 0; i < index; i++) { + if (t.next != null) t = t.next; + else t = t.next = token_source.getNextToken(); + } + return t; } - final private int jj_ntk() { - if ((jj_nt=token.next) == null) - return (jj_ntk = (token.next=token_source.getNextToken()).kind); - else - return (jj_ntk = jj_nt.kind); + private int jj_ntk_f() { + if ((jj_nt=token.next) == null) + return (jj_ntk = (token.next=token_source.getNextToken()).kind); + else + return (jj_ntk = jj_nt.kind); } - private java.util.Vector jj_expentries = new java.util.Vector(); + private java.util.List jj_expentries = new java.util.ArrayList(); private int[] jj_expentry; private int jj_kind = -1; private int[] jj_lasttokens = new int[100]; private int jj_endpos; private void jj_add_error_token(int kind, int pos) { - if (pos >= 100) return; - if (pos == jj_endpos + 1) { - jj_lasttokens[jj_endpos++] = kind; - } else if (jj_endpos != 0) { - jj_expentry = new int[jj_endpos]; - for (int i = 0; i < jj_endpos; i++) { - jj_expentry[i] = jj_lasttokens[i]; - } - boolean exists = false; - for (java.util.Enumeration e = jj_expentries.elements(); e.hasMoreElements();) { - int[] oldentry = (int[])(e.nextElement()); - if (oldentry.length == jj_expentry.length) { - exists = true; - for (int i = 0; i < jj_expentry.length; i++) { - if (oldentry[i] != jj_expentry[i]) { - exists = false; - break; - } - } - if (exists) break; - } - } - if (!exists) jj_expentries.addElement(jj_expentry); - if (pos != 0) jj_lasttokens[(jj_endpos = pos) - 1] = kind; - } - } - + if (pos >= 100) { + return; + } + + if (pos == jj_endpos + 1) { + jj_lasttokens[jj_endpos++] = kind; + } else if (jj_endpos != 0) { + jj_expentry = new int[jj_endpos]; + + for (int i = 0; i < jj_endpos; i++) { + jj_expentry[i] = jj_lasttokens[i]; + } + + for (int[] oldentry : jj_expentries) { + if (oldentry.length == jj_expentry.length) { + boolean isMatched = true; + + for (int i = 0; i < jj_expentry.length; i++) { + if (oldentry[i] != jj_expentry[i]) { + isMatched = false; + break; + } + + } + if (isMatched) { + jj_expentries.add(jj_expentry); + break; + } + } + } + + if (pos != 0) { + jj_lasttokens[(jj_endpos = pos) - 1] = kind; + } + } + } + + /** Generate ParseException. 
*/ public ParseException generateParseException() { - jj_expentries.removeAllElements(); - boolean[] la1tokens = new boolean[257]; - for (int i = 0; i < 257; i++) { - la1tokens[i] = false; - } - if (jj_kind >= 0) { - la1tokens[jj_kind] = true; - jj_kind = -1; - } - for (int i = 0; i < 273; i++) { - if (jj_la1[i] == jj_gen) { - for (int j = 0; j < 32; j++) { - if ((jj_la1_0[i] & (1<= 0) { + la1tokens[jj_kind] = true; + jj_kind = -1; + } + for (int i = 0; i < 273; i++) { + if (jj_la1[i] == jj_gen) { + for (int j = 0; j < 32; j++) { + if ((jj_la1_0[i] & (1< jj_gen) { - jj_la = p.arg; jj_lastpos = jj_scanpos = p.first; - switch (i) { - case 0: jj_3_1(); break; - case 1: jj_3_2(); break; - case 2: jj_3_3(); break; - case 3: jj_3_4(); break; - case 4: jj_3_5(); break; - case 5: jj_3_6(); break; - case 6: jj_3_7(); break; - case 7: jj_3_8(); break; - case 8: jj_3_9(); break; - case 9: jj_3_10(); break; - case 10: jj_3_11(); break; - case 11: jj_3_12(); break; - case 12: jj_3_13(); break; - case 13: jj_3_14(); break; - case 14: jj_3_15(); break; - case 15: jj_3_16(); break; - case 16: jj_3_17(); break; - case 17: jj_3_18(); break; - case 18: jj_3_19(); break; - } - } - p = p.next; - } while (p != null); - } catch(LookaheadSuccess ls) { } - } - jj_rescan = false; - } - - final private void jj_save(int index, int xla) { - JJCalls p = jj_2_rtns[index]; - while (p.gen > jj_gen) { - if (p.next == null) { p = p.next = new JJCalls(); break; } - p = p.next; - } - p.gen = jj_gen + xla - jj_la; p.first = token; p.arg = xla; + private void jj_rescan_token() { + jj_rescan = true; + for (int i = 0; i < 19; i++) { + try { + JJCalls p = jj_2_rtns[i]; + + do { + if (p.gen > jj_gen) { + jj_la = p.arg; jj_lastpos = jj_scanpos = p.first; + switch (i) { + case 0: jj_3_1(); break; + case 1: jj_3_2(); break; + case 2: jj_3_3(); break; + case 3: jj_3_4(); break; + case 4: jj_3_5(); break; + case 5: jj_3_6(); break; + case 6: jj_3_7(); break; + case 7: jj_3_8(); break; + case 8: jj_3_9(); break; + case 9: jj_3_10(); break; + case 10: jj_3_11(); break; + case 11: jj_3_12(); break; + case 12: jj_3_13(); break; + case 13: jj_3_14(); break; + case 14: jj_3_15(); break; + case 15: jj_3_16(); break; + case 16: jj_3_17(); break; + case 17: jj_3_18(); break; + case 18: jj_3_19(); break; + } + } + p = p.next; + } while (p != null); + + } catch(LookaheadSuccess ls) { } + } + jj_rescan = false; + } + + private void jj_save(int index, int xla) { + JJCalls p = jj_2_rtns[index]; + while (p.gen > jj_gen) { + if (p.next == null) { p = p.next = new JJCalls(); break; } + p = p.next; + } + + p.gen = jj_gen + xla - jj_la; + p.first = token; + p.arg = xla; } static final class JJCalls { - int gen; - Token first; - int arg; - JJCalls next; + int gen; + Token first; + int arg; + JJCalls next; } } diff --git a/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/SparqlCoreseConstants.java b/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/SparqlCoreseConstants.java index 481c07bcd..924231e04 100644 --- a/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/SparqlCoreseConstants.java +++ b/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/SparqlCoreseConstants.java @@ -1,263 +1,522 @@ /* Generated By:JavaCC: Do not edit this line. SparqlCoreseConstants.java */ package fr.inria.corese.sparql.triple.javacc1; + +/** + * Token literal values and constants. + * Generated by org.javacc.parser.OtherFilesGen#start() + */ public interface SparqlCoreseConstants { + /** End of File. */ int EOF = 0; + /** RegularExpression Id. 
*/ int WS = 6; + /** RegularExpression Id. */ int SINGLE_LINE_COMMENT = 7; + /** RegularExpression Id. */ int Q_IRIref = 8; + /** RegularExpression Id. */ int QNAME_NS = 9; + /** RegularExpression Id. */ int QNAME = 10; + /** RegularExpression Id. */ int BLANK_NODE_LABEL = 11; + /** RegularExpression Id. */ int VAR1 = 12; + /** RegularExpression Id. */ int VAR2 = 13; + /** RegularExpression Id. */ int ABASE = 14; + /** RegularExpression Id. */ int APREFIX = 15; + /** RegularExpression Id. */ int ATLIST = 16; + /** RegularExpression Id. */ int ATPATH = 17; + /** RegularExpression Id. */ int LANGTAG = 18; + /** RegularExpression Id. */ int A2Z = 19; + /** RegularExpression Id. */ int A2ZN = 20; + /** RegularExpression Id. */ int KW_A = 21; + /** RegularExpression Id. */ int BASE = 22; + /** RegularExpression Id. */ int PREFIX = 23; + /** RegularExpression Id. */ int SELECT = 24; + /** RegularExpression Id. */ int DESCRIBE = 25; + /** RegularExpression Id. */ int CONSTRUCT = 26; + /** RegularExpression Id. */ int RULE = 27; + /** RegularExpression Id. */ int ASK = 28; + /** RegularExpression Id. */ int TEMPLATE = 29; + /** RegularExpression Id. */ int DISTINCT = 30; + /** RegularExpression Id. */ int SEPARATOR = 31; + /** RegularExpression Id. */ int REDUCED = 32; + /** RegularExpression Id. */ int LIMIT = 33; + /** RegularExpression Id. */ int OFFSET = 34; + /** RegularExpression Id. */ int ORDER = 35; + /** RegularExpression Id. */ int BY = 36; + /** RegularExpression Id. */ int RELAX = 37; + /** RegularExpression Id. */ int ASC = 38; + /** RegularExpression Id. */ int DESC = 39; + /** RegularExpression Id. */ int NAMED = 40; + /** RegularExpression Id. */ int FROM = 41; + /** RegularExpression Id. */ int WHERE = 42; + /** RegularExpression Id. */ int GRAPH = 43; + /** RegularExpression Id. */ int SERVICE = 44; + /** RegularExpression Id. */ int OPTIONAL = 45; + /** RegularExpression Id. */ int UNION = 46; + /** RegularExpression Id. */ int MINUSP = 47; + /** RegularExpression Id. */ int NOT = 48; + /** RegularExpression Id. */ int SCOPE = 49; + /** RegularExpression Id. */ int TRY = 50; + /** RegularExpression Id. */ int CATCH = 51; + /** RegularExpression Id. */ int THROW = 52; + /** RegularExpression Id. */ int STOP = 53; + /** RegularExpression Id. */ int LET = 54; + /** RegularExpression Id. */ int LETDYN = 55; + /** RegularExpression Id. */ int SET = 56; + /** RegularExpression Id. */ int STATIC = 57; + /** RegularExpression Id. */ int FOR = 58; + /** RegularExpression Id. */ int LOOP = 59; + /** RegularExpression Id. */ int IF = 60; + /** RegularExpression Id. */ int THEN = 61; + /** RegularExpression Id. */ int ELSE = 62; + /** RegularExpression Id. */ int EXIST = 63; + /** RegularExpression Id. */ int FILTER = 64; + /** RegularExpression Id. */ int LOAD = 65; + /** RegularExpression Id. */ int CLEAR = 66; + /** RegularExpression Id. */ int DROP = 67; + /** RegularExpression Id. */ int CREATE = 68; + /** RegularExpression Id. */ int ADD = 69; + /** RegularExpression Id. */ int TO = 70; + /** RegularExpression Id. */ int MOVE = 71; + /** RegularExpression Id. */ int COPY = 72; + /** RegularExpression Id. */ int INSERT = 73; + /** RegularExpression Id. */ int DELETE = 74; + /** RegularExpression Id. */ int WITH = 75; + /** RegularExpression Id. */ int USING = 76; + /** RegularExpression Id. */ int DEFAUT = 77; + /** RegularExpression Id. */ int ALL = 78; + /** RegularExpression Id. */ int INTO = 79; + /** RegularExpression Id. */ int SILENT = 80; + /** RegularExpression Id. 
*/ int DATA = 81; + /** RegularExpression Id. */ int ERROR = 82; + /** RegularExpression Id. */ int RETURN = 83; + /** RegularExpression Id. */ int AGGREGATE = 84; + /** RegularExpression Id. */ int UNNEST = 85; + /** RegularExpression Id. */ int MAP = 86; + /** RegularExpression Id. */ int MAPLIST = 87; + /** RegularExpression Id. */ int MAPFIND = 88; + /** RegularExpression Id. */ int MAPFINDLIST = 89; + /** RegularExpression Id. */ int MAPMERGE = 90; + /** RegularExpression Id. */ int MAPEVERY = 91; + /** RegularExpression Id. */ int MAPANY = 92; + /** RegularExpression Id. */ int FUNCALL = 93; + /** RegularExpression Id. */ int EVAL = 94; + /** RegularExpression Id. */ int METHOD = 95; + /** RegularExpression Id. */ int APPLY = 96; + /** RegularExpression Id. */ int REDUCE = 97; + /** RegularExpression Id. */ int SELF = 98; + /** RegularExpression Id. */ int BOUND = 99; + /** RegularExpression Id. */ int COALESCE = 100; + /** RegularExpression Id. */ int SAMETERM = 101; + /** RegularExpression Id. */ int STR = 102; + /** RegularExpression Id. */ int STRDT = 103; + /** RegularExpression Id. */ int STRLANG = 104; + /** RegularExpression Id. */ int BNODE = 105; + /** RegularExpression Id. */ int URI = 106; + /** RegularExpression Id. */ int IRI = 107; + /** RegularExpression Id. */ int UUID = 108; + /** RegularExpression Id. */ int STRUUID = 109; + /** RegularExpression Id. */ int DTYPE = 110; + /** RegularExpression Id. */ int LANG = 111; + /** RegularExpression Id. */ int LANGMATCHES = 112; + /** RegularExpression Id. */ int CONTAINS = 113; + /** RegularExpression Id. */ int STRSTARTS = 114; + /** RegularExpression Id. */ int STRENDS = 115; + /** RegularExpression Id. */ int STRLEN = 116; + /** RegularExpression Id. */ int SUBSTR = 117; + /** RegularExpression Id. */ int UCASE = 118; + /** RegularExpression Id. */ int LCASE = 119; + /** RegularExpression Id. */ int STRBEFORE = 120; + /** RegularExpression Id. */ int STRAFTER = 121; + /** RegularExpression Id. */ int ENCODE_FOR_URI = 122; + /** RegularExpression Id. */ int CONCAT = 123; + /** RegularExpression Id. */ int REPLACE = 124; + /** RegularExpression Id. */ int IS_URI = 125; + /** RegularExpression Id. */ int IS_IRI = 126; + /** RegularExpression Id. */ int IS_BLANK = 127; + /** RegularExpression Id. */ int IS_LITERAL = 128; + /** RegularExpression Id. */ int IS_NUMERIC = 129; + /** RegularExpression Id. */ int IS_EXTENSION = 130; + /** RegularExpression Id. */ int IS_SAFE = 131; + /** RegularExpression Id. */ int IS_UNDEFINED = 132; + /** RegularExpression Id. */ int IS_WELLFORMED = 133; + /** RegularExpression Id. */ int REGEX = 134; + /** RegularExpression Id. */ int RAND = 135; + /** RegularExpression Id. */ int ROUND = 136; + /** RegularExpression Id. */ int FLOOR = 137; + /** RegularExpression Id. */ int CEIL = 138; + /** RegularExpression Id. */ int POWER = 139; + /** RegularExpression Id. */ int ABS = 140; + /** RegularExpression Id. */ int NOW = 141; + /** RegularExpression Id. */ int YEAR = 142; + /** RegularExpression Id. */ int MONTH = 143; + /** RegularExpression Id. */ int DAY = 144; + /** RegularExpression Id. */ int HOURS = 145; + /** RegularExpression Id. */ int MINUTES = 146; + /** RegularExpression Id. */ int SECONDS = 147; + /** RegularExpression Id. */ int TIMEZONE = 148; + /** RegularExpression Id. */ int TZ = 149; + /** RegularExpression Id. */ int MD5 = 150; + /** RegularExpression Id. */ int SHA1 = 151; + /** RegularExpression Id. */ int SHA256 = 152; + /** RegularExpression Id. 
*/ int SHA384 = 153; + /** RegularExpression Id. */ int SHA512 = 154; + /** RegularExpression Id. */ int GROUP_CONCAT = 155; + /** RegularExpression Id. */ int SUM = 156; + /** RegularExpression Id. */ int SAMPLE = 157; + /** RegularExpression Id. */ int AVG = 158; + /** RegularExpression Id. */ int MIN = 159; + /** RegularExpression Id. */ int MAX = 160; + /** RegularExpression Id. */ int TRUE = 161; + /** RegularExpression Id. */ int FALSE = 162; + /** RegularExpression Id. */ int S_MORE = 163; + /** RegularExpression Id. */ int PRAGMA = 164; + /** RegularExpression Id. */ int GROUP = 165; + /** RegularExpression Id. */ int FORMAT = 166; + /** RegularExpression Id. */ int BOX = 167; + /** RegularExpression Id. */ int IBOX = 168; + /** RegularExpression Id. */ int SBOX = 169; + /** RegularExpression Id. */ int HAVING = 170; + /** RegularExpression Id. */ int VALUES = 171; + /** RegularExpression Id. */ int BIND = 172; + /** RegularExpression Id. */ int UNDEF = 173; + /** RegularExpression Id. */ int COUNT = 174; + /** RegularExpression Id. */ int SCORE = 175; + /** RegularExpression Id. */ int AS = 176; + /** RegularExpression Id. */ int SORTED = 177; + /** RegularExpression Id. */ int ALL2 = 178; + /** RegularExpression Id. */ int CURRENT = 179; + /** RegularExpression Id. */ int DIRECT2 = 180; + /** RegularExpression Id. */ int COLON2 = 181; + /** RegularExpression Id. */ int TUPLE = 182; + /** RegularExpression Id. */ int TRIPLE = 183; + /** RegularExpression Id. */ int SUBJECT = 184; + /** RegularExpression Id. */ int PREDICATE = 185; + /** RegularExpression Id. */ int OBJECT = 186; + /** RegularExpression Id. */ int IS_TRIPLE = 187; + /** RegularExpression Id. */ int SPARQL_COMPARE = 188; + /** RegularExpression Id. */ int XPATH = 189; + /** RegularExpression Id. */ int FUNCTION = 190; + /** RegularExpression Id. */ int LAMBDA = 191; + /** RegularExpression Id. */ int QUERY = 192; + /** RegularExpression Id. */ int PACKAGE = 193; + /** RegularExpression Id. */ int IN = 194; + /** RegularExpression Id. */ int EQ2 = 195; + /** RegularExpression Id. */ int NE2 = 196; + /** RegularExpression Id. */ int BEGIN_WITH = 197; + /** RegularExpression Id. */ int SPEC = 198; + /** RegularExpression Id. */ int SAME = 199; + /** RegularExpression Id. */ int GENERALISATION = 200; + /** RegularExpression Id. */ int STRICT_GENERALISATION = 201; + /** RegularExpression Id. */ int EQ_LANG = 202; + /** RegularExpression Id. */ int INTEGER = 203; + /** RegularExpression Id. */ int DECIMAL = 204; + /** RegularExpression Id. */ int DOUBLE = 205; + /** RegularExpression Id. */ int EXPONENT = 206; + /** RegularExpression Id. */ int QUOTE_3D = 207; + /** RegularExpression Id. */ int QUOTE_3S = 208; + /** RegularExpression Id. */ int ECHAR = 209; + /** RegularExpression Id. */ int STRING_LITERAL1 = 210; + /** RegularExpression Id. */ int STRING_LITERAL2 = 211; + /** RegularExpression Id. */ int STRING_LITERAL_LONG1 = 212; + /** RegularExpression Id. */ int STRING_LITERAL_LONG2 = 213; + /** RegularExpression Id. */ int DIGITS = 214; + /** RegularExpression Id. */ int HEX = 215; + /** RegularExpression Id. */ int LPAREN = 216; + /** RegularExpression Id. */ int RPAREN = 217; + /** RegularExpression Id. */ int LBRACE = 218; + /** RegularExpression Id. */ int RBRACE = 219; + /** RegularExpression Id. */ int LBRACKET = 220; + /** RegularExpression Id. */ int RBRACKET = 221; + /** RegularExpression Id. */ int ANON = 222; + /** RegularExpression Id. */ int SEMICOLON = 223; + /** RegularExpression Id. 
*/ int COMMA = 224; + /** RegularExpression Id. */ int DOT = 225; + /** RegularExpression Id. */ int EQ = 226; + /** RegularExpression Id. */ int NE = 227; + /** RegularExpression Id. */ int LTLT = 228; + /** RegularExpression Id. */ int GTGT = 229; + /** RegularExpression Id. */ int LT = 230; + /** RegularExpression Id. */ int GT = 231; + /** RegularExpression Id. */ int LE = 232; + /** RegularExpression Id. */ int GE = 233; + /** RegularExpression Id. */ int BANG = 234; + /** RegularExpression Id. */ int TILDE = 235; + /** RegularExpression Id. */ int COLON = 236; + /** RegularExpression Id. */ int SC_OR = 237; + /** RegularExpression Id. */ int SC_AND = 238; + /** RegularExpression Id. */ int PLUS = 239; + /** RegularExpression Id. */ int MINUS = 240; + /** RegularExpression Id. */ int STAR = 241; + /** RegularExpression Id. */ int SLASH = 242; + /** RegularExpression Id. */ int QM = 243; + /** RegularExpression Id. */ int BAR = 244; + /** RegularExpression Id. */ int DATATYPE = 245; + /** RegularExpression Id. */ int AT = 246; + /** RegularExpression Id. */ int VAR3 = 247; + /** RegularExpression Id. */ int NCCHAR1p = 248; + /** RegularExpression Id. */ int NCCHAR1 = 249; + /** RegularExpression Id. */ int NCCHAR = 250; + /** RegularExpression Id. */ int NCNAME_PREFIX = 251; + /** RegularExpression Id. */ int NCNAME = 252; + /** RegularExpression Id. */ int BLANKNAME = 253; + /** RegularExpression Id. */ int VARNAME = 254; + /** RegularExpression Id. */ int LOCAL_ESC = 255; + /** RegularExpression Id. */ int PERCENT = 256; + /** Lexical state. */ int DEFAULT = 0; + /** Literal token values. */ String[] tokenImage = { "", "\" \"", diff --git a/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/SparqlCoreseTokenManager.java b/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/SparqlCoreseTokenManager.java index aa62c7af1..b73ae089d 100644 --- a/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/SparqlCoreseTokenManager.java +++ b/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/SparqlCoreseTokenManager.java @@ -1,3 +1,4 @@ +/* SparqlCoreseTokenManager.java */ /* Generated By:JavaCC: Do not edit this line. SparqlCoreseTokenManager.java */ package fr.inria.corese.sparql.triple.javacc1; import org.slf4j.Logger; @@ -13,21 +14,24 @@ import fr.inria.corese.sparql.api.IDatatype; import fr.inria.corese.sparql.datatype.DatatypeMap; -public class SparqlCoreseTokenManager implements SparqlCoreseConstants -{ +/** Token Manager. */ +@SuppressWarnings ("unused") +public class SparqlCoreseTokenManager implements SparqlCoreseConstants { void CommonTokenAction(Token token) { //System.out.println(token+" "); } + + /** Debug output. */ public java.io.PrintStream debugStream = System.out; + /** Set debug output. 
*/ public void setDebugStream(java.io.PrintStream ds) { debugStream = ds; } -private final int jjStopAtPos(int pos, int kind) +private int jjStopAtPos(int pos, int kind) { jjmatchedKind = kind; jjmatchedPos = pos; return pos + 1; } -private final int jjMoveStringLiteralDfa0_0() -{ +private int jjMoveStringLiteralDfa0_0(){ switch(curChar) { case 9: @@ -213,8 +217,7 @@ private final int jjMoveStringLiteralDfa0_0() return jjMoveNfa_0(0, 0); } } -private final int jjMoveStringLiteralDfa1_0(long active0, long active1, long active2, long active3) -{ +private int jjMoveStringLiteralDfa1_0(long active0, long active1, long active2, long active3){ try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { return jjMoveNfa_0(0, 0); @@ -449,8 +452,7 @@ else if ((active3 & 0x20000000000L) != 0L) } return jjMoveNfa_0(0, 1); } -private final int jjMoveStringLiteralDfa2_0(long old0, long active0, long old1, long active1, long old2, long active2, long old3, long active3) -{ +private int jjMoveStringLiteralDfa2_0(long old0, long active0, long old1, long active1, long old2, long active2, long old3, long active3){ if (((active0 &= old0) | (active1 &= old1) | (active2 &= old2) | (active3 &= old3)) == 0L) return jjMoveNfa_0(0, 1); try { curChar = input_stream.readChar(); } @@ -792,8 +794,7 @@ else if ((active2 & 0x10000L) != 0L) } return jjMoveNfa_0(0, 2); } -private final int jjMoveStringLiteralDfa3_0(long old0, long active0, long old1, long active1, long old2, long active2, long old3, long active3) -{ +private int jjMoveStringLiteralDfa3_0(long old0, long active0, long old1, long active1, long old2, long active2, long old3, long active3){ if (((active0 &= old0) | (active1 &= old1) | (active2 &= old2) | (active3 &= old3)) == 0L) return jjMoveNfa_0(0, 2); try { curChar = input_stream.readChar(); } @@ -1194,8 +1195,7 @@ else if ((active2 & 0x20000000000L) != 0L) } return jjMoveNfa_0(0, 3); } -private final int jjMoveStringLiteralDfa4_0(long old0, long active0, long old1, long active1, long old2, long active2, long old3, long active3) -{ +private int jjMoveStringLiteralDfa4_0(long old0, long active0, long old1, long active1, long old2, long active2, long old3, long active3){ if (((active0 &= old0) | (active1 &= old1) | (active2 &= old2) | (active3 &= old3)) == 0L) return jjMoveNfa_0(0, 3); try { curChar = input_stream.readChar(); } @@ -1689,8 +1689,7 @@ else if ((active3 & 0x1L) != 0L) } return jjMoveNfa_0(0, 4); } -private final int jjMoveStringLiteralDfa5_0(long old0, long active0, long old1, long active1, long old2, long active2, long old3, long active3) -{ +private int jjMoveStringLiteralDfa5_0(long old0, long active0, long old1, long active1, long old2, long active2, long old3, long active3){ if (((active0 &= old0) | (active1 &= old1) | (active2 &= old2) | (active3 &= old3)) == 0L) return jjMoveNfa_0(0, 4); try { curChar = input_stream.readChar(); } @@ -2071,8 +2070,7 @@ else if ((active2 & 0x400000000000000L) != 0L) } return jjMoveNfa_0(0, 5); } -private final int jjMoveStringLiteralDfa6_0(long old0, long active0, long old1, long active1, long old2, long active2, long old3, long active3) -{ +private int jjMoveStringLiteralDfa6_0(long old0, long active0, long old1, long active1, long old2, long active2, long old3, long active3){ if (((active0 &= old0) | (active1 &= old1) | (active2 &= old2) | (active3 &= old3)) == 0L) return jjMoveNfa_0(0, 5); try { curChar = input_stream.readChar(); } @@ -2313,8 +2311,7 @@ else if ((active2 & 0x100000000000000L) != 0L) } return jjMoveNfa_0(0, 6); } -private 
final int jjMoveStringLiteralDfa7_0(long old0, long active0, long old1, long active1, long old2, long active2, long old3, long active3) -{ +private int jjMoveStringLiteralDfa7_0(long old0, long active0, long old1, long active1, long old2, long active2, long old3, long active3){ if (((active0 &= old0) | (active1 &= old1) | (active2 &= old2) | (active3 &= old3)) == 0L) return jjMoveNfa_0(0, 6); try { curChar = input_stream.readChar(); } @@ -2527,8 +2524,7 @@ else if ((active2 & 0x800000000000000L) != 0L) } return jjMoveNfa_0(0, 7); } -private final int jjMoveStringLiteralDfa8_0(long old0, long active0, long old1, long active1, long old2, long active2) -{ +private int jjMoveStringLiteralDfa8_0(long old0, long active0, long old1, long active1, long old2, long active2){ if (((active0 &= old0) | (active1 &= old1) | (active2 &= old2)) == 0L) return jjMoveNfa_0(0, 7); try { curChar = input_stream.readChar(); } @@ -2669,8 +2665,7 @@ else if ((active2 & 0x200000000000000L) != 0L) } return jjMoveNfa_0(0, 8); } -private final int jjMoveStringLiteralDfa9_0(long old0, long active0, long old1, long active1, long old2, long active2) -{ +private int jjMoveStringLiteralDfa9_0(long old0, long active0, long old1, long active1, long old2, long active2){ if (((active0 &= old0) | (active1 &= old1) | (active2 &= old2)) == 0L) return jjMoveNfa_0(0, 8); try { curChar = input_stream.readChar(); } @@ -2708,8 +2703,7 @@ private final int jjMoveStringLiteralDfa9_0(long old0, long active0, long old1, } return jjMoveNfa_0(0, 9); } -private final int jjMoveStringLiteralDfa10_0(long old1, long active1, long old2, long active2) -{ +private int jjMoveStringLiteralDfa10_0(long old1, long active1, long old2, long active2){ if (((active1 &= old1) | (active2 &= old2)) == 0L) return jjMoveNfa_0(0, 9); try { curChar = input_stream.readChar(); } @@ -2793,8 +2787,7 @@ private final int jjMoveStringLiteralDfa10_0(long old1, long active1, long old2, } return jjMoveNfa_0(0, 10); } -private final int jjMoveStringLiteralDfa11_0(long old1, long active1, long old2, long active2) -{ +private int jjMoveStringLiteralDfa11_0(long old1, long active1, long old2, long active2){ if (((active1 &= old1) | (active2 &= old2)) == 0L) return jjMoveNfa_0(0, 10); try { curChar = input_stream.readChar(); } @@ -2844,8 +2837,7 @@ private final int jjMoveStringLiteralDfa11_0(long old1, long active1, long old2, } return jjMoveNfa_0(0, 11); } -private final int jjMoveStringLiteralDfa12_0(long old1, long active1, long old2, long active2) -{ +private int jjMoveStringLiteralDfa12_0(long old1, long active1, long old2, long active2){ if (((active1 &= old1) | (active2 &= old2)) == 0L) return jjMoveNfa_0(0, 11); try { curChar = input_stream.readChar(); } @@ -2863,8 +2855,7 @@ private final int jjMoveStringLiteralDfa12_0(long old1, long active1, long old2, } return jjMoveNfa_0(0, 12); } -private final int jjMoveStringLiteralDfa13_0(long old1, long active1, long old2, long active2) -{ +private int jjMoveStringLiteralDfa13_0(long old1, long active1, long old2, long active2){ if (((active1 &= old1) | (active2 &= old2)) == 0L) return jjMoveNfa_0(0, 12); try { curChar = input_stream.readChar(); } @@ -2906,36 +2897,6 @@ private final int jjMoveStringLiteralDfa13_0(long old1, long active1, long old2, } return jjMoveNfa_0(0, 13); } -private final void jjCheckNAdd(int state) -{ - if (jjrounds[state] != jjround) - { - jjstateSet[jjnewStateCnt++] = state; - jjrounds[state] = jjround; - } -} -private final void jjAddStates(int start, int end) -{ - do { - 
jjstateSet[jjnewStateCnt++] = jjnextStates[start]; - } while (start++ != end); -} -private final void jjCheckNAddTwoStates(int state1, int state2) -{ - jjCheckNAdd(state1); - jjCheckNAdd(state2); -} -private final void jjCheckNAddStates(int start, int end) -{ - do { - jjCheckNAdd(jjnextStates[start]); - } while (start++ != end); -} -private final void jjCheckNAddStates(int start) -{ - jjCheckNAdd(jjnextStates[start]); - jjCheckNAdd(jjnextStates[start + 1]); -} static final long[] jjbitVec0 = { 0xfffffffffffffffeL, 0xffffffffffffffffL, 0xffffffffffffffffL, 0xffffffffffffffffL }; @@ -2975,7 +2936,7 @@ private final void jjCheckNAddStates(int start) static final long[] jjbitVec13 = { 0x8000000000003000L, 0xffff000000000001L, 0xffffffffffffffffL, 0xffffffffffffffffL }; -private final int jjMoveNfa_0(int startState, int curPos) +private int jjMoveNfa_0(int startState, int curPos) { int strKind = jjmatchedKind; int strPos = jjmatchedPos; @@ -2984,12 +2945,11 @@ private final int jjMoveNfa_0(int startState, int curPos) try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { throw new Error("Internal Error"); } curPos = 0; - int[] nextStates; int startsAt = 0; - jjnewStateCnt = 115; + jjnewStateCnt = 180; int i = 1; jjstateSet[0] = startState; - int j, kind = 0x7fffffff; + int kind = 0x7fffffff; for (;;) { if (++jjround == 0x7fffffff) @@ -2997,7 +2957,7 @@ private final int jjMoveNfa_0(int startState, int curPos) if (curChar < 64) { long l = 1L << curChar; - MatchLoop: do + do { switch(jjstateSet[--i]) { @@ -3006,31 +2966,31 @@ private final int jjMoveNfa_0(int startState, int curPos) { if (kind > 203) kind = 203; - jjCheckNAddStates(0, 7); + { jjCheckNAddStates(0, 7); } } else if (curChar == 46) - jjCheckNAddTwoStates(114, 96); + { jjCheckNAddTwoStates(179, 161); } else if (curChar == 45) - jjCheckNAddStates(8, 11); + { jjCheckNAddStates(8, 11); } else if (curChar == 58) { if (kind > 9) kind = 9; - jjCheckNAddTwoStates(77, 78); + { jjCheckNAddTwoStates(142, 143); } } else if (curChar == 34) - jjstateSet[jjnewStateCnt++] = 62; + jjstateSet[jjnewStateCnt++] = 127; else if (curChar == 39) - jjstateSet[jjnewStateCnt++] = 51; + jjstateSet[jjnewStateCnt++] = 103; else if (curChar == 36) jjstateSet[jjnewStateCnt++] = 24; else if (curChar == 60) - jjCheckNAddTwoStates(6, 7); + { jjCheckNAddTwoStates(6, 7); } else if (curChar == 35) { if (kind > 7) kind = 7; - jjCheckNAddStates(12, 14); + { jjCheckNAddStates(12, 14); } } else if (curChar == 63) jjstateSet[jjnewStateCnt++] = 21; @@ -3038,12 +2998,12 @@ else if (curChar == 63) { if (kind > 247) kind = 247; - jjCheckNAdd(69); + { jjCheckNAdd(134); } } else if (curChar == 34) - jjCheckNAddStates(15, 17); + { jjCheckNAddStates(15, 17); } else if (curChar == 39) - jjCheckNAddStates(18, 20); + { jjCheckNAddStates(18, 20); } if ((0x3ff000000000000L & l) != 0L) { if (kind > 215) @@ -3055,7 +3015,7 @@ else if (curChar == 39) break; if (kind > 7) kind = 7; - jjCheckNAddStates(12, 14); + { jjCheckNAddStates(12, 14); } break; case 2: if ((0x2400L & l) != 0L && kind > 7) @@ -3071,11 +3031,11 @@ else if (curChar == 39) break; case 5: if (curChar == 60) - jjCheckNAddTwoStates(6, 7); + { jjCheckNAddTwoStates(6, 7); } break; case 6: if ((0xaffffffe00000000L & l) != 0L) - jjCheckNAddTwoStates(6, 7); + { jjCheckNAddTwoStates(6, 7); } break; case 7: if (curChar == 62 && kind > 8) @@ -3083,30 +3043,30 @@ else if (curChar == 39) break; case 8: if (curChar == 58) - jjAddStates(21, 22); + { jjAddStates(21, 22); } break; case 9: if ((0x3ff000000000000L & l) == 
0L) break; if (kind > 11) kind = 11; - jjCheckNAddTwoStates(10, 11); + { jjCheckNAddTwoStates(10, 11); } break; case 10: if (curChar == 46) - jjCheckNAddTwoStates(10, 11); + { jjCheckNAddTwoStates(10, 11); } break; case 12: if ((0x3ff600000000000L & l) != 0L) - jjAddStates(23, 24); + { jjAddStates(23, 24); } break; case 13: if ((0x3ff200000000000L & l) != 0L) - jjCheckNAddTwoStates(10, 11); + { jjCheckNAddTwoStates(10, 11); } break; case 14: if ((0x3ff600000000000L & l) != 0L) - jjAddStates(25, 26); + { jjAddStates(25, 26); } break; case 15: if ((0x3ff200000000000L & l) != 0L && kind > 11) @@ -3114,14 +3074,14 @@ else if (curChar == 39) break; case 17: if ((0x3ff600000000000L & l) != 0L) - jjAddStates(27, 28); + { jjAddStates(27, 28); } break; case 18: if ((0x3ff200000000000L & l) == 0L) break; if (kind > 11) kind = 11; - jjCheckNAddTwoStates(10, 11); + { jjCheckNAddTwoStates(10, 11); } break; case 20: if (curChar == 63) @@ -3133,7 +3093,7 @@ else if (curChar == 39) break; if (kind > 12) kind = 12; - jjCheckNAdd(22); + { jjCheckNAdd(22); } break; case 23: if (curChar == 36) @@ -3145,336 +3105,541 @@ else if (curChar == 39) break; if (kind > 13) kind = 13; - jjCheckNAdd(25); + { jjCheckNAdd(25); } break; case 28: if (curChar == 45) - jjCheckNAdd(29); + { jjCheckNAdd(29); } break; case 29: if ((0x3ff000000000000L & l) == 0L) break; if (kind > 18) kind = 18; - jjCheckNAddTwoStates(28, 29); + { jjCheckNAddTwoStates(28, 29); } break; case 31: if ((0x8400000000L & l) != 0L && kind > 209) kind = 209; break; - case 32: - if (curChar == 39) - jjCheckNAddStates(18, 20); - break; case 33: - if ((0xffffff7fffffdbffL & l) != 0L) - jjCheckNAddStates(18, 20); + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 34; + break; + case 34: + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 35; break; case 35: - if ((0x8400000000L & l) != 0L) - jjCheckNAddStates(18, 20); + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 36; break; case 36: - if (curChar == 39 && kind > 210) - kind = 210; + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 37; break; case 37: - if (curChar == 34) - jjCheckNAddStates(15, 17); + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 38; break; case 38: - if ((0xfffffffbffffdbffL & l) != 0L) - jjCheckNAddStates(15, 17); + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 39; break; - case 40: - if ((0x8400000000L & l) != 0L) - jjCheckNAddStates(15, 17); + case 39: + case 44: + if ((0x3ff000000000000L & l) != 0L) + { jjCheckNAdd(40); } break; - case 41: - if (curChar == 34 && kind > 211) - kind = 211; + case 40: + if ((0x3ff000000000000L & l) != 0L && kind > 209) + kind = 209; break; case 42: - if (curChar == 39) - jjCheckNAddStates(29, 32); + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 43; break; case 43: - case 48: - if (curChar == 39) - jjCheckNAddTwoStates(44, 45); + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 44; break; - case 44: - if ((0xffffff7fffffffffL & l) != 0L) - jjCheckNAddStates(29, 32); + case 45: + if (curChar == 39) + { jjCheckNAddStates(18, 20); } break; case 46: - if ((0x8400000000L & l) != 0L) - jjCheckNAddStates(29, 32); + if ((0xffffff7fffffdbffL & l) != 0L) + { jjCheckNAddStates(18, 20); } break; - case 47: - if (curChar == 39) - jjAddStates(33, 34); + case 48: + if ((0x8400000000L & l) != 0L) + { jjCheckNAddStates(18, 20); } break; case 49: - if (curChar == 39 && kind > 212) - kind = 212; - break; - case 
50: - if (curChar == 39) - jjstateSet[jjnewStateCnt++] = 49; + if (curChar == 39 && kind > 210) + kind = 210; break; case 51: - if (curChar == 39) - jjstateSet[jjnewStateCnt++] = 42; + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 52; break; case 52: - if (curChar == 39) - jjstateSet[jjnewStateCnt++] = 51; + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 53; break; case 53: - if (curChar == 34) - jjCheckNAddStates(35, 38); + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 54; break; case 54: - case 59: - if (curChar == 34) - jjCheckNAddTwoStates(55, 56); + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 55; break; case 55: - if ((0xfffffffbffffffffL & l) != 0L) - jjCheckNAddStates(35, 38); + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 56; + break; + case 56: + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 57; break; case 57: - if ((0x8400000000L & l) != 0L) - jjCheckNAddStates(35, 38); + case 62: + if ((0x3ff000000000000L & l) != 0L) + { jjCheckNAdd(58); } break; case 58: - if (curChar == 34) - jjAddStates(39, 40); + if ((0x3ff000000000000L & l) != 0L) + { jjCheckNAddStates(18, 20); } break; case 60: - if (curChar == 34 && kind > 213) - kind = 213; + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 61; break; case 61: - if (curChar == 34) - jjstateSet[jjnewStateCnt++] = 60; - break; - case 62: - if (curChar == 34) - jjstateSet[jjnewStateCnt++] = 53; + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 62; break; case 63: if (curChar == 34) - jjstateSet[jjnewStateCnt++] = 62; + { jjCheckNAddStates(15, 17); } break; case 64: - if ((0x3ff000000000000L & l) != 0L && kind > 215) - kind = 215; + if ((0xfffffffbffffdbffL & l) != 0L) + { jjCheckNAddStates(15, 17); } break; case 66: - if ((0x100003600L & l) != 0L) - jjAddStates(41, 42); + if ((0x8400000000L & l) != 0L) + { jjCheckNAddStates(15, 17); } + break; + case 67: + if (curChar == 34 && kind > 211) + kind = 211; break; - case 68: case 69: - if ((0x3ff000000000000L & l) == 0L) - break; - if (kind > 247) - kind = 247; - jjCheckNAdd(69); + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 70; + break; + case 70: + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 71; break; case 71: - if ((0x3ff600000000000L & l) != 0L) - jjAddStates(43, 44); + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 72; break; case 72: - if ((0x3ff200000000000L & l) != 0L) + if ((0x3ff000000000000L & l) != 0L) jjstateSet[jjnewStateCnt++] = 73; break; case 73: - if (curChar == 58 && kind > 9) - kind = 9; + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 74; break; case 74: - if ((0x3ff600000000000L & l) != 0L) - jjAddStates(45, 46); + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 75; break; case 75: - if ((0x3ff200000000000L & l) != 0L) - jjstateSet[jjnewStateCnt++] = 76; + case 80: + if ((0x3ff000000000000L & l) != 0L) + { jjCheckNAdd(76); } break; case 76: - if (curChar != 58) - break; - if (kind > 10) - kind = 10; - jjCheckNAddTwoStates(77, 78); + if ((0x3ff000000000000L & l) != 0L) + { jjCheckNAddStates(15, 17); } break; - case 77: - if ((0x3ff000000000000L & l) == 0L) - break; - if (kind > 10) - kind = 10; - jjCheckNAddTwoStates(77, 78); + case 78: + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 79; break; case 79: - if ((0x7ff600000000000L & l) != 0L) - jjCheckNAddStates(47, 50); + if 
((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 80; break; - case 80: - if ((0x7ff200000000000L & l) != 0L && kind > 10) - kind = 10; + case 81: + if (curChar == 39) + { jjCheckNAddStates(29, 32); } break; case 82: - if ((0xa800ff7e00000000L & l) != 0L) - jjCheckNAddStates(47, 50); + case 87: + if (curChar == 39) + { jjCheckNAddTwoStates(83, 84); } break; case 83: - if (curChar == 37) - jjAddStates(51, 52); - break; - case 84: - if ((0x3ff000000000000L & l) != 0L) - jjstateSet[jjnewStateCnt++] = 85; + if ((0xffffff7fffffffffL & l) != 0L) + { jjCheckNAddStates(29, 32); } break; case 85: - if ((0x3ff000000000000L & l) != 0L) - jjCheckNAddStates(47, 50); + if ((0x8400000000L & l) != 0L) + { jjCheckNAddStates(29, 32); } break; case 86: - if ((0x3ff000000000000L & l) != 0L) - jjstateSet[jjnewStateCnt++] = 87; - break; - case 87: - if ((0x3ff000000000000L & l) != 0L && kind > 10) - kind = 10; + if (curChar == 39) + { jjAddStates(33, 34); } break; case 88: - if ((0xa800ff7e00000000L & l) != 0L && kind > 10) - kind = 10; + if (curChar == 39 && kind > 212) + kind = 212; break; case 89: - if (curChar != 58) - break; - if (kind > 9) - kind = 9; - jjCheckNAddTwoStates(77, 78); - break; - case 90: - if (curChar == 45) - jjCheckNAddStates(8, 11); + if (curChar == 39) + jjstateSet[jjnewStateCnt++] = 88; break; case 91: - if ((0x3ff000000000000L & l) == 0L) - break; - if (kind > 203) - kind = 203; - jjCheckNAdd(91); + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 92; break; case 92: if ((0x3ff000000000000L & l) != 0L) - jjCheckNAddTwoStates(92, 93); + jjstateSet[jjnewStateCnt++] = 93; break; case 93: - if (curChar != 46) - break; - if (kind > 204) - kind = 204; - jjCheckNAdd(94); + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 94; break; case 94: - if ((0x3ff000000000000L & l) == 0L) - break; - if (kind > 204) - kind = 204; - jjCheckNAdd(94); + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 95; break; case 95: - if (curChar == 46) - jjCheckNAdd(96); + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 96; break; case 96: if ((0x3ff000000000000L & l) != 0L) - jjCheckNAddTwoStates(96, 97); + jjstateSet[jjnewStateCnt++] = 97; break; - case 98: - if ((0x280000000000L & l) != 0L) - jjCheckNAdd(99); + case 97: + case 102: + if ((0x3ff000000000000L & l) != 0L) + { jjCheckNAdd(98); } break; - case 99: - if ((0x3ff000000000000L & l) == 0L) - break; - if (kind > 205) - kind = 205; - jjCheckNAdd(99); + case 98: + if ((0x3ff000000000000L & l) != 0L) + { jjCheckNAddStates(29, 32); } break; case 100: if ((0x3ff000000000000L & l) != 0L) - jjCheckNAddStates(53, 56); + jjstateSet[jjnewStateCnt++] = 101; break; case 101: if ((0x3ff000000000000L & l) != 0L) - jjCheckNAddTwoStates(101, 102); - break; - case 102: - if (curChar == 46) - jjCheckNAddTwoStates(103, 104); + jjstateSet[jjnewStateCnt++] = 102; break; case 103: - if ((0x3ff000000000000L & l) != 0L) - jjCheckNAddTwoStates(103, 104); + if (curChar == 39) + jjstateSet[jjnewStateCnt++] = 81; + break; + case 104: + if (curChar == 39) + jjstateSet[jjnewStateCnt++] = 103; break; case 105: - if ((0x280000000000L & l) != 0L) - jjCheckNAdd(106); + if (curChar == 34) + { jjCheckNAddStates(35, 38); } break; case 106: - if ((0x3ff000000000000L & l) == 0L) - break; - if (kind > 205) - kind = 205; - jjCheckNAdd(106); + case 111: + if (curChar == 34) + { jjCheckNAddTwoStates(107, 108); } break; case 107: - if ((0x3ff000000000000L & l) != 0L) - jjCheckNAddTwoStates(107, 108); + if 
((0xfffffffbffffffffL & l) != 0L) + { jjCheckNAddStates(35, 38); } break; case 109: - if ((0x280000000000L & l) != 0L) - jjCheckNAdd(110); + if ((0x8400000000L & l) != 0L) + { jjCheckNAddStates(35, 38); } break; case 110: - if ((0x3ff000000000000L & l) == 0L) - break; - if (kind > 205) - kind = 205; - jjCheckNAdd(110); + if (curChar == 34) + { jjAddStates(39, 40); } break; - case 111: + case 112: + if (curChar == 34 && kind > 213) + kind = 213; + break; + case 113: + if (curChar == 34) + jjstateSet[jjnewStateCnt++] = 112; + break; + case 115: + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 116; + break; + case 116: + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 117; + break; + case 117: + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 118; + break; + case 118: + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 119; + break; + case 119: + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 120; + break; + case 120: + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 121; + break; + case 121: + case 126: + if ((0x3ff000000000000L & l) != 0L) + { jjCheckNAdd(122); } + break; + case 122: + if ((0x3ff000000000000L & l) != 0L) + { jjCheckNAddStates(35, 38); } + break; + case 124: + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 125; + break; + case 125: + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 126; + break; + case 127: + if (curChar == 34) + jjstateSet[jjnewStateCnt++] = 105; + break; + case 128: + if (curChar == 34) + jjstateSet[jjnewStateCnt++] = 127; + break; + case 129: + if ((0x3ff000000000000L & l) != 0L && kind > 215) + kind = 215; + break; + case 131: + if ((0x100003600L & l) != 0L) + { jjAddStates(41, 42); } + break; + case 133: + case 134: + if ((0x3ff000000000000L & l) == 0L) + break; + if (kind > 247) + kind = 247; + { jjCheckNAdd(134); } + break; + case 136: + if ((0x3ff600000000000L & l) != 0L) + { jjAddStates(43, 44); } + break; + case 137: + if ((0x3ff200000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 138; + break; + case 138: + if (curChar == 58 && kind > 9) + kind = 9; + break; + case 139: + if ((0x3ff600000000000L & l) != 0L) + { jjAddStates(45, 46); } + break; + case 140: + if ((0x3ff200000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 141; + break; + case 141: + if (curChar != 58) + break; + if (kind > 10) + kind = 10; + { jjCheckNAddTwoStates(142, 143); } + break; + case 142: + if ((0x3ff000000000000L & l) == 0L) + break; + if (kind > 10) + kind = 10; + { jjCheckNAddTwoStates(142, 143); } + break; + case 144: + if ((0x7ff600000000000L & l) != 0L) + { jjCheckNAddStates(47, 50); } + break; + case 145: + if ((0x7ff200000000000L & l) != 0L && kind > 10) + kind = 10; + break; + case 147: + if ((0xa800ff7e00000000L & l) != 0L) + { jjCheckNAddStates(47, 50); } + break; + case 148: + if (curChar == 37) + { jjAddStates(51, 52); } + break; + case 149: + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 150; + break; + case 150: + if ((0x3ff000000000000L & l) != 0L) + { jjCheckNAddStates(47, 50); } + break; + case 151: + if ((0x3ff000000000000L & l) != 0L) + jjstateSet[jjnewStateCnt++] = 152; + break; + case 152: + if ((0x3ff000000000000L & l) != 0L && kind > 10) + kind = 10; + break; + case 153: + if ((0xa800ff7e00000000L & l) != 0L && kind > 10) + kind = 10; + break; + case 154: + if (curChar != 58) + break; + if (kind > 9) + kind = 9; + { jjCheckNAddTwoStates(142, 143); } + break; + 
case 155: + if (curChar == 45) + { jjCheckNAddStates(8, 11); } + break; + case 156: if ((0x3ff000000000000L & l) == 0L) break; if (kind > 203) kind = 203; - jjCheckNAddStates(0, 7); + { jjCheckNAdd(156); } break; - case 112: + case 157: + if ((0x3ff000000000000L & l) != 0L) + { jjCheckNAddTwoStates(157, 158); } + break; + case 158: + if (curChar != 46) + break; + if (kind > 204) + kind = 204; + { jjCheckNAdd(159); } + break; + case 159: + if ((0x3ff000000000000L & l) == 0L) + break; + if (kind > 204) + kind = 204; + { jjCheckNAdd(159); } + break; + case 160: + if (curChar == 46) + { jjCheckNAdd(161); } + break; + case 161: + if ((0x3ff000000000000L & l) != 0L) + { jjCheckNAddTwoStates(161, 162); } + break; + case 163: + if ((0x280000000000L & l) != 0L) + { jjCheckNAdd(164); } + break; + case 164: + if ((0x3ff000000000000L & l) == 0L) + break; + if (kind > 205) + kind = 205; + { jjCheckNAdd(164); } + break; + case 165: + if ((0x3ff000000000000L & l) != 0L) + { jjCheckNAddStates(53, 56); } + break; + case 166: + if ((0x3ff000000000000L & l) != 0L) + { jjCheckNAddTwoStates(166, 167); } + break; + case 167: + if (curChar == 46) + { jjCheckNAddTwoStates(168, 169); } + break; + case 168: + if ((0x3ff000000000000L & l) != 0L) + { jjCheckNAddTwoStates(168, 169); } + break; + case 170: + if ((0x280000000000L & l) != 0L) + { jjCheckNAdd(171); } + break; + case 171: + if ((0x3ff000000000000L & l) == 0L) + break; + if (kind > 205) + kind = 205; + { jjCheckNAdd(171); } + break; + case 172: + if ((0x3ff000000000000L & l) != 0L) + { jjCheckNAddTwoStates(172, 173); } + break; + case 174: + if ((0x280000000000L & l) != 0L) + { jjCheckNAdd(175); } + break; + case 175: + if ((0x3ff000000000000L & l) == 0L) + break; + if (kind > 205) + kind = 205; + { jjCheckNAdd(175); } + break; + case 176: + if ((0x3ff000000000000L & l) == 0L) + break; + if (kind > 203) + kind = 203; + { jjCheckNAddStates(0, 7); } + break; + case 177: if ((0x3ff000000000000L & l) == 0L) break; if (kind > 214) kind = 214; - jjCheckNAdd(112); + { jjCheckNAdd(177); } break; - case 113: + case 178: if (curChar == 46) - jjCheckNAddTwoStates(114, 96); + { jjCheckNAddTwoStates(179, 161); } break; - case 114: + case 179: if ((0x3ff000000000000L & l) == 0L) break; if (kind > 204) kind = 204; - jjCheckNAdd(114); + { jjCheckNAdd(179); } break; default : break; } @@ -3483,7 +3648,7 @@ else if (curChar == 39) else if (curChar < 128) { long l = 1L << (curChar & 077); - MatchLoop: do + do { switch(jjstateSet[--i]) { @@ -3492,16 +3657,16 @@ else if (curChar < 128) { if (kind > 247) kind = 247; - jjCheckNAdd(69); + { jjCheckNAdd(134); } } else if (curChar == 91) - jjAddStates(41, 42); + { jjAddStates(41, 42); } else if (curChar == 92) - jjstateSet[jjnewStateCnt++] = 31; + { jjAddStates(57, 59); } else if (curChar == 64) - jjCheckNAdd(27); + { jjCheckNAdd(27); } if ((0x7fffffe07fffffeL & l) != 0L) - jjCheckNAddStates(57, 62); + { jjCheckNAddStates(60, 65); } else if (curChar == 95) jjstateSet[jjnewStateCnt++] = 8; if ((0x7e0000007eL & l) != 0L) @@ -3513,29 +3678,29 @@ else if (curChar == 95) case 1: if (kind > 7) kind = 7; - jjAddStates(12, 14); + { jjAddStates(12, 14); } break; case 6: - jjAddStates(63, 64); + { jjAddStates(66, 67); } break; case 11: if ((0x7fffffe87fffffeL & l) == 0L) break; if (kind > 11) kind = 11; - jjCheckNAddStates(65, 70); + { jjCheckNAddStates(68, 73); } break; case 12: if ((0x7fffffe87fffffeL & l) != 0L) - jjCheckNAddTwoStates(12, 13); + { jjCheckNAddTwoStates(12, 13); } break; case 13: if ((0x7fffffe87fffffeL & l) != 0L) - 
jjCheckNAddTwoStates(10, 11); + { jjCheckNAddTwoStates(10, 11); } break; case 14: if ((0x7fffffe87fffffeL & l) != 0L) - jjCheckNAddTwoStates(14, 15); + { jjCheckNAddTwoStates(14, 15); } break; case 15: if ((0x7fffffe87fffffeL & l) != 0L && kind > 11) @@ -3546,18 +3711,18 @@ else if (curChar == 95) break; if (kind > 11) kind = 11; - jjCheckNAddStates(71, 74); + { jjCheckNAddStates(74, 77); } break; case 17: if ((0x7fffffe87fffffeL & l) != 0L) - jjCheckNAddTwoStates(17, 18); + { jjCheckNAddTwoStates(17, 18); } break; case 18: if ((0x7fffffe87fffffeL & l) == 0L) break; if (kind > 11) kind = 11; - jjCheckNAddTwoStates(10, 11); + { jjCheckNAddTwoStates(10, 11); } break; case 19: if (curChar == 95) @@ -3569,7 +3734,7 @@ else if (curChar == 95) break; if (kind > 12) kind = 12; - jjCheckNAdd(22); + { jjCheckNAdd(22); } break; case 24: case 25: @@ -3577,176 +3742,421 @@ else if (curChar == 95) break; if (kind > 13) kind = 13; - jjCheckNAdd(25); + { jjCheckNAdd(25); } break; case 26: if (curChar == 64) - jjCheckNAdd(27); + { jjCheckNAdd(27); } break; case 27: if ((0x7fffffe07fffffeL & l) == 0L) break; if (kind > 18) kind = 18; - jjCheckNAddTwoStates(27, 28); + { jjCheckNAddTwoStates(27, 28); } break; case 29: if ((0x7fffffe07fffffeL & l) == 0L) break; if (kind > 18) kind = 18; - jjCheckNAddTwoStates(28, 29); + { jjCheckNAddTwoStates(28, 29); } break; case 30: if (curChar == 92) - jjstateSet[jjnewStateCnt++] = 31; + { jjAddStates(57, 59); } break; case 31: if ((0x14404410000000L & l) != 0L && kind > 209) kind = 209; break; - case 33: + case 32: + if (curChar == 85) + jjstateSet[jjnewStateCnt++] = 33; + break; + case 33: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 34; + break; + case 34: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 35; + break; + case 35: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 36; + break; + case 36: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 37; + break; + case 37: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 38; + break; + case 38: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 39; + break; + case 39: + case 44: + if ((0x7e0000007eL & l) != 0L) + { jjCheckNAdd(40); } + break; + case 40: + if ((0x7e0000007eL & l) != 0L && kind > 209) + kind = 209; + break; + case 41: + if (curChar == 117) + jjstateSet[jjnewStateCnt++] = 42; + break; + case 42: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 43; + break; + case 43: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 44; + break; + case 46: + if ((0xffffffffefffffffL & l) != 0L) + { jjCheckNAddStates(18, 20); } + break; + case 47: + if (curChar == 92) + { jjAddStates(78, 80); } + break; + case 48: + if ((0x14404410000000L & l) != 0L) + { jjCheckNAddStates(18, 20); } + break; + case 50: + if (curChar == 85) + jjstateSet[jjnewStateCnt++] = 51; + break; + case 51: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 52; + break; + case 52: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 53; + break; + case 53: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 54; + break; + case 54: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 55; + break; + case 55: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 56; + break; + case 56: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 57; + break; + case 57: + case 62: + if ((0x7e0000007eL & l) != 0L) + { jjCheckNAdd(58); } + break; + case 58: + if 
((0x7e0000007eL & l) != 0L) + { jjCheckNAddStates(18, 20); } + break; + case 59: + if (curChar == 117) + jjstateSet[jjnewStateCnt++] = 60; + break; + case 60: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 61; + break; + case 61: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 62; + break; + case 64: + if ((0xffffffffefffffffL & l) != 0L) + { jjCheckNAddStates(15, 17); } + break; + case 65: + if (curChar == 92) + { jjAddStates(81, 83); } + break; + case 66: + if ((0x14404410000000L & l) != 0L) + { jjCheckNAddStates(15, 17); } + break; + case 68: + if (curChar == 85) + jjstateSet[jjnewStateCnt++] = 69; + break; + case 69: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 70; + break; + case 70: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 71; + break; + case 71: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 72; + break; + case 72: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 73; + break; + case 73: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 74; + break; + case 74: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 75; + break; + case 75: + case 80: + if ((0x7e0000007eL & l) != 0L) + { jjCheckNAdd(76); } + break; + case 76: + if ((0x7e0000007eL & l) != 0L) + { jjCheckNAddStates(15, 17); } + break; + case 77: + if (curChar == 117) + jjstateSet[jjnewStateCnt++] = 78; + break; + case 78: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 79; + break; + case 79: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 80; + break; + case 83: + if ((0xffffffffefffffffL & l) != 0L) + { jjCheckNAddStates(29, 32); } + break; + case 84: + if (curChar == 92) + { jjAddStates(84, 86); } + break; + case 85: + if ((0x14404410000000L & l) != 0L) + { jjCheckNAddStates(29, 32); } + break; + case 90: + if (curChar == 85) + jjstateSet[jjnewStateCnt++] = 91; + break; + case 91: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 92; + break; + case 92: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 93; + break; + case 93: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 94; + break; + case 94: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 95; + break; + case 95: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 96; + break; + case 96: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 97; + break; + case 97: + case 102: + if ((0x7e0000007eL & l) != 0L) + { jjCheckNAdd(98); } + break; + case 98: + if ((0x7e0000007eL & l) != 0L) + { jjCheckNAddStates(29, 32); } + break; + case 99: + if (curChar == 117) + jjstateSet[jjnewStateCnt++] = 100; + break; + case 100: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 101; + break; + case 101: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 102; + break; + case 107: if ((0xffffffffefffffffL & l) != 0L) - jjCheckNAddStates(18, 20); + { jjCheckNAddStates(35, 38); } break; - case 34: + case 108: if (curChar == 92) - jjstateSet[jjnewStateCnt++] = 35; + { jjAddStates(87, 89); } break; - case 35: + case 109: if ((0x14404410000000L & l) != 0L) - jjCheckNAddStates(18, 20); + { jjCheckNAddStates(35, 38); } break; - case 38: - if ((0xffffffffefffffffL & l) != 0L) - jjCheckNAddStates(15, 17); + case 114: + if (curChar == 85) + jjstateSet[jjnewStateCnt++] = 115; break; - case 39: - if (curChar == 92) - jjstateSet[jjnewStateCnt++] = 40; + case 115: + if ((0x7e0000007eL & l) != 0L) + 
jjstateSet[jjnewStateCnt++] = 116; break; - case 40: - if ((0x14404410000000L & l) != 0L) - jjCheckNAddStates(15, 17); + case 116: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 117; break; - case 44: - if ((0xffffffffefffffffL & l) != 0L) - jjCheckNAddStates(29, 32); + case 117: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 118; break; - case 45: - if (curChar == 92) - jjstateSet[jjnewStateCnt++] = 46; + case 118: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 119; break; - case 46: - if ((0x14404410000000L & l) != 0L) - jjCheckNAddStates(29, 32); + case 119: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 120; break; - case 55: - if ((0xffffffffefffffffL & l) != 0L) - jjCheckNAddStates(35, 38); + case 120: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 121; break; - case 56: - if (curChar == 92) - jjstateSet[jjnewStateCnt++] = 57; + case 121: + case 126: + if ((0x7e0000007eL & l) != 0L) + { jjCheckNAdd(122); } break; - case 57: - if ((0x14404410000000L & l) != 0L) - jjCheckNAddStates(35, 38); + case 122: + if ((0x7e0000007eL & l) != 0L) + { jjCheckNAddStates(35, 38); } break; - case 64: + case 123: + if (curChar == 117) + jjstateSet[jjnewStateCnt++] = 124; + break; + case 124: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 125; + break; + case 125: + if ((0x7e0000007eL & l) != 0L) + jjstateSet[jjnewStateCnt++] = 126; + break; + case 129: if ((0x7e0000007eL & l) != 0L && kind > 215) kind = 215; break; - case 65: + case 130: if (curChar == 91) - jjAddStates(41, 42); + { jjAddStates(41, 42); } break; - case 67: + case 132: if (curChar == 93 && kind > 222) kind = 222; break; - case 68: - case 69: + case 133: + case 134: if ((0x7fffffe87fffffeL & l) == 0L) break; if (kind > 247) kind = 247; - jjCheckNAdd(69); + { jjCheckNAdd(134); } break; - case 70: + case 135: if ((0x7fffffe07fffffeL & l) != 0L) - jjCheckNAddStates(57, 62); + { jjCheckNAddStates(60, 65); } break; - case 71: + case 136: if ((0x7fffffe87fffffeL & l) != 0L) - jjCheckNAddTwoStates(71, 72); + { jjCheckNAddTwoStates(136, 137); } break; - case 72: + case 137: if ((0x7fffffe87fffffeL & l) != 0L) - jjCheckNAdd(73); + { jjCheckNAdd(138); } break; - case 74: + case 139: if ((0x7fffffe87fffffeL & l) != 0L) - jjCheckNAddTwoStates(74, 75); + { jjCheckNAddTwoStates(139, 140); } break; - case 75: + case 140: if ((0x7fffffe87fffffeL & l) != 0L) - jjCheckNAdd(76); + { jjCheckNAdd(141); } break; - case 78: + case 143: if ((0x7fffffe87fffffeL & l) == 0L) break; if (kind > 10) kind = 10; - jjCheckNAddStates(47, 50); + { jjCheckNAddStates(47, 50); } break; - case 79: + case 144: if ((0x7fffffe87fffffeL & l) != 0L) - jjCheckNAddStates(47, 50); + { jjCheckNAddStates(47, 50); } break; - case 80: + case 145: if ((0x7fffffe87fffffeL & l) != 0L && kind > 10) kind = 10; break; - case 81: + case 146: if (curChar == 92) - jjAddStates(75, 76); + { jjAddStates(90, 91); } break; - case 82: + case 147: if ((0x4000000080000001L & l) != 0L) - jjCheckNAddStates(47, 50); + { jjCheckNAddStates(47, 50); } break; - case 84: + case 149: if ((0x7e0000007eL & l) != 0L) - jjstateSet[jjnewStateCnt++] = 85; + jjstateSet[jjnewStateCnt++] = 150; break; - case 85: + case 150: if ((0x7e0000007eL & l) != 0L) - jjCheckNAddStates(47, 50); + { jjCheckNAddStates(47, 50); } break; - case 86: + case 151: if ((0x7e0000007eL & l) != 0L) - jjstateSet[jjnewStateCnt++] = 87; + jjstateSet[jjnewStateCnt++] = 152; break; - case 87: + case 152: if ((0x7e0000007eL & l) != 0L && kind > 
10) kind = 10; break; - case 88: + case 153: if ((0x4000000080000001L & l) != 0L && kind > 10) kind = 10; break; - case 97: + case 162: if ((0x2000000020L & l) != 0L) - jjAddStates(77, 78); + { jjAddStates(92, 93); } break; - case 104: + case 169: if ((0x2000000020L & l) != 0L) - jjAddStates(79, 80); + { jjAddStates(94, 95); } break; - case 108: + case 173: if ((0x2000000020L & l) != 0L) - jjAddStates(81, 82); + { jjAddStates(96, 97); } break; default : break; } @@ -3754,12 +4164,12 @@ else if (curChar == 95) } else { - int hiByte = (int)(curChar >> 8); + int hiByte = (curChar >> 8); int i1 = hiByte >> 6; long l1 = 1L << (hiByte & 077); int i2 = (curChar & 0xff) >> 6; long l2 = 1L << (curChar & 077); - MatchLoop: do + do { switch(jjstateSet[--i]) { @@ -3768,40 +4178,40 @@ else if (curChar == 95) { if (kind > 247) kind = 247; - jjCheckNAdd(69); + { jjCheckNAdd(134); } } if (jjCanMove_1(hiByte, i1, i2, l1, l2)) - jjCheckNAddStates(57, 62); + { jjCheckNAddStates(60, 65); } break; case 1: if (!jjCanMove_0(hiByte, i1, i2, l1, l2)) break; if (kind > 7) kind = 7; - jjAddStates(12, 14); + { jjAddStates(12, 14); } break; case 6: if (jjCanMove_0(hiByte, i1, i2, l1, l2)) - jjAddStates(63, 64); + { jjAddStates(66, 67); } break; case 11: if (!jjCanMove_1(hiByte, i1, i2, l1, l2)) break; if (kind > 11) kind = 11; - jjCheckNAddStates(65, 70); + { jjCheckNAddStates(68, 73); } break; case 12: if (jjCanMove_2(hiByte, i1, i2, l1, l2)) - jjCheckNAddTwoStates(12, 13); + { jjCheckNAddTwoStates(12, 13); } break; case 13: if (jjCanMove_2(hiByte, i1, i2, l1, l2)) - jjCheckNAddTwoStates(10, 11); + { jjCheckNAddTwoStates(10, 11); } break; case 14: if (jjCanMove_2(hiByte, i1, i2, l1, l2)) - jjCheckNAddTwoStates(14, 15); + { jjCheckNAddTwoStates(14, 15); } break; case 15: if (jjCanMove_2(hiByte, i1, i2, l1, l2) && kind > 11) @@ -3812,113 +4222,113 @@ else if (curChar == 95) break; if (kind > 11) kind = 11; - jjCheckNAddStates(71, 74); + { jjCheckNAddStates(74, 77); } break; case 17: if (jjCanMove_2(hiByte, i1, i2, l1, l2)) - jjCheckNAddTwoStates(17, 18); + { jjCheckNAddTwoStates(17, 18); } break; case 18: if (!jjCanMove_2(hiByte, i1, i2, l1, l2)) break; if (kind > 11) kind = 11; - jjCheckNAddTwoStates(10, 11); + { jjCheckNAddTwoStates(10, 11); } break; case 21: if (!jjCanMove_1(hiByte, i1, i2, l1, l2)) break; if (kind > 12) kind = 12; - jjCheckNAdd(22); + { jjCheckNAdd(22); } break; case 22: if (!jjCanMove_2(hiByte, i1, i2, l1, l2)) break; if (kind > 12) kind = 12; - jjCheckNAdd(22); + { jjCheckNAdd(22); } break; case 24: if (!jjCanMove_1(hiByte, i1, i2, l1, l2)) break; if (kind > 13) kind = 13; - jjCheckNAdd(25); + { jjCheckNAdd(25); } break; case 25: if (!jjCanMove_2(hiByte, i1, i2, l1, l2)) break; if (kind > 13) kind = 13; - jjCheckNAdd(25); + { jjCheckNAdd(25); } break; - case 33: + case 46: if (jjCanMove_0(hiByte, i1, i2, l1, l2)) - jjAddStates(18, 20); + { jjAddStates(18, 20); } break; - case 38: + case 64: if (jjCanMove_0(hiByte, i1, i2, l1, l2)) - jjAddStates(15, 17); + { jjAddStates(15, 17); } break; - case 44: + case 83: if (jjCanMove_0(hiByte, i1, i2, l1, l2)) - jjAddStates(29, 32); + { jjAddStates(29, 32); } break; - case 55: + case 107: if (jjCanMove_0(hiByte, i1, i2, l1, l2)) - jjAddStates(35, 38); + { jjAddStates(35, 38); } break; - case 68: + case 133: if (!jjCanMove_1(hiByte, i1, i2, l1, l2)) break; if (kind > 247) kind = 247; - jjCheckNAdd(69); + { jjCheckNAdd(134); } break; - case 69: + case 134: if (!jjCanMove_2(hiByte, i1, i2, l1, l2)) break; if (kind > 247) kind = 247; - jjCheckNAdd(69); + { 
jjCheckNAdd(134); } break; - case 70: + case 135: if (jjCanMove_1(hiByte, i1, i2, l1, l2)) - jjCheckNAddStates(57, 62); + { jjCheckNAddStates(60, 65); } break; - case 71: + case 136: if (jjCanMove_2(hiByte, i1, i2, l1, l2)) - jjCheckNAddTwoStates(71, 72); + { jjCheckNAddTwoStates(136, 137); } break; - case 72: + case 137: if (jjCanMove_2(hiByte, i1, i2, l1, l2)) - jjCheckNAdd(73); + { jjCheckNAdd(138); } break; - case 74: + case 139: if (jjCanMove_2(hiByte, i1, i2, l1, l2)) - jjCheckNAddTwoStates(74, 75); + { jjCheckNAddTwoStates(139, 140); } break; - case 75: + case 140: if (jjCanMove_2(hiByte, i1, i2, l1, l2)) - jjCheckNAdd(76); + { jjCheckNAdd(141); } break; - case 78: + case 143: if (!jjCanMove_1(hiByte, i1, i2, l1, l2)) break; if (kind > 10) kind = 10; - jjCheckNAddStates(47, 50); + { jjCheckNAddStates(47, 50); } break; - case 79: + case 144: if (jjCanMove_2(hiByte, i1, i2, l1, l2)) - jjCheckNAddStates(47, 50); + { jjCheckNAddStates(47, 50); } break; - case 80: + case 145: if (jjCanMove_2(hiByte, i1, i2, l1, l2) && kind > 10) kind = 10; break; - default : break; + default : if (i1 == 0 || l1 == 0 || i2 == 0 || l2 == 0) break; else break; } } while(i != startsAt); } @@ -3929,7 +4339,7 @@ else if (curChar == 95) kind = 0x7fffffff; } ++curPos; - if ((i = jjnewStateCnt) == (startsAt = 115 - (jjnewStateCnt = startsAt))) + if ((i = jjnewStateCnt) == (startsAt = 180 - (jjnewStateCnt = startsAt))) break; try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { break; } @@ -3954,13 +4364,61 @@ else if (jjmatchedPos == strPos && jjmatchedKind > strKind) return toRet; } + +/** Token literal values. */ +public static final String[] jjstrLiteralImages = { +"", null, null, null, null, null, null, null, null, null, null, null, null, +null, "\100\142\141\163\145", "\100\160\162\145\146\151\170", +"\100\154\151\163\164", "\100\160\141\164\150", null, null, null, "\141", null, null, null, null, null, +null, null, null, null, null, null, null, null, null, null, null, null, null, null, +null, null, null, null, null, null, null, null, null, null, null, null, null, null, +null, null, null, null, null, null, null, null, null, null, null, null, null, null, +null, null, null, null, null, null, null, null, null, null, null, null, null, null, +null, null, null, null, null, null, null, null, null, null, null, null, null, null, +null, null, null, null, null, null, null, null, null, null, null, null, null, null, +null, null, null, null, null, null, null, null, null, null, null, null, null, null, +null, null, null, null, null, null, null, null, null, null, null, null, null, null, +null, null, null, null, null, null, null, null, null, null, null, null, null, null, +null, null, null, null, null, null, null, null, null, null, null, null, null, null, +null, null, null, null, null, null, null, null, null, null, null, null, null, null, +"\72\72", null, null, null, null, null, null, null, null, null, null, null, null, null, +"\75\75", "\41\75\75", "\136", "\74\75\72", "\75\72", "\76\75\72", "\76\72", "\176\75", +null, null, null, null, null, null, null, null, null, null, null, null, null, "\50", +"\51", "\173", "\175", "\133", "\135", null, "\73", "\54", "\56", "\75", "\41\75", +"\74\74", "\76\76", "\74", "\76", "\74\75", "\76\75", "\41", "\176", "\72", "\174\174", +"\46\46", "\53", "\55", "\52", "\57", "\77", "\174", "\136\136", "\100", null, null, +null, null, null, null, null, null, null, null, }; +protected Token jjFillToken() +{ + final Token t; + final String curTokenImage; + final int beginLine; + 
final int endLine; + final int beginColumn; + final int endColumn; + String im = jjstrLiteralImages[jjmatchedKind]; + curTokenImage = (im == null) ? input_stream.GetImage() : im; + beginLine = input_stream.getBeginLine(); + beginColumn = input_stream.getBeginColumn(); + endLine = input_stream.getEndLine(); + endColumn = input_stream.getEndColumn(); + t = Token.newToken(jjmatchedKind, curTokenImage); + + t.beginLine = beginLine; + t.endLine = endLine; + t.beginColumn = beginColumn; + t.endColumn = endColumn; + + return t; +} static final int[] jjnextStates = { - 91, 92, 93, 101, 102, 107, 108, 112, 91, 92, 95, 100, 1, 2, 4, 38, - 39, 41, 33, 34, 36, 9, 16, 12, 13, 14, 15, 17, 18, 43, 44, 45, - 47, 48, 50, 54, 55, 56, 58, 59, 61, 66, 67, 71, 72, 74, 75, 79, - 80, 81, 83, 84, 86, 101, 102, 107, 108, 71, 72, 73, 74, 75, 76, 6, - 7, 12, 13, 10, 14, 15, 11, 17, 18, 10, 11, 82, 88, 98, 99, 105, - 106, 109, 110, + 156, 157, 158, 166, 167, 172, 173, 177, 156, 157, 160, 165, 1, 2, 4, 64, + 65, 67, 46, 47, 49, 9, 16, 12, 13, 14, 15, 17, 18, 82, 83, 84, + 86, 87, 89, 106, 107, 108, 110, 111, 113, 131, 132, 136, 137, 139, 140, 144, + 145, 146, 148, 149, 151, 166, 167, 172, 173, 31, 32, 41, 136, 137, 138, 139, + 140, 141, 6, 7, 12, 13, 10, 14, 15, 11, 17, 18, 10, 11, 48, 50, + 59, 66, 68, 77, 85, 90, 99, 109, 114, 123, 147, 153, 163, 164, 170, 171, + 174, 175, }; private static final boolean jjCanMove_0(int hiByte, int i1, int i2, long l1, long l2) { @@ -3968,7 +4426,7 @@ private static final boolean jjCanMove_0(int hiByte, int i1, int i2, long l1, lo { case 0: return ((jjbitVec2[i2] & l2) != 0L); - default : + default : if ((jjbitVec0[i1] & l1) != 0L) return true; return false; @@ -3994,7 +4452,7 @@ private static final boolean jjCanMove_1(int hiByte, int i1, int i2, long l1, lo return ((jjbitVec9[i2] & l2) != 0L); case 255: return ((jjbitVec10[i2] & l2) != 0L); - default : + default : if ((jjbitVec3[i1] & l1) != 0L) return true; return false; @@ -4020,101 +4478,12 @@ private static final boolean jjCanMove_2(int hiByte, int i1, int i2, long l1, lo return ((jjbitVec9[i2] & l2) != 0L); case 255: return ((jjbitVec10[i2] & l2) != 0L); - default : + default : if ((jjbitVec3[i1] & l1) != 0L) return true; return false; } } -public static final String[] jjstrLiteralImages = { -"", null, null, null, null, null, null, null, null, null, null, null, null, -null, "\100\142\141\163\145", "\100\160\162\145\146\151\170", -"\100\154\151\163\164", "\100\160\141\164\150", null, null, null, "\141", null, null, null, null, null, -null, null, null, null, null, null, null, null, null, null, null, null, null, null, -null, null, null, null, null, null, null, null, null, null, null, null, null, null, -null, null, null, null, null, null, null, null, null, null, null, null, null, null, -null, null, null, null, null, null, null, null, null, null, null, null, null, null, -null, null, null, null, null, null, null, null, null, null, null, null, null, null, -null, null, null, null, null, null, null, null, null, null, null, null, null, null, -null, null, null, null, null, null, null, null, null, null, null, null, null, null, -null, null, null, null, null, null, null, null, null, null, null, null, null, null, -null, null, null, null, null, null, null, null, null, null, null, null, null, null, -null, null, null, null, null, null, null, null, null, null, null, null, null, null, -null, null, null, null, null, null, null, null, null, null, null, null, null, null, -"\72\72", null, null, null, null, null, null, null, null, null, null, null, 
null, null, -"\75\75", "\41\75\75", "\136", "\74\75\72", "\75\72", "\76\75\72", "\76\72", "\176\75", -null, null, null, null, null, null, null, null, null, null, null, null, null, "\50", -"\51", "\173", "\175", "\133", "\135", null, "\73", "\54", "\56", "\75", "\41\75", -"\74\74", "\76\76", "\74", "\76", "\74\75", "\76\75", "\41", "\176", "\72", "\174\174", -"\46\46", "\53", "\55", "\52", "\57", "\77", "\174", "\136\136", "\100", null, null, -null, null, null, null, null, null, null, null, }; -public static final String[] lexStateNames = { - "DEFAULT", -}; -static final long[] jjtoToken = { - 0xffffffffffe7ff01L, 0xffffffffffffffffL, 0xffffffffffffffffL, 0xfffffffffe3fffL, - 0x0L, -}; -static final long[] jjtoSkip = { - 0xbeL, 0x0L, 0x0L, 0x0L, - 0x0L, -}; -static final long[] jjtoSpecial = { - 0x80L, 0x0L, 0x0L, 0x0L, - 0x0L, -}; -protected JavaCharStream input_stream; -private final int[] jjrounds = new int[115]; -private final int[] jjstateSet = new int[230]; -protected char curChar; -public SparqlCoreseTokenManager(JavaCharStream stream){ - if (JavaCharStream.staticFlag) - throw new Error("ERROR: Cannot use a static CharStream class with a non-static lexical analyzer."); - input_stream = stream; -} -public SparqlCoreseTokenManager(JavaCharStream stream, int lexState){ - this(stream); - SwitchTo(lexState); -} -public void ReInit(JavaCharStream stream) -{ - jjmatchedPos = jjnewStateCnt = 0; - curLexState = defaultLexState; - input_stream = stream; - ReInitRounds(); -} -private final void ReInitRounds() -{ - int i; - jjround = 0x80000001; - for (i = 115; i-- > 0;) - jjrounds[i] = 0x80000000; -} -public void ReInit(JavaCharStream stream, int lexState) -{ - ReInit(stream); - SwitchTo(lexState); -} -public void SwitchTo(int lexState) -{ - if (lexState >= 1 || lexState < 0) - throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE); - else - curLexState = lexState; -} - -protected Token jjFillToken() -{ - Token t = Token.newToken(jjmatchedKind); - t.kind = jjmatchedKind; - String im = jjstrLiteralImages[jjmatchedKind]; - t.image = (im == null) ? input_stream.GetImage() : im; - t.beginLine = input_stream.getBeginLine(); - t.beginColumn = input_stream.getBeginColumn(); - t.endLine = input_stream.getEndLine(); - t.endColumn = input_stream.getEndColumn(); - return t; -} int curLexState = 0; int defaultLexState = 0; @@ -4123,23 +4492,24 @@ protected Token jjFillToken() int jjmatchedPos; int jjmatchedKind; +/** Get the next Token. 
*/ public Token getNextToken() { - int kind; Token specialToken = null; Token matchedToken; int curPos = 0; EOFLoop : for (;;) - { - try - { + { + try + { curChar = input_stream.BeginToken(); - } - catch(java.io.IOException e) - { + } + catch(Exception e) + { jjmatchedKind = 0; + jjmatchedPos = -1; matchedToken = jjFillToken(); matchedToken.specialToken = specialToken; CommonTokenAction(matchedToken); @@ -4199,4 +4569,155 @@ public Token getNextToken() } } +void SkipLexicalActions(Token matchedToken) +{ + switch(jjmatchedKind) + { + default : + break; + } +} +void MoreLexicalActions() +{ + jjimageLen += (lengthOfMatch = jjmatchedPos + 1); + switch(jjmatchedKind) + { + default : + break; + } +} +void TokenLexicalActions(Token matchedToken) +{ + switch(jjmatchedKind) + { + default : + break; + } +} +private void jjCheckNAdd(int state) +{ + if (jjrounds[state] != jjround) + { + jjstateSet[jjnewStateCnt++] = state; + jjrounds[state] = jjround; + } +} +private void jjAddStates(int start, int end) +{ + do { + jjstateSet[jjnewStateCnt++] = jjnextStates[start]; + } while (start++ != end); +} +private void jjCheckNAddTwoStates(int state1, int state2) +{ + jjCheckNAdd(state1); + jjCheckNAdd(state2); +} + +private void jjCheckNAddStates(int start, int end) +{ + do { + jjCheckNAdd(jjnextStates[start]); + } while (start++ != end); +} + + /** Constructor. */ + public SparqlCoreseTokenManager(SimpleCharStream stream){ + + if (SimpleCharStream.staticFlag) + throw new Error("ERROR: Cannot use a static CharStream class with a non-static lexical analyzer."); + + input_stream = stream; + } + + /** Constructor. */ + public SparqlCoreseTokenManager (SimpleCharStream stream, int lexState){ + ReInit(stream); + SwitchTo(lexState); + } + + /** Reinitialise parser. */ + + public void ReInit(SimpleCharStream stream) + { + + + jjmatchedPos = + jjnewStateCnt = + 0; + curLexState = defaultLexState; + input_stream = stream; + ReInitRounds(); + } + + private void ReInitRounds() + { + int i; + jjround = 0x80000001; + for (i = 180; i-- > 0;) + jjrounds[i] = 0x80000000; + } + + /** Reinitialise parser. */ + public void ReInit(SimpleCharStream stream, int lexState) + + { + ReInit(stream); + SwitchTo(lexState); + } + + /** Switch to specified lex state. */ + public void SwitchTo(int lexState) + { + if (lexState >= 1 || lexState < 0) + throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE); + else + curLexState = lexState; + } + + +/** Lexer state names. */ +public static final String[] lexStateNames = { + "DEFAULT", +}; + +/** Lex State array. 
*/ +public static final int[] jjnewLexState = { + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, +}; +static final long[] jjtoToken = { + 0xffffffffffe7ff01L, 0xffffffffffffffffL, 0xffffffffffffffffL, 0xfffffffffe3fffL, + 0x0L, +}; +static final long[] jjtoSkip = { + 0xbeL, 0x0L, 0x0L, 0x0L, + 0x0L, +}; +static final long[] jjtoSpecial = { + 0x80L, 0x0L, 0x0L, 0x0L, + 0x0L, +}; +static final long[] jjtoMore = { + 0x0L, 0x0L, 0x0L, 0x0L, + 0x0L, +}; + protected SimpleCharStream input_stream; + + private final int[] jjrounds = new int[180]; + private final int[] jjstateSet = new int[2 * 180]; + private final StringBuilder jjimage = new StringBuilder(); + private StringBuilder image = jjimage; + private int jjimageLen; + private int lengthOfMatch; + protected int curChar; } diff --git a/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/Token.java b/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/Token.java index 40b75a313..8e85357ae 100644 --- a/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/Token.java +++ b/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/Token.java @@ -1,11 +1,19 @@ -/* Generated By:JavaCC: Do not edit this line. Token.java Version 3.0 */ +/* Generated By:JavaCC: Do not edit this line. Token.java Version 7.0 */ +/* JavaCCOptions:TOKEN_EXTENDS=,KEEP_LINE_COLUMN=true,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ package fr.inria.corese.sparql.triple.javacc1; /** * Describes the input token stream. */ -public class Token { +public class Token implements java.io.Serializable { + + /** + * The version identifier for this Serializable class. + * Increment only if the serialized form of the + * class changes. + */ + private static final long serialVersionUID = 1L; /** * An integer that describes the kind of this token. This numbering @@ -14,12 +22,14 @@ public class Token { */ public int kind; - /** - * beginLine and beginColumn describe the position of the first character - * of this token; endLine and endColumn describe the position of the - * last character of this token. - */ - public int beginLine, beginColumn, endLine, endColumn; + /** The line number of the first character of this Token. */ + public int beginLine; + /** The column number of the first character of this Token. */ + public int beginColumn; + /** The line number of the last character of this Token. */ + public int endLine; + /** The column number of the last character of this Token. */ + public int endColumn; /** * The string image of the token. 
@@ -50,12 +60,47 @@ public class Token { */ public Token specialToken; + /** + * An optional attribute value of the Token. + * Tokens which are not used as syntactic sugar will often contain + * meaningful values that will be used later on by the compiler or + * interpreter. This attribute value is often different from the image. + * Any subclass of Token that actually wants to return a non-null value can + * override this method as appropriate. + */ + public Object getValue() { + return null; + } + + /** + * No-argument constructor + */ + public Token() {} + + /** + * Constructs a new token for the specified Image. + */ + public Token(int kind) + { + this(kind, null); + } + + /** + * Constructs a new token for the specified Image and Kind. + */ + public Token(int kind, String image) + { + this.kind = kind; + this.image = image; + } + /** * Returns the image. */ + @Override public String toString() { - return image; + return image; } /** @@ -63,19 +108,25 @@ public String toString() * can create and return subclass objects based on the value of ofKind. * Simply add the cases to the switch for all those special cases. * For example, if you have a subclass of Token called IDToken that - * you want to create if ofKind is ID, simlpy add something like : + * you want to create if ofKind is ID, simply add something like : * - * case MyParserConstants.ID : return new IDToken(); + * case MyParserConstants.ID : return new IDToken(ofKind, image); * * to the following switch statement. Then you can cast matchedToken - * variable to the appropriate type and use it in your lexical actions. + * variable to the appropriate type and use sit in your lexical actions. */ - public static final Token newToken(int ofKind) + public static Token newToken(int ofKind, String image) + { + switch(ofKind) + { + default : return new Token(ofKind, image); + } + } + + public static Token newToken(int ofKind) { - switch(ofKind) - { - default : return new Token(); - } + return newToken(ofKind, null); } } +/* JavaCC - OriginalChecksum=8e87a0b7c802e38de47fce6743747b86 (do not edit this line) */ diff --git a/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/TokenMgrError.java b/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/TokenMgrError.java index 5ede2f79e..2979cf9f2 100644 --- a/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/TokenMgrError.java +++ b/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/TokenMgrError.java @@ -1,133 +1,148 @@ -/* Generated By:JavaCC: Do not edit this line. TokenMgrError.java Version 3.0 */ +/* Generated By:JavaCC: Do not edit this line. TokenMgrError.java Version 7.0 */ +/* JavaCCOptions: */ package fr.inria.corese.sparql.triple.javacc1; +/** Token Manager Error. */ +@SuppressWarnings("all") public class TokenMgrError extends Error { - /* - * Ordinals for various reasons why an Error of this type can be thrown. - */ - /** - * Lexical error occured. - */ - static final int LEXICAL_ERROR = 0; + /** + * The version identifier for this Serializable class. + * Increment only if the serialized form of the + * class changes. + */ + private static final long serialVersionUID = 1L; - /** - * An attempt wass made to create a second instance of a static token manager. - */ - static final int STATIC_LEXER_ERROR = 1; + /* + * Ordinals for various reasons why an Error of this type can be thrown. + */ - /** - * Tried to change to an invalid lexical state. - */ - static final int INVALID_LEXICAL_STATE = 2; + /** + * Lexical error occurred. 
+ */ + public static final int LEXICAL_ERROR = 0; - /** - * Detected (and bailed out of) an infinite loop in the token manager. - */ - static final int LOOP_DETECTED = 3; + /** + * An attempt was made to create a second instance of a static token manager. + */ + public static final int STATIC_LEXER_ERROR = 1; - /** - * Indicates the reason why the exception is thrown. It will have - * one of the above 4 values. - */ - int errorCode; + /** + * Tried to change to an invalid lexical state. + */ + public static final int INVALID_LEXICAL_STATE = 2; - /** - * Replaces unprintable characters by their espaced (or unicode escaped) - * equivalents in the given string - */ - protected static final String addEscapes(String str) { - StringBuffer retval = new StringBuffer(); - char ch; - for (int i = 0; i < str.length(); i++) { - switch (str.charAt(i)) - { - case 0 : - continue; - case '\b': - retval.append("\\b"); - continue; - case '\t': - retval.append("\\t"); - continue; - case '\n': - retval.append("\\n"); - continue; - case '\f': - retval.append("\\f"); - continue; - case '\r': - retval.append("\\r"); - continue; - case '\"': - retval.append("\\\""); - continue; - case '\'': - retval.append("\\\'"); - continue; - case '\\': - retval.append("\\\\"); - continue; - default: - if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) { - String s = "0000" + Integer.toString(ch, 16); - retval.append("\\u" + s.substring(s.length() - 4, s.length())); - } else { - retval.append(ch); - } - continue; - } + /** + * Detected (and bailed out of) an infinite loop in the token manager. + */ + public static final int LOOP_DETECTED = 3; + + /** + * Indicates the reason why the exception is thrown. It will have + * one of the above 4 values. + */ + int errorCode; + + /** + * Replaces unprintable characters by their escaped (or unicode escaped) + * equivalents in the given string + */ + protected static final String addEscapes(String str) { + StringBuilder retval = new StringBuilder(); + char ch; + for (int i = 0; i < str.length(); i++) { + switch (str.charAt(i)) + { + case '\b': + retval.append("\\b"); + continue; + case '\t': + retval.append("\\t"); + continue; + case '\n': + retval.append("\\n"); + continue; + case '\f': + retval.append("\\f"); + continue; + case '\r': + retval.append("\\r"); + continue; + case '\"': + retval.append("\\\""); + continue; + case '\'': + retval.append("\\\'"); + continue; + case '\\': + retval.append("\\\\"); + continue; + default: + if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) { + String s = "0000" + Integer.toString(ch, 16); + retval.append("\\u" + s.substring(s.length() - 4, s.length())); + } else { + retval.append(ch); + } + continue; } - return retval.toString(); - } + } + return retval.toString(); + } - /** - * Returns a detailed message for the Error when it is thrown by the - * token manager to indicate a lexical error. - * Parameters : - * EOFSeen : indicates if EOF caused the lexicl error - * curLexState : lexical state in which this error occured - * errorLine : line number when the error occured - * errorColumn : column number when the error occured - * errorAfter : prefix that was seen before this error occured - * curchar : the offending character - * Note: You can customize the lexical error message by modifying this method. - */ - protected static String LexicalError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar) { - return("Lexical error at line " + - errorLine + ", column " + - errorColumn + ". Encountered: " + - (EOFSeen ? 
" " : ("\"" + addEscapes(String.valueOf(curChar)) + "\"") + " (" + (int)curChar + "), ") + - "after : \"" + addEscapes(errorAfter) + "\""); - } + /** + * Returns a detailed message for the Error when it is thrown by the + * token manager to indicate a lexical error. + * Parameters : + * EOFSeen : indicates if EOF caused the lexical error + * lexState : lexical state in which this error occurred + * errorLine : line number when the error occurred + * errorColumn : column number when the error occurred + * errorAfter : prefix that was seen before this error occurred + * curchar : the offending character + * Note: You can customize the lexical error message by modifying this method. + */ + protected static String LexicalErr(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, int curChar) { + return("Lexical error at line " + // + errorLine + ", column " + // + errorColumn + ". Encountered: " + // + (EOFSeen ? "" : ("'" + addEscapes(String.valueOf((char) curChar)) + "' (" + curChar + "),")) + // + (errorAfter == null || errorAfter.length() == 0 ? "" : " after prefix \"" + addEscapes(errorAfter) + "\"")) + // + (lexState == 0 ? "" : " (in lexical state " + lexState + ")"); + } - /** - * You can also modify the body of this method to customize your error messages. - * For example, cases like LOOP_DETECTED and INVALID_LEXICAL_STATE are not - * of end-users concern, so you can return something like : - * - * "Internal Error : Please file a bug report .... " - * - * from this method for such cases in the release version of your parser. - */ - public String getMessage() { - return super.getMessage(); - } + /** + * You can also modify the body of this method to customize your error messages. + * For example, cases like LOOP_DETECTED and INVALID_LEXICAL_STATE are not + * of end-users concern, so you can return something like : + * + * "Internal Error : Please file a bug report .... " + * + * from this method for such cases in the release version of your parser. + */ + @Override + public String getMessage() { + return super.getMessage(); + } - /* - * Constructors of various flavors follow. - */ + /* + * Constructors of various flavors follow. + */ - public TokenMgrError() { - } + /** No arg constructor. */ + public TokenMgrError() { + } - public TokenMgrError(String message, int reason) { - super(message); - errorCode = reason; - } + /** Constructor with message and reason. */ + public TokenMgrError(String message, int reason) { + super(message); + errorCode = reason; + } - public TokenMgrError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar, int reason) { - this(LexicalError(EOFSeen, lexState, errorLine, errorColumn, errorAfter, curChar), reason); - } + /** Full Constructor. 
*/ + public TokenMgrError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, int curChar, int reason) { + this(LexicalErr(EOFSeen, lexState, errorLine, errorColumn, errorAfter, curChar), reason); + } } +/* JavaCC - OriginalChecksum=0df0839f31e5c4f6534e4b4b9e888f39 (do not edit this line) */ diff --git a/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/sparql_corese.jj b/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/sparql_corese.jj index 588deb186..3e6ca5d2d 100644 --- a/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/sparql_corese.jj +++ b/sparql/src/main/java/fr/inria/corese/sparql/triple/javacc1/sparql_corese.jj @@ -6,8 +6,8 @@ options { - JAVA_UNICODE_ESCAPE = true; - UNICODE_INPUT = false ; + JAVA_UNICODE_ESCAPE = false; + UNICODE_INPUT = true ; STATIC = false ; COMMON_TOKEN_ACTION = true; // Do \ u processing outside javacc @@ -104,6 +104,47 @@ PARSER_BEGIN(SparqlCorese) } } + // Method for converting Unicode escape sequences \\uxxxx and \\Uxxxxxxxx + String convertUnicodeSequences(String str) { + StringBuilder sb = new StringBuilder(); + for (int i = 0; i < str.length(); i++) { + char ch = str.charAt(i); + // Checks whether the current character is a backslash and whether there is another character before it + if (ch == '\\' && i + 1 < str.length()) { + // Checks whether it is a literal escape sequence (preceded by a backslash) + if (i > 0 && str.charAt(i - 1) == '\\') { + sb.append(ch); + continue; + } + + char nextCh = str.charAt(i + 1); + if (nextCh == 'u' && i + 5 < str.length()) { // Manage \\uxxxx + int code = Integer.parseInt(str.substring(i + 2, i + 6), 16); + sb.append(Character.toChars(code)); + i += 5; // Skip escape sequence + + } else if (nextCh == 'U' && i + 9 < str.length()) { // Manage \\Uxxxxxxxx + int code = Integer.parseInt(str.substring(i + 2, i + 10), 16); + for (char c : Character.toChars(code)) { + sb.append(c); + } + i += 9; // Skip escape sequence + } else { + sb.append(ch); // If this is not an escape sequence, add the character as is + } + } else { + // Handles the case where a literal backslash is immediately followed by another character (not 'u' or 'U') + if (ch == '\\' && i > 0 && str.charAt(i - 1) == '\\') { + // Do nothing here to avoid adding the literal backslash a second time + } else { + sb.append(ch); + } + } + } + return sb.toString(); + } + + public ParseException createStopException() { return new ParseException("stop"); } @@ -2820,7 +2861,26 @@ String String() : { Token t; } | t = | t = ) - { return handler.remEscapes(t.image); } + { + final String SQ3 = "\"\"\""; + final String SSQ3 = "'''"; + + String str = t.image; + + // remove leading/trailing " or ' + int start = 1, end = str.length() - 1; + + if ((str.startsWith(SQ3) && str.endsWith(SQ3)) + || (str.startsWith(SSQ3) && str.endsWith(SSQ3))) { + // remove leading/trailing """ or ''' + start = 3; + end = str.length() - 3; + } + + str = str.substring(start, end); + + return handler.remEscapes(convertUnicodeSequences(str)); + } } String IRIref() : { String res; } @@ -2872,16 +2932,16 @@ Variable BlankNode(Exp stack): { Token t; Variable v; } String Q_IRI_ref() : { Token t; String s; } { - t = - { - s = t.image; - s = s.substring(1,s.length()-1); - s = astq.defURI(s); - return s; - } + t = + { + s = t.image; + s = s.substring(1,s.length()-1); // Remove "<" and ">" + s = convertUnicodeSequences(s); // Convert Unicode escape sequences + s = astq.defURI(s); + return s; + } } - // ------------------------------------------ // 
Tokens // ------------------------------------------ @@ -3186,7 +3246,7 @@ TOKEN : | < #EXPONENT: ["e","E"] (["+","-"])? (["0"-"9"])+ > | < #QUOTE_3D: "\"\"\""> | < #QUOTE_3S: "'''"> -| +| | < STRING_LITERAL1: // Single quoted string diff --git a/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/ParserHandler.java b/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/ParserHandler.java index b5df5fb44..299ba6017 100644 --- a/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/ParserHandler.java +++ b/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/ParserHandler.java @@ -1,14 +1,16 @@ package fr.inria.corese.sparql.triple.parser; +import java.util.ArrayList; +import java.util.List; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import fr.inria.corese.sparql.exceptions.EngineException; import fr.inria.corese.sparql.triple.api.Creator; import fr.inria.corese.sparql.triple.javacc1.ParseException; import fr.inria.corese.sparql.triple.javacc1.SparqlCorese; import fr.inria.corese.sparql.triple.javacc1.Token; -import java.util.ArrayList; -import java.util.List; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * @@ -17,11 +19,9 @@ */ public class ParserHandler { public static Logger logger = LoggerFactory.getLogger(ParserHandler.class); - - static final String SQ3 = "\"\"\""; - static final String SSQ3 = "'''"; + public static boolean rdf_star_validation = false; - + boolean insideDelete = false; boolean insideDeleteData = false; private boolean insideValues = false; @@ -36,15 +36,15 @@ public class ParserHandler { SparqlCorese parser; private Metadata metadata; private ArrayList errorList; - + ParserHandler() { errorList = new ArrayList<>(); } - + public void setParser(SparqlCorese parser) { this.parser = parser; } - + // create rdf graph Edge directly for rdf loader public void setCreator(Creator c) { setCreate(c); @@ -52,35 +52,37 @@ public void setCreator(Creator c) { setTurtleLoader(true); } } - - public Exp createTriples(ASTQuery ast, Exp stack, Expression subject, Atom predicate, ExpressionList objectList, int n) + public Exp createTriples(ASTQuery ast, Exp stack, Expression subject, Atom predicate, ExpressionList objectList, + int n) throws ParseException { for (Expression object : objectList) { createTripleWithAnnotation(ast, stack, subject.getAtom(), predicate, object.getAtom()); } return stack; } - + /** * s p o1, o2, oi * we may have s p o whith o.reference = t and o.annotation = (t q v) * create triple s p o t and add its annotation t q v after triple in stack - */ - Exp createTripleWithAnnotation(ASTQuery ast, Exp stack, Atom subject, Atom predicate, Atom object) + */ + Exp createTripleWithAnnotation(ASTQuery ast, Exp stack, Atom subject, Atom predicate, Atom object) throws ParseException { - + boolean turtleLoaderStatus = isTurtleLoader(); - + if (stack.isStack() && isTurtleLoader()) { // create annotation triple t q v - // switch to sparql parser mode and create/record annotation Triple instead of Edge - // annotation Edge will be created from triple later by processTurtleAnnotation() - // when subject Edge s p o t will be created + // switch to sparql parser mode and create/record annotation Triple instead of + // Edge + // annotation Edge will be created from triple later by + // processTurtleAnnotation() + // when subject Edge s p o t will be created // in order to get subject ref ID Node of subject Edge setTurtleLoader(false); } - + Triple triple = genericCreateTriple(ast, subject, predicate, object); if 
(isTurtleLoader()) { @@ -89,18 +91,18 @@ Exp createTripleWithAnnotation(ASTQuery ast, Exp stack, Atom subject, Atom predi } else { // triple created in stack stack.add(triple); // stack.add(n++, triple); - processSparqlAnnotation(ast, stack, triple, object); + processSparqlAnnotation(ast, stack, triple, object); } - + setTurtleLoader(turtleLoaderStatus); - + return stack; } - + /** * Turtle loader create triple = s p o t where object = o * o.annotation = (t q v) - * create Edge t q v now from stacked triple t q v + * create Edge t q v now from stacked triple t q v * in order to get rdf ID graph node for t */ void processTurtleAnnotation(ASTQuery ast, Atom object) throws ParseException { @@ -112,15 +114,15 @@ void processTurtleAnnotation(ASTQuery ast, Atom object) throws ParseException { } } } - + /** * Sparql parser create triple = s p o t where object = o - * o.annotation = (t q v) + * o.annotation = (t q v) */ void processSparqlAnnotation(ASTQuery ast, Exp stack, Triple triple, Atom object) { if (object.getAnnotation() != null) { // triple is annotated by a list of triple - // add annotation of triple in stack after t + // add annotation of triple in stack after t // ref ID t will be created in target graph before use of t // in annotation for (Exp ee : object.getAnnotation()) { @@ -132,16 +134,14 @@ void processSparqlAnnotation(ASTQuery ast, Exp stack, Triple triple, Atom object } } - Triple genericCreateTriple(ASTQuery ast, Atom s, Atom p, Atom o) throws ParseException { - if (o.getTripleReference()==null) { + if (o.getTripleReference() == null) { return createTriple(ast, s, p, o); - } - else { + } else { // s p o {| q v |} -> // triple(s p o t) . t q v // t = o.getAtom().getTripleReference() - // create: s p o t + // create: s p o t return createTripleStar(ast, s, p, o, o.getTripleReference()); } } @@ -163,13 +163,13 @@ Triple createTriple(ASTQuery ast, Atom s, Atom p, Atom o) throws ParseException return ast.createTriple(s, p, o); } } - + public void createNquad(Atom subject, Atom predicate, Atom object, Atom graph) { if (isTurtleLoader()) { getCreate().triple(graph, subject, predicate, object); } } - + public Triple createTriple(ASTQuery ast, Atom p, List list, boolean matchArity) { return createTriple(ast, p, list, matchArity, false); } @@ -190,12 +190,11 @@ Triple createTriple(ASTQuery ast, Atom p, List list, boolean matchArity, b public void declareBlankNode(Token id) { setBnode(id); } - + public void graphPattern(Atom g) { if (isTurtleLoader()) { getCreate().graph(g.getConstant()); - } - else if (g.isBlankOrBlankNode()) { + } else if (g.isBlankOrBlankNode()) { throw new Error("bnode as graph name: " + getBnode() + " Line: " + getBnode().beginLine); } } @@ -225,17 +224,7 @@ public Atom list(ASTQuery ast, Exp stack, List l, int arobase) { public String remEscapes(String str) { StringBuilder retval = new StringBuilder(); - // remove leading/trailing " or ' - int start = 1, end = str.length() - 1; - - if ((str.startsWith(SQ3) && str.endsWith(SQ3)) - || (str.startsWith(SSQ3) && str.endsWith(SSQ3))) { - // remove leading/trailing """ or ''' - start = 3; - end = str.length() - 3; - } - - for (int i = start; i < end; i++) { + for (int i = 0; i < str.length(); i++) { if (str.charAt(i) == '\\' && i + 1 < str.length()) { i += 1; @@ -265,6 +254,12 @@ public String remEscapes(String str) { case '\\': retval.append('\\'); continue; + default: + // If the character following the backslash is not one of those + // specified above, the backslash is left in the result string + 
retval.append('\\'); + retval.append(str.charAt(i)); + continue; } } else { @@ -274,14 +269,13 @@ public String remEscapes(String str) { return retval.toString(); } - /** - * <> - * return Constant cst(dt) with: - * dt=bnode triple reference isTriple() == true - * when sparql: cst.triple = triple(s p o) - * when load: cst.triple = null, edge created in graph directly + * <> + * return Constant cst(dt) with: + * dt=bnode triple reference isTriple() == true + * when sparql: cst.triple = triple(s p o) + * when load: cst.triple = null, edge created in graph directly * */ public Atom createNestedTripleStar(ASTQuery ast, Exp stack, Atom s, Atom p, Atom o, Atom v) { @@ -293,8 +287,6 @@ public Atom createNestedTripleStar(ASTQuery ast, Exp stack, Atom s, Atom p, Atom return createTripleStar(ast, stack, s, p, o, ref, true); } - - Atom createTripleStar(ASTQuery ast, Exp stack, Atom s, Atom p, Atom o, Atom ref, boolean nested) { Triple t = createTripleStar(ast, s, p, o, ref, nested); if (t != null) { @@ -303,11 +295,11 @@ Atom createTripleStar(ASTQuery ast, Exp stack, Atom s, Atom p, Atom o, Atom ref, } return ref; } - + Triple createTripleStar(ASTQuery ast, Atom s, Atom p, Atom o, Atom ref) { return createTripleStar(ast, s, p, o, ref, false); } - + Triple createTripleStar(ASTQuery ast, Atom s, Atom p, Atom o, Atom ref, boolean nested) { ArrayList list = new ArrayList<>(); list.add(s); @@ -321,46 +313,42 @@ Triple createTripleStar(ASTQuery ast, Atom s, Atom p, Atom o, Atom ref, boolean } return createTriple(ast, p, list, true, nested); } - + /** * Generate ref st: * <> q v * triple(s p o ref) . ref q v - */ + */ public Atom createTripleReference(ASTQuery ast) { return createTripleReference(ast, null); } - + public Atom createTripleReference(ASTQuery ast, Atom var) { Atom ref; if (isTurtleLoader() || isInsideValues()) { // Constant with Datatype Blank Node with isTriple() == true // Once in the graph, Datatype will contain Edge(s p o t) ref = ast.tripleReferenceDefinition(); - } - else if (var != null) { + } else if (var != null) { ref = var; - } - else if (isInsideWhere()) { + } else if (isInsideWhere()) { if (ast.isUpdate()) { // delete works with a variable, not with a bnode - // use case: delete where {} and delete is empty + // use case: delete where {} and delete is empty // Variable with isTriple() == true ref = ast.tripleReferenceVariable(); - } - else { + } else { // Variable with isBlankNode() == true and isTriple() == true // not returned by select * - ref = ast.tripleReferenceQuery(); + ref = ast.tripleReferenceQuery(); } + } else { + // insert delete data + // insert delete -- where + // construct -- where + ref = ast.tripleReferenceDefinition(); } - else { - // insert delete data - // insert delete -- where - // construct -- where - ref = ast.tripleReferenceDefinition(); - } - + return ref; } @@ -377,7 +365,7 @@ public Metadata getMetadata() { public void setMetadata(Metadata metadata) { this.metadata = metadata; } - + public void cleanMetadata() { setMetadata(null); } @@ -395,63 +383,60 @@ public boolean isFunction() { public void setFunction(boolean function) { this.function = function; } - + /** * pragma: name is variable without ? and $ * are we in LDScript or in SPARQL ? * */ - public void checkVariable(Token name) { - if (! 
isFunction()) { - System.out.println("Incorrect Variable: " + name + " Line: " + name.beginLine); + public void checkVariable(Token name) { + if (!isFunction()) { throw new Error("Incorrect Variable: " + name + " Line: " + name.beginLine); } } - + public void enterWhere() { countWhere++; } - + public void leaveWhere() { countWhere--; } - + public void enterService(ASTQuery ast, Atom at) { if (at.isConstant()) { ast.enterService(at); } } - + public void leaveService(ASTQuery ast) { ast.leaveService(); } - + public boolean isInsideWhere() { - return countWhere>0; + return countWhere > 0; } - + public void enterDelete() { - insideDelete = true; + insideDelete = true; } - + public void leaveDelete() { insideDelete = false; } - - + public boolean isInsideDelete() { return insideDelete; } - + public void enterDeleteData() { - insideDeleteData = true; + insideDeleteData = true; } - + public void leaveDeleteData() { insideDeleteData = false; } - - + public boolean isInsideDeleteData() { return insideDeleteData; } @@ -471,17 +456,17 @@ public ArrayList getErrorList() { public void setErrorList(ArrayList errorList) { this.errorList = errorList; } - + boolean isTurtleLoader() { return turtleLoader; } - + void setTurtleLoader(boolean b) { turtleLoader = b; } - + boolean isSparqlParser() { - return ! isTurtleLoader(); + return !isTurtleLoader(); } public Creator getCreate() { @@ -499,5 +484,5 @@ public Token getBnode() { public void setBnode(Token bnode) { this.bnode = bnode; } - + } From f580ddef9a8a30aafd4bed80532aaf6eb999a516 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Fri, 12 Apr 2024 11:13:33 +0200 Subject: [PATCH 044/146] Update release URI and date in earl repport --- .../corese/w3cEarlRepportGenerator/EarlRepportGenerator.java | 4 ++-- .../fr/inria/corese/w3c/canonicalRdf/canonicalRdfTest.java | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/corese-unit-test/src/main/java/fr/inria/corese/w3cEarlRepportGenerator/EarlRepportGenerator.java b/corese-unit-test/src/main/java/fr/inria/corese/w3cEarlRepportGenerator/EarlRepportGenerator.java index c8e0eeccd..0f94099fd 100644 --- a/corese-unit-test/src/main/java/fr/inria/corese/w3cEarlRepportGenerator/EarlRepportGenerator.java +++ b/corese-unit-test/src/main/java/fr/inria/corese/w3cEarlRepportGenerator/EarlRepportGenerator.java @@ -47,8 +47,8 @@ public class EarlRepportGenerator { private final String softwareBlog = "https://github.com/Wimmics/corese/discussions/"; private final String softwareProgrammingLanguage = "Java"; - private final String releaseURI = "160645274c021c59eaebc113054ec55d0be8995b"; - private final String releaseDate = "2024-03-07"; + private final String releaseURI = "fc1825918302fec47852dc1f73ad1175c84fd7d1"; + private final String releaseDate = "2024-04-11"; private final Path reportDir = Path.of("corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf"); private final Path inputReportPath = reportDir.resolve("testReport.csv"); diff --git a/corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf/canonicalRdfTest.java b/corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf/canonicalRdfTest.java index 59b1d548c..8be53860d 100644 --- a/corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf/canonicalRdfTest.java +++ b/corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf/canonicalRdfTest.java @@ -30,7 +30,7 @@ /** * Auto-generated JUnit test file for the W3C test suite: https://w3c.github.io/rdf-canon/tests/manifest.ttl * This file was 
automatically generated by JUnitTestFileGenerator.java. - * Generation date: 2024-03-07, Time: 09:21:34 Europe/Paris + * Generation date: 2024-04-11, Time: 16:06:27 Europe/Paris */ public class canonicalRdfTest { From b88e2262ce120adf17430f407865cc2c349aa092 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Fri, 12 Apr 2024 11:13:45 +0200 Subject: [PATCH 045/146] Update launch_ssl_server.sh and stop_ssl_server.sh to include osfamily attribute --- corese-server/pom.xml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/corese-server/pom.xml b/corese-server/pom.xml index 75dc0bae5..ef9821202 100644 --- a/corese-server/pom.xml +++ b/corese-server/pom.xml @@ -191,6 +191,7 @@ @@ -208,6 +209,7 @@ From 7133a2ae5cf80bc53226c5479e1672cf3ba0941c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Fri, 12 Apr 2024 15:29:26 +0200 Subject: [PATCH 046/146] if query method is not define by user, then automatically determine the best query method --- .../corese/command/programs/RemoteSparql.java | 2 +- .../command/utils/http/SparqlHttpClient.java | 17 ++++++++++++++++- 2 files changed, 17 insertions(+), 2 deletions(-) diff --git a/corese-command/src/main/java/fr/inria/corese/command/programs/RemoteSparql.java b/corese-command/src/main/java/fr/inria/corese/command/programs/RemoteSparql.java index aa291f94f..4743123ef 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/programs/RemoteSparql.java +++ b/corese-command/src/main/java/fr/inria/corese/command/programs/RemoteSparql.java @@ -48,7 +48,7 @@ public class RemoteSparql implements Callable { private String accept; @Option(names = { "-m", - "--request-method" }, description = "HTTP request method to use (GET, POST-urlencoded, POST-direct).", defaultValue = "GET") + "--request-method" }, description = "HTTP request method to use (GET, POST-urlencoded, POST-direct).") private EnumRequestMethod requestMethod; @Option(names = { "-v", diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/http/SparqlHttpClient.java b/corese-command/src/main/java/fr/inria/corese/command/utils/http/SparqlHttpClient.java index 978f62f59..17bfbfe93 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/utils/http/SparqlHttpClient.java +++ b/corese-command/src/main/java/fr/inria/corese/command/utils/http/SparqlHttpClient.java @@ -32,6 +32,7 @@ public class SparqlHttpClient { private final String endpointUrl; private EnumRequestMethod queryMethod = EnumRequestMethod.GET; + private Boolean queryMethodIsDefinedByUser = false; private List> headers = new ArrayList<>(); private boolean verbose = false; @@ -66,7 +67,10 @@ public SparqlHttpClient(String endpointUrl) { * @param requestMethod the query method */ public void setQueryMethod(EnumRequestMethod requestMethod) { - this.queryMethod = requestMethod; + if (requestMethod != null) { + this.queryMethod = requestMethod; + this.queryMethodIsDefinedByUser = true; + } } /** @@ -280,6 +284,17 @@ private void validateQuery(String queryString, List defaultGraphUris, Li Query query = buildQuery(queryString); + if (!this.queryMethodIsDefinedByUser) { + // Check if the query is an update query. + if (query.getAST().isSPARQLUpdate()) { + // If it is an update query, set the request method to POST_DIRECT. + this.queryMethod = EnumRequestMethod.POST_DIRECT; + } else { + // If the query is not an update query, set the request method to GET. + // No need to set it here as GET is already the default value. 
+ } + } + // Check if the query is an update query and the method is GET // which is not allowed by the SPARQL specification // (see https://www.w3.org/TR/sparql11-protocol/#update-operation) From 8d3cb7507f619b8d144f948d373947d070d24697 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Fri, 12 Apr 2024 15:32:01 +0200 Subject: [PATCH 047/146] Rename query-methods to request-methods for clarity --- .../java/fr/inria/corese/command/programs/RemoteSparql.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/corese-command/src/main/java/fr/inria/corese/command/programs/RemoteSparql.java b/corese-command/src/main/java/fr/inria/corese/command/programs/RemoteSparql.java index 4743123ef..de4f2dac8 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/programs/RemoteSparql.java +++ b/corese-command/src/main/java/fr/inria/corese/command/programs/RemoteSparql.java @@ -48,7 +48,7 @@ public class RemoteSparql implements Callable { private String accept; @Option(names = { "-m", - "--request-method" }, description = "HTTP request method to use (GET, POST-urlencoded, POST-direct).") + "--request-method" }, description = "HTTP request method to use. Possible values:\u001b[34m ${COMPLETION-CANDIDATES}\u001b[0m.") private EnumRequestMethod requestMethod; @Option(names = { "-v", @@ -191,7 +191,7 @@ public String sendRequest() throws Exception { SparqlHttpClient client = new SparqlHttpClient(this.endpoint_url); this.parseHeader(client); - client.setQueryMethod(this.requestMethod); + client.setRequestMethod(this.requestMethod); client.setVerbose(this.verbose); client.setMaxRedirection(this.maxRedirection); From c3335cc790bd24a02446f36e3d6caac4bba467f4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Fri, 12 Apr 2024 15:33:37 +0200 Subject: [PATCH 048/146] Add color coding to enum display in Corese-command help output --- .../corese/command/programs/Convert.java | 4 +- .../inria/corese/command/programs/Shacl.java | 6 +-- .../inria/corese/command/programs/Sparql.java | 4 +- .../command/utils/http/SparqlHttpClient.java | 44 +++++++++---------- 4 files changed, 29 insertions(+), 29 deletions(-) diff --git a/corese-command/src/main/java/fr/inria/corese/command/programs/Convert.java b/corese-command/src/main/java/fr/inria/corese/command/programs/Convert.java index a88eedee2..ce64a12a5 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/programs/Convert.java +++ b/corese-command/src/main/java/fr/inria/corese/command/programs/Convert.java @@ -35,7 +35,7 @@ public class Convert implements Callable { private String input; @Option(names = { "-f", "-if", - "--input-format" }, description = "RDF serialization format of the input file. Possible values: ${COMPLETION-CANDIDATES}.") + "--input-format" }, description = "RDF serialization format of the input file. Possible values:\u001b[34m ${COMPLETION-CANDIDATES}\u001b[0m.") private EnumInputFormat inputFormat = null; @Option(names = { "-o", @@ -43,7 +43,7 @@ public class Convert implements Callable { private Path output; @Option(names = { "-r", "-of", - "--output-format" }, required = true, description = "Serialization format to which the input file should be converted. Possible values: ${COMPLETION-CANDIDATES}.") + "--output-format" }, required = true, description = "Serialization format to which the input file should be converted. 
Possible values:\u001b[34m ${COMPLETION-CANDIDATES}\u001b[0m.") private EnumOutputFormat outputFormat; @Option(names = { "-v", diff --git a/corese-command/src/main/java/fr/inria/corese/command/programs/Shacl.java b/corese-command/src/main/java/fr/inria/corese/command/programs/Shacl.java index a0487e0e4..a349d7409 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/programs/Shacl.java +++ b/corese-command/src/main/java/fr/inria/corese/command/programs/Shacl.java @@ -31,7 +31,7 @@ public class Shacl implements Callable { private CommandSpec spec; @Option(names = { "-f", "-if", - "--input-format" }, description = "RDF serialization format of the input file. Possible values: ${COMPLETION-CANDIDATES}.") + "--input-format" }, description = "RDF serialization format of the input file. Possible values:\u001b[34m ${COMPLETION-CANDIDATES}\u001b[0m.") private EnumInputFormat inputFormat = null; @Option(names = { "-i", @@ -39,7 +39,7 @@ public class Shacl implements Callable { private String[] rdfData; @Option(names = { "-a", "-sf", - "--shapes-format" }, description = "Serialization format of the SHACL shapes. Possible values: ${COMPLETION-CANDIDATES}.", defaultValue = "TURTLE") + "--shapes-format" }, description = "Serialization format of the SHACL shapes. Possible values:\u001b[34m ${COMPLETION-CANDIDATES}\u001b[0m.", defaultValue = "TURTLE") private EnumInputFormat reportFormat = null; @Option(names = { "-s", @@ -47,7 +47,7 @@ public class Shacl implements Callable { private String[] shaclShapes; @Option(names = { "-r", "-of", - "--output-format" }, description = "Serialization format of the validation report. Possible values: ${COMPLETION-CANDIDATES}.", defaultValue = "TURTLE") + "--output-format" }, description = "Serialization format of the validation report. Possible values:\u001b[34m ${COMPLETION-CANDIDATES}\u001b[0m.", defaultValue = "TURTLE") private EnumOutputFormat outputFormat = null; @Option(names = { "-o", diff --git a/corese-command/src/main/java/fr/inria/corese/command/programs/Sparql.java b/corese-command/src/main/java/fr/inria/corese/command/programs/Sparql.java index 896a82989..be5f47d83 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/programs/Sparql.java +++ b/corese-command/src/main/java/fr/inria/corese/command/programs/Sparql.java @@ -41,7 +41,7 @@ public class Sparql implements Callable { private CommandSpec spec; @Option(names = { "-f", "-if", - "--input-format" }, description = "RDF serialization format of the input file. Possible values: ${COMPLETION-CANDIDATES}.") + "--input-format" }, description = "RDF serialization format of the input file. Possible values:\u001b[34m ${COMPLETION-CANDIDATES}\u001b[0m.") private EnumInputFormat inputFormat = null; @Option(names = { "-i", @@ -49,7 +49,7 @@ public class Sparql implements Callable { private String[] inputs; @Option(names = { "-r", "-of", - "--result-format" }, description = "Result fileformat. Possible values: ${COMPLETION-CANDIDATES}. ") + "--result-format" }, description = "Result fileformat. Possible values:\u001b[34m ${COMPLETION-CANDIDATES}\u001b[0m. 
") private EnumResultFormat resultFormat = null; @Option(names = { "-o", diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/http/SparqlHttpClient.java b/corese-command/src/main/java/fr/inria/corese/command/utils/http/SparqlHttpClient.java index 17bfbfe93..d2059c1ef 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/utils/http/SparqlHttpClient.java +++ b/corese-command/src/main/java/fr/inria/corese/command/utils/http/SparqlHttpClient.java @@ -31,8 +31,8 @@ public class SparqlHttpClient { private final String endpointUrl; - private EnumRequestMethod queryMethod = EnumRequestMethod.GET; - private Boolean queryMethodIsDefinedByUser = false; + private EnumRequestMethod requestMethod = EnumRequestMethod.GET; + private Boolean requestMethodIsDefinedByUser = false; private List> headers = new ArrayList<>(); private boolean verbose = false; @@ -62,14 +62,14 @@ public SparqlHttpClient(String endpointUrl) { /////////////////////// /** - * Sets the query method. + * Sets the request method. * - * @param requestMethod the query method + * @param requestMethod the request method */ - public void setQueryMethod(EnumRequestMethod requestMethod) { + public void setRequestMethod(EnumRequestMethod requestMethod) { if (requestMethod != null) { - this.queryMethod = requestMethod; - this.queryMethodIsDefinedByUser = true; + this.requestMethod = requestMethod; + this.requestMethodIsDefinedByUser = true; } } @@ -162,10 +162,10 @@ public String sendRequest(String query, List defaultGraphUris, List defaultGraphUris, Li Query query = buildQuery(queryString); - if (!this.queryMethodIsDefinedByUser) { + if (!this.requestMethodIsDefinedByUser) { // Check if the query is an update query. if (query.getAST().isSPARQLUpdate()) { // If it is an update query, set the request method to POST_DIRECT. - this.queryMethod = EnumRequestMethod.POST_DIRECT; + this.requestMethod = EnumRequestMethod.POST_DIRECT; } else { // If the query is not an update query, set the request method to GET. // No need to set it here as GET is already the default value. @@ -298,7 +298,7 @@ private void validateQuery(String queryString, List defaultGraphUris, Li // Check if the query is an update query and the method is GET // which is not allowed by the SPARQL specification // (see https://www.w3.org/TR/sparql11-protocol/#update-operation) - if (this.queryMethod == EnumRequestMethod.GET && query.getAST().isSPARQLUpdate()) { + if (this.requestMethod == EnumRequestMethod.GET && query.getAST().isSPARQLUpdate()) { throw new IllegalArgumentException( "SPARQL query is an update query, but GET method is used. 
Please use a POST method instead."); } @@ -384,12 +384,12 @@ private WebTarget buildWebTarget( WebTarget webTarget = client.target(endpoint); // Add the query parameter - if (this.queryMethod == EnumRequestMethod.GET) { + if (this.requestMethod == EnumRequestMethod.GET) { webTarget = webTarget.queryParam("query", this.encode(query)); } // Add graph URIs - if (this.queryMethod == EnumRequestMethod.GET || this.queryMethod == EnumRequestMethod.POST_DIRECT) { + if (this.requestMethod == EnumRequestMethod.GET || this.requestMethod == EnumRequestMethod.POST_DIRECT) { for (String defaultGraphUri : defaultGraphUris) { webTarget = webTarget.queryParam("default-graph-uri", this.encode(defaultGraphUri)); } @@ -416,7 +416,7 @@ private String buildRequestBody( StringBuilder bodyContent = new StringBuilder(); - if (this.queryMethod == EnumRequestMethod.POST_URLENCODED) { + if (this.requestMethod == EnumRequestMethod.POST_URLENCODED) { // Add the query parameter bodyContent.append("query=").append(this.encode(query)); @@ -427,7 +427,7 @@ private String buildRequestBody( for (String namedGraphUri : namedGraphUris) { bodyContent.append("&named-graph-uri=").append(this.encode(namedGraphUri)); } - } else if (this.queryMethod == EnumRequestMethod.POST_DIRECT) { + } else if (this.requestMethod == EnumRequestMethod.POST_DIRECT) { // Add the query parameter bodyContent.append(query); } @@ -454,11 +454,11 @@ private Response executeRequest(WebTarget webTarget, String bodyContent) { } // Send the request - if (this.queryMethod == EnumRequestMethod.GET) { + if (this.requestMethod == EnumRequestMethod.GET) { response = builder.get(); - } else if (this.queryMethod == EnumRequestMethod.POST_URLENCODED) { + } else if (this.requestMethod == EnumRequestMethod.POST_URLENCODED) { response = builder.post(Entity.entity(bodyContent, MediaType.APPLICATION_FORM_URLENCODED)); - } else if (this.queryMethod == EnumRequestMethod.POST_DIRECT) { + } else if (this.requestMethod == EnumRequestMethod.POST_DIRECT) { response = builder.post(Entity.entity(bodyContent, "application/sparql-query")); } From d15da243860d9dbd42f7e3d171ea52e8bfb8d356 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Fri, 12 Apr 2024 15:43:41 +0200 Subject: [PATCH 049/146] Update default for update query to Post_Encoded --- .../fr/inria/corese/command/utils/http/SparqlHttpClient.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/http/SparqlHttpClient.java b/corese-command/src/main/java/fr/inria/corese/command/utils/http/SparqlHttpClient.java index d2059c1ef..dd0e51d93 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/utils/http/SparqlHttpClient.java +++ b/corese-command/src/main/java/fr/inria/corese/command/utils/http/SparqlHttpClient.java @@ -287,8 +287,8 @@ private void validateQuery(String queryString, List defaultGraphUris, Li if (!this.requestMethodIsDefinedByUser) { // Check if the query is an update query. if (query.getAST().isSPARQLUpdate()) { - // If it is an update query, set the request method to POST_DIRECT. - this.requestMethod = EnumRequestMethod.POST_DIRECT; + // If it is an update query, set the request method to POST_Encoded. + this.requestMethod = EnumRequestMethod.POST_URLENCODED; } else { // If the query is not an update query, set the request method to GET. // No need to set it here as GET is already the default value. 
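
Patches 046 through 049 make SparqlHttpClient pick the HTTP request method on its own when the caller has not set one: query forms keep the GET default, while update forms are sent as a URL-encoded POST, since the SPARQL 1.1 Protocol does not allow update operations over GET. The standalone sketch below only mirrors that decision rule; the class, enum and helper names (RequestMethodSketch, Method, looksLikeUpdate, chooseMethod) are invented for illustration, and the keyword test is a crude stand-in for the parser-based check query.getAST().isSPARQLUpdate() that the patches actually use.

public class RequestMethodSketch {

    enum Method { GET, POST_URLENCODED }

    // Crude stand-in for Corese's parser-based update detection
    // (query.getAST().isSPARQLUpdate()); illustrative only.
    static boolean looksLikeUpdate(String sparql) {
        String q = sparql.toUpperCase();
        return q.contains("INSERT") || q.contains("DELETE")
                || q.contains("LOAD") || q.contains("CLEAR") || q.contains("DROP");
    }

    // Keep an explicit user choice; otherwise GET for queries and
    // URL-encoded POST for updates (updates over GET are rejected by the protocol).
    static Method chooseMethod(String sparql, Method userChoice) {
        if (userChoice != null) {
            return userChoice;
        }
        return looksLikeUpdate(sparql) ? Method.POST_URLENCODED : Method.GET;
    }

    public static void main(String[] args) {
        System.out.println(chooseMethod("SELECT * WHERE { ?s ?p ?o }", null));              // GET
        System.out.println(chooseMethod("INSERT DATA { <urn:a> <urn:b> <urn:c> }", null));  // POST_URLENCODED
        System.out.println(chooseMethod("ASK { ?s ?p ?o }", Method.POST_URLENCODED));       // POST_URLENCODED (explicit)
    }
}

As in the patched client, an explicit user choice always takes precedence over the automatic selection; the automatic rule only fills in the default when no method was given on the command line.
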
From cff6b6447d5d81ac8421ee6fc93decbcb816faea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Mon, 15 Apr 2024 15:23:25 +0200 Subject: [PATCH 050/146] Add support for N-Triples format in server --- .../main/java/fr/inria/corese/core/print/ResultFormat.java | 5 +++++ .../java/fr/inria/corese/sparql/triple/parser/Metadata.java | 1 + 2 files changed, 6 insertions(+) diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java b/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java index 38ba2a6fa..674ac75e4 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java @@ -34,6 +34,7 @@ public class ResultFormat implements ResultFormatDef { public static final String SPARQL_RESULTS_CSV = "text/csv"; // application/sparql-results+csv"; public static final String SPARQL_RESULTS_TSV = "text/tab-separated-values"; // application/sparql-results+tsv"; public static final String SPARQL_RESULTS_MD = "text/markdown"; + public static final String SPARQL_RESULTS_NT = "application/n-triples"; static final String HEADER = "\n" + "\n" @@ -96,6 +97,7 @@ static void init() { table.put(Metadata.DISPLAY_XML, XML_FORMAT); table.put(Metadata.DISPLAY_JSON, JSON_FORMAT); table.put(Metadata.DISPLAY_MARKDOWN, MARKDOWN_FORMAT); + table.put(Metadata.DISPLAY_NT, NTRIPLES_FORMAT); } @@ -112,6 +114,8 @@ static void initFormat() { defContent(SPARQL_RESULTS_CSV, CSV_FORMAT); defContent(SPARQL_RESULTS_TSV, TSV_FORMAT); defContent(SPARQL_RESULTS_MD, MARKDOWN_FORMAT); + defContent(SPARQL_RESULTS_NT, NTRIPLES_FORMAT); + // Graph defContent(RDF_XML, RDF_XML_FORMAT); defContent(TURTLE_TEXT, TURTLE_FORMAT); @@ -134,6 +138,7 @@ static void initFormat() { format.put("csv", CSV_FORMAT); format.put("tsv", TSV_FORMAT); format.put("markdown", MARKDOWN_FORMAT); + format.put("nt", NTRIPLES_FORMAT); format.put("jsonld", JSONLD_FORMAT); format.put("rdf", TURTLE_FORMAT); diff --git a/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Metadata.java b/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Metadata.java index 75dc41ba7..c6a3c8aec 100644 --- a/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Metadata.java +++ b/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Metadata.java @@ -106,6 +106,7 @@ public class Metadata extends ASTObject public static final String DISPLAY_XML = PREF + "xml"; public static final String DISPLAY_RDF = PREF + "rdf"; public static final String DISPLAY_MARKDOWN = PREF + "markdown"; + public static final String DISPLAY_NT = PREF + "nt"; public static final String RELAX_URI = PREF + "uri"; public static final String RELAX_PROPERTY = PREF + "property"; From aa3f9175123955a9f6e55449cb367a3ba646a889 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Mon, 15 Apr 2024 16:44:57 +0200 Subject: [PATCH 051/146] Add support for N-Quads format in server --- .../main/java/fr/inria/corese/core/print/ResultFormat.java | 5 +++++ .../java/fr/inria/corese/sparql/triple/parser/Metadata.java | 5 +++-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java b/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java index 674ac75e4..cea36ebab 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java @@ -35,6 +35,8 @@ 
public class ResultFormat implements ResultFormatDef { public static final String SPARQL_RESULTS_TSV = "text/tab-separated-values"; // application/sparql-results+tsv"; public static final String SPARQL_RESULTS_MD = "text/markdown"; public static final String SPARQL_RESULTS_NT = "application/n-triples"; + public static final String SPARQL_RESULTS_NQ = "application/n-quads"; + public static final String SPARQL_RESULTS_HTML = "application/n-quads"; static final String HEADER = "\n" + "\n" @@ -98,6 +100,7 @@ static void init() { table.put(Metadata.DISPLAY_JSON, JSON_FORMAT); table.put(Metadata.DISPLAY_MARKDOWN, MARKDOWN_FORMAT); table.put(Metadata.DISPLAY_NT, NTRIPLES_FORMAT); + table.put(Metadata.DISPLAY_NQ, NQUADS_FORMAT); } @@ -115,6 +118,7 @@ static void initFormat() { defContent(SPARQL_RESULTS_TSV, TSV_FORMAT); defContent(SPARQL_RESULTS_MD, MARKDOWN_FORMAT); defContent(SPARQL_RESULTS_NT, NTRIPLES_FORMAT); + defContent(SPARQL_RESULTS_NQ, NQUADS_FORMAT); // Graph defContent(RDF_XML, RDF_XML_FORMAT); @@ -139,6 +143,7 @@ static void initFormat() { format.put("tsv", TSV_FORMAT); format.put("markdown", MARKDOWN_FORMAT); format.put("nt", NTRIPLES_FORMAT); + format.put("nq", NQUADS_FORMAT); format.put("jsonld", JSONLD_FORMAT); format.put("rdf", TURTLE_FORMAT); diff --git a/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Metadata.java b/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Metadata.java index c6a3c8aec..29e9c57e2 100644 --- a/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Metadata.java +++ b/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Metadata.java @@ -98,15 +98,16 @@ public class Metadata extends ASTObject public static final int FOCUS = 75; static final String PREF = NSManager.KGRAM; + public static final String DISPLAY_RDF_XML = PREF + "rdfxml"; public static final String DISPLAY_TURTLE = PREF + "turtle"; public static final String DISPLAY_JSON_LD = PREF + "jsonld"; - public static final String DISPLAY_RDF_XML = PREF + "rdfxml"; + public static final String DISPLAY_NT = PREF + "nt"; + public static final String DISPLAY_NQ = PREF + "nq"; public static final String DISPLAY_JSON = PREF + "json"; public static final String DISPLAY_XML = PREF + "xml"; public static final String DISPLAY_RDF = PREF + "rdf"; public static final String DISPLAY_MARKDOWN = PREF + "markdown"; - public static final String DISPLAY_NT = PREF + "nt"; public static final String RELAX_URI = PREF + "uri"; public static final String RELAX_PROPERTY = PREF + "property"; From fc42a30479bb28ad61dc86075517d310fdeabe62 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Tue, 16 Apr 2024 11:15:53 +0200 Subject: [PATCH 052/146] Clean resultFormat --- .../fr/inria/corese/core/print/ResultFormat.java | 13 ++++--------- .../inria/corese/sparql/triple/parser/Metadata.java | 3 --- 2 files changed, 4 insertions(+), 12 deletions(-) diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java b/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java index cea36ebab..1631647a5 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java @@ -34,8 +34,6 @@ public class ResultFormat implements ResultFormatDef { public static final String SPARQL_RESULTS_CSV = "text/csv"; // application/sparql-results+csv"; public static final String SPARQL_RESULTS_TSV = "text/tab-separated-values"; // application/sparql-results+tsv"; public static final 
String SPARQL_RESULTS_MD = "text/markdown"; - public static final String SPARQL_RESULTS_NT = "application/n-triples"; - public static final String SPARQL_RESULTS_NQ = "application/n-quads"; public static final String SPARQL_RESULTS_HTML = "application/n-quads"; static final String HEADER = "\n" @@ -98,9 +96,6 @@ static void init() { table.put(Metadata.DISPLAY_RDF, RDF_FORMAT); table.put(Metadata.DISPLAY_XML, XML_FORMAT); table.put(Metadata.DISPLAY_JSON, JSON_FORMAT); - table.put(Metadata.DISPLAY_MARKDOWN, MARKDOWN_FORMAT); - table.put(Metadata.DISPLAY_NT, NTRIPLES_FORMAT); - table.put(Metadata.DISPLAY_NQ, NQUADS_FORMAT); } @@ -117,14 +112,14 @@ static void initFormat() { defContent(SPARQL_RESULTS_CSV, CSV_FORMAT); defContent(SPARQL_RESULTS_TSV, TSV_FORMAT); defContent(SPARQL_RESULTS_MD, MARKDOWN_FORMAT); - defContent(SPARQL_RESULTS_NT, NTRIPLES_FORMAT); - defContent(SPARQL_RESULTS_NQ, NQUADS_FORMAT); // Graph defContent(RDF_XML, RDF_XML_FORMAT); defContent(TURTLE_TEXT, TURTLE_FORMAT); defContent(TRIG, TRIG_FORMAT); defContent(JSON_LD, JSONLD_FORMAT); + defContent(N_TRIPLES, NTRIPLES_FORMAT); + defContent(N_QUADS, NQUADS_FORMAT); // defContent(JSON, JSON_LD_FORMAT); format.put(TRIG_TEXT, TRIG_FORMAT); @@ -142,14 +137,14 @@ static void initFormat() { format.put("csv", CSV_FORMAT); format.put("tsv", TSV_FORMAT); format.put("markdown", MARKDOWN_FORMAT); - format.put("nt", NTRIPLES_FORMAT); - format.put("nq", NQUADS_FORMAT); format.put("jsonld", JSONLD_FORMAT); format.put("rdf", TURTLE_FORMAT); format.put("turtle", TURTLE_FORMAT); format.put("trig", TRIG_FORMAT); format.put("rdfxml", RDF_XML_FORMAT); + format.put("nt", NTRIPLES_FORMAT); + format.put("nq", NQUADS_FORMAT); } static void defContent(String f, int t) { diff --git a/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Metadata.java b/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Metadata.java index 29e9c57e2..2b273b2a8 100644 --- a/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Metadata.java +++ b/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Metadata.java @@ -101,13 +101,10 @@ public class Metadata extends ASTObject public static final String DISPLAY_RDF_XML = PREF + "rdfxml"; public static final String DISPLAY_TURTLE = PREF + "turtle"; public static final String DISPLAY_JSON_LD = PREF + "jsonld"; - public static final String DISPLAY_NT = PREF + "nt"; - public static final String DISPLAY_NQ = PREF + "nq"; public static final String DISPLAY_JSON = PREF + "json"; public static final String DISPLAY_XML = PREF + "xml"; public static final String DISPLAY_RDF = PREF + "rdf"; - public static final String DISPLAY_MARKDOWN = PREF + "markdown"; public static final String RELAX_URI = PREF + "uri"; public static final String RELAX_PROPERTY = PREF + "property"; From f90f418ce0880fed5b5e134121ee4d639b337dc0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Tue, 16 Apr 2024 14:47:56 +0200 Subject: [PATCH 053/146] Fix Nquads format --- .../src/main/java/fr/inria/corese/core/print/ResultFormat.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java b/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java index 1631647a5..d47ea2bf7 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java @@ -447,8 +447,7 @@ String graphToString(Node node) { case NTRIPLES_FORMAT: return 
NTriplesFormat.create(getGraph()).toString(); case NQUADS_FORMAT: - return TripleFormat.create(getGraph(), true) - .setNbTriple(getNbTriple()).toString(node); + return NQuadsFormat.create(getGraph()).toString(); case TURTLE_FORMAT: default: // e.g. HTML From b9c1beddf2e99acc9709623e3dce96aa8c23f0e1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Tue, 16 Apr 2024 14:49:02 +0200 Subject: [PATCH 054/146] Add export options for NTriple and NQuad formats --- .../fr/inria/corese/gui/core/MainFrame.java | 66 ++++++++++++++----- 1 file changed, 49 insertions(+), 17 deletions(-) diff --git a/corese-gui/src/main/java/fr/inria/corese/gui/core/MainFrame.java b/corese-gui/src/main/java/fr/inria/corese/gui/core/MainFrame.java index c7076cb4b..c71373b97 100755 --- a/corese-gui/src/main/java/fr/inria/corese/gui/core/MainFrame.java +++ b/corese-gui/src/main/java/fr/inria/corese/gui/core/MainFrame.java @@ -139,9 +139,11 @@ public class MainFrame extends JFrame implements ActionListener { private JMenuItem refresh; private JMenuItem exportRDF; private JMenuItem exportTurtle; - private JMenuItem exportOwl; - private JMenuItem exportJson; private JMenuItem exportTrig; + private JMenuItem exportJson; + private JMenuItem exportNt; + private JMenuItem exportNq; + private JMenuItem exportOwl; private JMenuItem copy; private JMenuItem cut; private JMenuItem paste; @@ -636,17 +638,25 @@ private void initMenu() { exportTurtle.addActionListener(this); exportTurtle.setToolTipText("Export graph in Turtle format"); - exportOwl = new JMenuItem("OWL"); - exportOwl.addActionListener(this); - exportOwl.setToolTipText("Export graph in OWL format"); + exportTrig = new JMenuItem("TriG"); + exportTrig.addActionListener(this); + exportTrig.setToolTipText("Export graph in TriG format"); - exportJson = new JMenuItem("JSON"); + exportJson = new JMenuItem("JsonLD"); exportJson.addActionListener(this); exportJson.setToolTipText("Export graph in JSON format"); - exportTrig = new JMenuItem("TriG"); - exportTrig.addActionListener(this); - exportTrig.setToolTipText("Export graph in TriG format"); + exportNt = new JMenuItem("NTriple"); + exportNt.addActionListener(this); + exportNt.setToolTipText("Export graph in NTriple format"); + + exportNq = new JMenuItem("NQuad"); + exportNq.addActionListener(this); + exportNq.setToolTipText("Export graph in NQuad format"); + + exportOwl = new JMenuItem("OWL"); + exportOwl.addActionListener(this); + exportOwl.setToolTipText("Export graph in OWL format"); execWorkflow = new JMenuItem("Process Workflow"); execWorkflow.addActionListener(this); @@ -823,9 +833,11 @@ private void initMenu() { fileMenu.add(fileMenuSaveGraph); fileMenuSaveGraph.add(exportRDF); fileMenuSaveGraph.add(exportTurtle); - fileMenuSaveGraph.add(exportOwl); - fileMenuSaveGraph.add(exportJson); fileMenuSaveGraph.add(exportTrig); + fileMenuSaveGraph.add(exportJson); + fileMenuSaveGraph.add(exportNt); + fileMenuSaveGraph.add(exportNq); + fileMenuSaveGraph.add(exportOwl); fileMenu.add(saveQuery); fileMenu.add(saveResult); @@ -1316,15 +1328,21 @@ else if (e.getSource() == exportRDF) { } // Exporter le graph au format Turle else if (e.getSource() == exportTurtle) { saveGraph(Transformer.TURTLE); - } // Exporter le graph au format OWL - else if (e.getSource() == exportOwl) { - saveGraph(Transformer.OWL); - } // Exporter le graph au format Json - else if (e.getSource() == exportJson) { - saveGraph(Transformer.JSON); } // Exporter le graph au format TriG else if (e.getSource() == exportTrig) { 
saveGraph(Transformer.TRIG); + } // Exporter le graph au format Json + else if (e.getSource() == exportJson) { + saveGraph(Transformer.JSON); + } // Exporter le graph au format NTriple + else if (e.getSource() == exportNt) { + saveGraph(ResultFormat.NTRIPLES_FORMAT); + } // Exporter le graph au format NQuad + else if (e.getSource() == exportNq) { + saveGraph(ResultFormat.NQUADS_FORMAT); + } // Exporter le graph au format OWL + else if (e.getSource() == exportOwl) { + saveGraph(Transformer.OWL); } // Charge et exécute une règle directement else if (e.getSource() == loadAndRunRule) { loadRunRule(); @@ -1483,6 +1501,20 @@ void saveGraph(String format) { } } + /** + * Save the graph in the specified format + * + * @param format the format in which the graph will be saved + * (See ResultFormat.java for the list of formats) + */ + void saveGraph(int format) { + Graph graph = myCorese.getGraph(); + + ResultFormat ft = ResultFormat.create(graph, format); + save(ft.toString()); + + } + void saveQuery() { // Créer un JFileChooser JFileChooser filechoose = new JFileChooser(getPath()); From 0ffb1d16e0ebeb60604d6a161d7b8bf14d99258d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Tue, 16 Apr 2024 14:49:13 +0200 Subject: [PATCH 055/146] Add error handling for files without extensions in MainFrame.java --- .../src/main/java/fr/inria/corese/gui/core/MainFrame.java | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/corese-gui/src/main/java/fr/inria/corese/gui/core/MainFrame.java b/corese-gui/src/main/java/fr/inria/corese/gui/core/MainFrame.java index c71373b97..92fbc200f 100755 --- a/corese-gui/src/main/java/fr/inria/corese/gui/core/MainFrame.java +++ b/corese-gui/src/main/java/fr/inria/corese/gui/core/MainFrame.java @@ -1761,6 +1761,14 @@ void load(boolean wf, boolean exec, boolean run, Filter... 
filter) { if (!model.contains(lPath) && !wf) { model.addElement(lPath); } + + if (extension(lPath) == null) { + appendMsg("Error: No extension for file: " + lPath + "\n"); + appendMsg("Please select a file with an extension (e.g: .ttl, .rdf, .trig, .jsonld, .html, ...)\n"); + appendMsg("Load is aborted\n"); + return; + } + appendMsg("Loading " + extension(lPath) + " File from path : " + lPath + "\n"); if (wf) { if (exec) { From 04c11cd013c6a19c9db5b30b97efdca581fc06d8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Tue, 16 Apr 2024 17:04:03 +0200 Subject: [PATCH 056/146] Add support for saving query results in multiple formats --- .../fr/inria/corese/gui/core/MainFrame.java | 64 ++++++++++++++++--- 1 file changed, 55 insertions(+), 9 deletions(-) diff --git a/corese-gui/src/main/java/fr/inria/corese/gui/core/MainFrame.java b/corese-gui/src/main/java/fr/inria/corese/gui/core/MainFrame.java index 92fbc200f..e01f6d2d7 100755 --- a/corese-gui/src/main/java/fr/inria/corese/gui/core/MainFrame.java +++ b/corese-gui/src/main/java/fr/inria/corese/gui/core/MainFrame.java @@ -133,8 +133,13 @@ public class MainFrame extends JFrame implements ActionListener { private JMenuItem loadRule; private JMenuItem loadStyle; private JMenuItem cpTransform, shex; + private JMenu fileMenuSaveResult; private JMenuItem saveQuery; - private JMenuItem saveResult; + private JMenuItem saveResultXml; + private JMenuItem saveResultJson; + private JMenuItem saveResultCsv; + private JMenuItem saveResultTsv; + private JMenuItem saveResultMarkdown; private JMenuItem loadAndRunRule; private JMenuItem refresh; private JMenuItem exportRDF; @@ -353,7 +358,7 @@ public void stateChanged(ChangeEvent changeEvent) { duplicateFrom.setEnabled(true); comment.setEnabled(true); saveQuery.setEnabled(true); - saveResult.setEnabled(true); + fileMenuSaveResult.setEnabled(true); MyJPanelQuery temp = (MyJPanelQuery) getConteneurOnglets().getComponentAt(selected); @@ -372,7 +377,7 @@ public void stateChanged(ChangeEvent changeEvent) { duplicateFrom.setEnabled(false); comment.setEnabled(false); saveQuery.setEnabled(false); - saveResult.setEnabled(false); + fileMenuSaveResult.setEnabled(false); } // Si l'onglet sélectionné est le "+" on crée un nouvel onglet Query if (c == plus) { @@ -670,8 +675,20 @@ private void initMenu() { saveQuery = new JMenuItem("Save Query"); saveQuery.addActionListener(this); - saveResult = new JMenuItem("Save Result"); - saveResult.addActionListener(this); + saveResultXml = new JMenuItem("XML"); + saveResultXml.addActionListener(this); + + saveResultJson = new JMenuItem("JSON"); + saveResultJson.addActionListener(this); + + saveResultCsv = new JMenuItem("CSV"); + saveResultCsv.addActionListener(this); + + saveResultTsv = new JMenuItem("TSV"); + saveResultTsv.addActionListener(this); + + saveResultMarkdown = new JMenuItem("Markdown"); + saveResultMarkdown.addActionListener(this); itable = new HashMap<>(); @@ -808,6 +825,7 @@ private void initMenu() { JMenu fileMenuLoad = new JMenu("Load"); JMenu fileMenuSaveGraph = new JMenu("Save Graph"); + fileMenuSaveResult = new JMenu("Save Result"); // On ajoute tout au menu fileMenu.add(fileMenuLoad); @@ -840,7 +858,13 @@ private void initMenu() { fileMenuSaveGraph.add(exportOwl); fileMenu.add(saveQuery); - fileMenu.add(saveResult); + + fileMenu.add(fileMenuSaveResult); + fileMenuSaveResult.add(saveResultXml); + fileMenuSaveResult.add(saveResultJson); + fileMenuSaveResult.add(saveResultCsv); + fileMenuSaveResult.add(saveResultTsv); + 
fileMenuSaveResult.add(saveResultMarkdown); queryMenu.add(iselect); queryMenu.add(iconstruct); @@ -1162,7 +1186,7 @@ public void actionPerformed(ActionEvent l_Event) { duplicateFrom.setEnabled(false); comment.setEnabled(false); saveQuery.setEnabled(false); - saveResult.setEnabled(false); + fileMenuSaveResult.setEnabled(false); } } @@ -1320,8 +1344,20 @@ else if (e.getSource() == saveQuery) { String style = loadText(); defaultStylesheet = style; } // Sauvegarde le résultat sous forme XML dans un fichier texte - else if (e.getSource() == saveResult) { - save(current.getTextAreaXMLResult().getText()); + else if (e.getSource() == saveResultXml) { + saveResult(ResultFormat.XML_FORMAT); + } // Sauvegarde le résultat sous forme JSON dans un fichier texte + else if (e.getSource() == saveResultJson) { + saveResult(ResultFormat.JSON_FORMAT); + } // Sauvegarde le résultat sous forme CSV dans un fichier texte + else if (e.getSource() == saveResultCsv) { + saveResult(ResultFormat.CSV_FORMAT); + } // Sauvegarde le résultat sous forme TSV dans un fichier texte + else if (e.getSource() == saveResultTsv) { + saveResult(ResultFormat.TSV_FORMAT); + } // Sauvegarde le résultat sous forme Markdown dans un fichier texte + else if (e.getSource() == saveResultMarkdown) { + saveResult(ResultFormat.MARKDOWN_FORMAT); } // Exporter le graph au format RDF/XML else if (e.getSource() == exportRDF) { saveGraph(Transformer.RDFXML); @@ -1512,7 +1548,17 @@ void saveGraph(int format) { ResultFormat ft = ResultFormat.create(graph, format); save(ft.toString()); + } + /** + * Save the result of a query in the specified format + * + * @param format the format in which the result will be saved + * (See ResultFormat.java for the list of formats) + */ + void saveResult(int format) { + ResultFormat ft = ResultFormat.create(current.getMappings(), format); + save(ft.toString()); } void saveQuery() { From 6d0be84bfbc58e6059f4d4e011070727abca59c5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Wed, 17 Apr 2024 11:18:18 +0200 Subject: [PATCH 057/146] Add support for saving query results in RDFC-1.0 canonic format --- .../resources/data/corese/property.properties | 2 +- .../fr/inria/corese/gui/core/MainFrame.java | 34 +++++++++++++++++++ .../inria/corese/gui/query/GraphEngine.java | 2 +- 3 files changed, 36 insertions(+), 2 deletions(-) diff --git a/corese-core/src/main/resources/data/corese/property.properties b/corese-core/src/main/resources/data/corese/property.properties index 07f9ffa15..a695af2d2 100644 --- a/corese-core/src/main/resources/data/corese/property.properties +++ b/corese-core/src/main/resources/data/corese/property.properties @@ -69,7 +69,7 @@ GRAPH_NODE_AS_DATATYPE = false EXTERNAL_NAMED_GRAPH = true -# load in kg:default or in file path as named graph +# load in kg:default LOAD_IN_DEFAULT_GRAPH = true # skolemize bnode as URI diff --git a/corese-gui/src/main/java/fr/inria/corese/gui/core/MainFrame.java b/corese-gui/src/main/java/fr/inria/corese/gui/core/MainFrame.java index e01f6d2d7..372a668e5 100755 --- a/corese-gui/src/main/java/fr/inria/corese/gui/core/MainFrame.java +++ b/corese-gui/src/main/java/fr/inria/corese/gui/core/MainFrame.java @@ -71,7 +71,9 @@ import fr.inria.corese.core.load.LoadException; import fr.inria.corese.core.load.QueryLoad; import fr.inria.corese.core.load.result.SPARQLResultParser; +import fr.inria.corese.core.print.CanonicalRdf10Format; import fr.inria.corese.core.print.ResultFormat; +import fr.inria.corese.core.print.rdfc10.HashingUtility.HashAlgorithm; import 
fr.inria.corese.core.query.QueryProcess; import fr.inria.corese.core.rule.RuleEngine; import fr.inria.corese.core.transform.TemplatePrinter; @@ -149,6 +151,9 @@ public class MainFrame extends JFrame implements ActionListener { private JMenuItem exportNt; private JMenuItem exportNq; private JMenuItem exportOwl; + private JMenu exportCanonic; + private JMenuItem saveRDFC_1_0_sha256; + private JMenuItem saveRDFC_1_1_sha384; private JMenuItem copy; private JMenuItem cut; private JMenuItem paste; @@ -663,6 +668,15 @@ private void initMenu() { exportOwl.addActionListener(this); exportOwl.setToolTipText("Export graph in OWL format"); + exportCanonic = new JMenu("Canonic"); + exportCanonic.addActionListener(this); + + saveRDFC_1_0_sha256 = new JMenuItem("RDFC-1.0 (sha256)"); + saveRDFC_1_0_sha256.addActionListener(this); + + saveRDFC_1_1_sha384 = new JMenuItem("RDFC-1.0 (sha384)"); + saveRDFC_1_1_sha384.addActionListener(this); + execWorkflow = new JMenuItem("Process Workflow"); execWorkflow.addActionListener(this); @@ -856,6 +870,9 @@ private void initMenu() { fileMenuSaveGraph.add(exportNt); fileMenuSaveGraph.add(exportNq); fileMenuSaveGraph.add(exportOwl); + fileMenuSaveGraph.add(exportCanonic); + exportCanonic.add(saveRDFC_1_0_sha256); + exportCanonic.add(saveRDFC_1_1_sha384); fileMenu.add(saveQuery); @@ -1379,6 +1396,12 @@ else if (e.getSource() == exportNq) { } // Exporter le graph au format OWL else if (e.getSource() == exportOwl) { saveGraph(Transformer.OWL); + } // Exporter le graph au format RDFC-1.0 (sha256) + else if (e.getSource() == saveRDFC_1_0_sha256) { + saveGraphCanonic(HashAlgorithm.SHA_256); + } // Exporter le graph au format RDFC-1.0 (sha384) + else if (e.getSource() == saveRDFC_1_1_sha384) { + saveGraphCanonic(HashAlgorithm.SHA_384); } // Charge et exécute une règle directement else if (e.getSource() == loadAndRunRule) { loadRunRule(); @@ -1537,6 +1560,17 @@ void saveGraph(String format) { } } + /** + * Save the graph in canonic format with the specified algorithm + * + * @param format the format in which the graph will be saved + */ + void saveGraphCanonic(HashAlgorithm algo) { + Graph graph = myCorese.getGraph(); + CanonicalRdf10Format transformer = new CanonicalRdf10Format(graph, algo); + save(transformer.toString()); + } + /** * Save the graph in the specified format * diff --git a/corese-gui/src/main/java/fr/inria/corese/gui/query/GraphEngine.java b/corese-gui/src/main/java/fr/inria/corese/gui/query/GraphEngine.java index d51a59eec..09fab0e73 100644 --- a/corese-gui/src/main/java/fr/inria/corese/gui/query/GraphEngine.java +++ b/corese-gui/src/main/java/fr/inria/corese/gui/query/GraphEngine.java @@ -240,7 +240,7 @@ public Load loader() { public void load(String path) throws EngineException, LoadException { Load ld = loader(); - ld.parse(path); + ld.parse(path, ld.defaultGraph()); // in case of load rule if (ld.getRuleEngine() != null) { setRuleEngine(ld.getRuleEngine()); From bc5e698069f052bbe4a64aebcc8102400e686feb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Wed, 17 Apr 2024 16:27:05 +0200 Subject: [PATCH 058/146] Add canonical command --- .../java/fr/inria/corese/command/App.java | 3 +- .../corese/command/programs/Canonical.java | 176 ++++++++++++++++++ .../command/utils/format/EnumCanonicAlgo.java | 62 ++++++ .../utils/format/EnumOutputFormat.java | 4 +- .../utils/rdf/RdfDataCanonicalizer.java | 104 +++++++++++ .../command/utils/rdf/RdfDataExporter.java | 4 - 6 files changed, 345 insertions(+), 8 deletions(-) create mode 100644 
corese-command/src/main/java/fr/inria/corese/command/programs/Canonical.java create mode 100644 corese-command/src/main/java/fr/inria/corese/command/utils/format/EnumCanonicAlgo.java create mode 100644 corese-command/src/main/java/fr/inria/corese/command/utils/rdf/RdfDataCanonicalizer.java diff --git a/corese-command/src/main/java/fr/inria/corese/command/App.java b/corese-command/src/main/java/fr/inria/corese/command/App.java index 2402abaf9..441efc9c1 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/App.java +++ b/corese-command/src/main/java/fr/inria/corese/command/App.java @@ -1,5 +1,6 @@ package fr.inria.corese.command; +import fr.inria.corese.command.programs.Canonical; import fr.inria.corese.command.programs.Convert; import fr.inria.corese.command.programs.RemoteSparql; import fr.inria.corese.command.programs.Shacl; @@ -8,7 +9,7 @@ import picocli.CommandLine.Command; @Command(name = "Corese-command", version = App.version, mixinStandardHelpOptions = true, subcommands = { - Convert.class, Sparql.class, Shacl.class, RemoteSparql.class + Convert.class, Sparql.class, Shacl.class, RemoteSparql.class, Canonical.class }) public final class App implements Runnable { diff --git a/corese-command/src/main/java/fr/inria/corese/command/programs/Canonical.java b/corese-command/src/main/java/fr/inria/corese/command/programs/Canonical.java new file mode 100644 index 000000000..4345f7300 --- /dev/null +++ b/corese-command/src/main/java/fr/inria/corese/command/programs/Canonical.java @@ -0,0 +1,176 @@ +package fr.inria.corese.command.programs; + +import java.io.IOException; +import java.net.URL; +import java.nio.file.Path; +import java.util.Optional; +import java.util.concurrent.Callable; + +import fr.inria.corese.command.App; +import fr.inria.corese.command.utils.ConfigManager; +import fr.inria.corese.command.utils.ConvertString; +import fr.inria.corese.command.utils.format.EnumCanonicAlgo; +import fr.inria.corese.command.utils.format.EnumInputFormat; +import fr.inria.corese.command.utils.rdf.RdfDataCanonicalizer; +import fr.inria.corese.command.utils.rdf.RdfDataLoader; +import fr.inria.corese.core.Graph; +import fr.inria.corese.core.util.Property; +import fr.inria.corese.core.util.Property.Value; +import picocli.CommandLine.Command; +import picocli.CommandLine.Model.CommandSpec; +import picocli.CommandLine.Option; +import picocli.CommandLine.Spec; + +@Command(name = "canonicalize", version = App.version, description = "Canonicalize an RDF file to a specific format.", mixinStandardHelpOptions = true) +public class Canonical implements Callable { + + private final String DEFAULT_OUTPUT_FILE_NAME = "output"; + private final int ERROR_EXIT_CODE_SUCCESS = 0; + private final int ERROR_EXIT_CODE_ERROR = 1; + + @Spec + CommandSpec spec; + + @Option(names = { "-i", "--input-data" }, description = "Path or URL of the file that needs to be canonicalized.") + private String input; + + @Option(names = { "-f", "-if", + "--input-format" }, description = "RDF serialization format of the input file. Possible values:\u001b[34m ${COMPLETION-CANDIDATES}\u001b[0m.") + private EnumInputFormat inputFormat = null; + + @Option(names = { "-o", + "--output-data" }, description = "Output file path. If not provided, the result will be written to standard output.", arity = "0..1", fallbackValue = DEFAULT_OUTPUT_FILE_NAME) + private Path output; + + @Option(names = { "-r", "-a", "-ca", "-of", + "--canonical-algo" }, required = true, description = "Canonicalization algorithm to use. 
Possible values:\u001b[34m ${COMPLETION-CANDIDATES}\u001b[0m.") + private EnumCanonicAlgo canonicalAlgo; + + @Option(names = { "-v", + "--verbose" }, description = "Prints more information about the execution of the command.") + private boolean verbose = false; + + @Option(names = { "-c", "--config", + "--init" }, description = "Path to a configuration file. If not provided, the default configuration file will be used.", required = false) + private Path configFilePath; + + @Option(names = { "-w", + "--no-owl-import" }, description = "Disables the automatic importation of ontologies specified in 'owl:imports' statements. When this flag is set, the application will not fetch and include referenced ontologies.", required = false, defaultValue = "false") + private boolean noOwlImport; + + private Graph graph = Graph.create(); + + private boolean canonicalAlgoIsDefined = false; + private boolean isDefaultOutputName = false; + + public Canonical() { + } + + @Override + public Integer call() { + + try { + + // Load configuration file + Optional configFilePath = Optional.ofNullable(this.configFilePath); + if (configFilePath.isPresent()) { + ConfigManager.loadFromFile(configFilePath.get(), this.spec, this.verbose); + } else { + ConfigManager.loadDefaultConfig(this.spec, this.verbose); + } + + // Set owl import + Property.set(Value.DISABLE_OWL_AUTO_IMPORT, this.noOwlImport); + + // Check if canonical algorithm is defined + this.canonicalAlgoIsDefined = this.output != null; + + // Check if output file name is default + this.isDefaultOutputName = this.output != null + && DEFAULT_OUTPUT_FILE_NAME.equals(this.output.toString()); + + // Execute command + this.checkInputValues(); + this.loadInputFile(); + this.exportGraph(); + + return this.ERROR_EXIT_CODE_SUCCESS; + } catch (IllegalArgumentException | IOException e) { + this.spec.commandLine().getErr().println("\u001B[31mError: " + e.getMessage() + "\u001B[0m"); + return this.ERROR_EXIT_CODE_ERROR; + } + } + + /** + * Check if the input values are correct. + * + * @throws IllegalArgumentException if input path is same as output path. + */ + private void checkInputValues() throws IllegalArgumentException { + if (this.input != null + && this.output != null + && this.input.equals(this.output.toString())) { + throw new IllegalArgumentException("Input path cannot be the same as output path."); + } + } + + /** + * Load the input file. + * + * @throws IllegalArgumentException if the input format is not supported. + * @throws IOException if an I/O error occurs while loading the + * input file. + */ + private void loadInputFile() throws IllegalArgumentException, IOException { + Optional url = ConvertString.toUrl(this.input); + Optional path = ConvertString.toPath(this.input); + + if (input == null) { + // if input is not provided, load from standard input + RdfDataLoader.LoadFromStdin(this.inputFormat, this.graph, this.spec, this.verbose); + } else if (url.isPresent()) { + // if input is a URL, load from the given URL + RdfDataLoader.loadFromURL(url.get(), this.inputFormat, this.graph, this.spec, this.verbose); + } else if (path.isPresent()) { + // if input is provided, load from the given file + RdfDataLoader.loadFromFile(path.get(), this.inputFormat, this.graph, this.spec, this.verbose); + } else { + throw new IllegalArgumentException("Input path is not a valid URL or file path: " + this.input); + } + } + + /** + * Canonicalize the graph. + * + * @throws IOException if an I/O error occurs while exporting the graph. 
+ */ + private void exportGraph() throws IOException { + + if (this.verbose) { + this.spec.commandLine().getOut() + .println("Canonicalizing file with " + this.canonicalAlgo + " algorithm..."); + } + + Path outputFileName; + + // Set output file name + if (this.canonicalAlgoIsDefined && !this.isDefaultOutputName) { + outputFileName = this.output; + } else { + outputFileName = Path.of(this.DEFAULT_OUTPUT_FILE_NAME + "." + this.canonicalAlgo.getExtention()); + } + + // Export the graph + if (this.output == null) { + RdfDataCanonicalizer.canonicalizeToStdout(this.canonicalAlgo, this.graph, this.spec, this.verbose); + } else { + RdfDataCanonicalizer.canonicalizeToFile( + outputFileName, + this.canonicalAlgo, + this.graph, + this.spec, + this.verbose); + } + } + +} diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/format/EnumCanonicAlgo.java b/corese-command/src/main/java/fr/inria/corese/command/utils/format/EnumCanonicAlgo.java new file mode 100644 index 000000000..f991a06bd --- /dev/null +++ b/corese-command/src/main/java/fr/inria/corese/command/utils/format/EnumCanonicAlgo.java @@ -0,0 +1,62 @@ +package fr.inria.corese.command.utils.format; + +/** + * Enumeration of exportable RDF serialization formats. + */ +public enum EnumCanonicAlgo { + RDFC10(1, "rdfc-1.0", "nq"), + RDFC10SHA256(1, "rdfc-1.0-sha256", "nq"), + + RDFC10SHA384(2, "rdfc-1.0-sha384", "nq"); + + private final int value; + private final String name; + private final String extention; + + /** + * Constructor. + * + * @param value The value of the enum. + * @param name The name of the enum. + * @param extention The extension of the format. + */ + private EnumCanonicAlgo(int value, String name, String extention) { + this.value = value; + this.name = name; + this.extention = extention; + } + + /** + * Get the value of the enum. + * + * @return The value of the enum. + */ + public int getValue() { + return this.value; + } + + /** + * Get the name of the canonic algorithm. + * + * @return The name of the canonic algorithm. + */ + public String getName() { + return this.name; + } + + /** + * Get the extension of the file format associated with the canonic algorithm. + * + * @return The extension of the file format associated with the canonic + * algorithm. 
+ */ + public String getExtention() { + return this.extention; + } + + @Override + public String toString() { + return this.name; + } + +} diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/format/EnumOutputFormat.java b/corese-command/src/main/java/fr/inria/corese/command/utils/format/EnumOutputFormat.java index 8e57b4b4b..6a5b29cf8 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/utils/format/EnumOutputFormat.java +++ b/corese-command/src/main/java/fr/inria/corese/command/utils/format/EnumOutputFormat.java @@ -24,9 +24,7 @@ public enum EnumOutputFormat { NQUADS(7, "nquads", "nq"), NQ(7, "nq", "nq"), - APPLICATION_NQUADS(7, "application/n-quads", "nq"), - - CANONICAL10(8, "canonical", "nq"); + APPLICATION_NQUADS(7, "application/n-quads", "nq"); private final int value; private final String name; diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/rdf/RdfDataCanonicalizer.java b/corese-command/src/main/java/fr/inria/corese/command/utils/rdf/RdfDataCanonicalizer.java new file mode 100644 index 000000000..2da538b41 --- /dev/null +++ b/corese-command/src/main/java/fr/inria/corese/command/utils/rdf/RdfDataCanonicalizer.java @@ -0,0 +1,104 @@ +package fr.inria.corese.command.utils.rdf; + +import java.io.FileOutputStream; +import java.io.OutputStream; +import java.nio.file.Path; + +import fr.inria.corese.command.utils.format.EnumCanonicAlgo; +import fr.inria.corese.core.Graph; +import fr.inria.corese.core.print.CanonicalRdf10Format; +import fr.inria.corese.core.print.rdfc10.HashingUtility.HashAlgorithm; +import picocli.CommandLine.Model.CommandSpec; + +/** + * Utility class to canonicalize RDF data from a Corese Graph. + */ +public class RdfDataCanonicalizer { + + /** + * Canonicalize RDF data from a Corese Graph and write it to a file. + * + * @param path Path of the file to canonicalize to. + * @param canonicAlgo Canonicalization algorithm to use. + * @param graph Corese Graph to canonicalize RDF data from. + * @param spec Command specification. + * @param verbose If true, print information about the exported file. + */ + public static void canonicalizeToFile( + Path path, + EnumCanonicAlgo canonicAlgo, + Graph graph, + CommandSpec spec, + boolean verbose) { + + OutputStream outputStream; + + try { + outputStream = new FileOutputStream(path.toString()); + } catch (Exception e) { + throw new IllegalArgumentException("Failed to open export file: " + path.toString(), e); + } + + canonicalizeToOutputStream(outputStream, canonicAlgo, graph); + + if (verbose) { + spec.commandLine().getErr().println("Canonicalized RDF data with format: " + canonicAlgo); + spec.commandLine().getErr().println("Canonicalized RDF data to file: " + path.toString()); + } + } + + /** + * Canonicalize RDF data from a Corese Graph and write it to standard output. + * + * @param canonicAlgo Canonicalization algorithm to use. + * @param graph Corese Graph to canonicalize RDF data from. + * @param spec Command specification. + * @param verbose If true, print information about the exported file. 
+ */ + public static void canonicalizeToStdout( + EnumCanonicAlgo canonicAlgo, + Graph graph, + CommandSpec spec, + boolean verbose) { + + canonicalizeToOutputStream(System.out, canonicAlgo, graph); + + if (verbose) { + spec.commandLine().getErr().println("Canonicalized RDF data with format: " + canonicAlgo); + spec.commandLine().getErr().println("Canonicalized RDF data to: standard output"); + } + } + + /** + * Canonicalize RDF data from a Corese Graph and write it to an output stream. + * + * @param outputStream Output stream to write the canonicalized RDF data to. + * @param canonicAlgo Canonicalization algorithm to use. + * @param graph Corese Graph to canonicalize RDF data from. + */ + private static void canonicalizeToOutputStream( + OutputStream outputStream, + EnumCanonicAlgo canonicAlgo, + Graph graph) { + + try { + switch (canonicAlgo) { + case RDFC10: + case RDFC10SHA256: + CanonicalRdf10Format.create(graph, HashAlgorithm.SHA_256).write(outputStream); + break; + case RDFC10SHA384: + CanonicalRdf10Format.create(graph, HashAlgorithm.SHA_384).write(outputStream); + break; + default: + throw new IllegalArgumentException("Unsupported output format: " + canonicAlgo); + } + + outputStream.flush(); + + } catch (Exception e) { + throw new IllegalArgumentException("Failed to write to RDF data to output stream", e); + } + } + +} diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/rdf/RdfDataExporter.java b/corese-command/src/main/java/fr/inria/corese/command/utils/rdf/RdfDataExporter.java index f12cfff62..5fdc22a16 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/utils/rdf/RdfDataExporter.java +++ b/corese-command/src/main/java/fr/inria/corese/command/utils/rdf/RdfDataExporter.java @@ -6,7 +6,6 @@ import fr.inria.corese.command.utils.format.EnumOutputFormat; import fr.inria.corese.core.Graph; -import fr.inria.corese.core.print.CanonicalRdf10Format; import fr.inria.corese.core.print.JSONLDFormat; import fr.inria.corese.core.print.NQuadsFormat; import fr.inria.corese.core.print.NTriplesFormat; @@ -115,9 +114,6 @@ private static void exportToOutputStream( case APPLICATION_NQUADS: NQuadsFormat.create(graph).write(outputStream); break; - case CANONICAL10: - CanonicalRdf10Format.create(graph).write(outputStream); - break; default: throw new IllegalArgumentException("Unsupported output format: " + outputFormat); } From 574174121064fdc21900d96351edc9408910fab9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Thu, 18 Apr 2024 11:04:46 +0200 Subject: [PATCH 059/146] Add documentation for Corese-Command canonicalise --- .../Getting Started With Corese-command.md | 140 ++++++++++++++++-- 1 file changed, 128 insertions(+), 12 deletions(-) diff --git a/docs/getting started/Getting Started With Corese-command.md b/docs/getting started/Getting Started With Corese-command.md index 039edc3d2..0ca052f1f 100644 --- a/docs/getting started/Getting Started With Corese-command.md +++ b/docs/getting started/Getting Started With Corese-command.md @@ -39,12 +39,17 @@ Designed to simplify and streamline tasks related to querying, converting, and v 1. [6.7.1. Custom HTTP Headers](#671-custom-http-headers) 2. [6.7.2. Redirection Limit](#672-redirection-limit) 3. [6.7.3. Query Validation](#673-query-validation) - 7. [7. General Options](#7-general-options) - 1. [7.1. Configuration file](#71-configuration-file) - 2. [7.2. Verbose](#72-verbose) - 3. [7.3. Version](#73-version) - 4. [7.4. Get Help](#74-get-help) - 5. [7.5. 
Disabling OWL Auto Import](#75-disabling-owl-auto-import) + 7. [7. `canonicalize` Command](#7-canonicalize-command) + 1. [7.1. Basic Usage](#71-basic-usage) + 2. [7.2. Different Types of Input](#72-different-types-of-input) + 3. [7.3. Different Types of Output](#73-different-types-of-output) + 4. [7.4. Canonicalization Algorithms](#74-canonicalization-algorithms) + 8. [8. General Options](#8-general-options) + 1. [8.1. Configuration file](#81-configuration-file) + 2. [8.2. Verbose](#82-verbose) + 3. [8.3. Version](#83-version) + 4. [8.4. Get Help](#84-get-help) + 5. [8.5. Disabling OWL Auto Import](#85-disabling-owl-auto-import) ## 2. Installation @@ -578,11 +583,122 @@ corese-command remote-sparql -q 'SELECT * WHERE {?s ?p ?o}' -e "http://example.o This option is useful when you want to send a query that is not valid according to the SPARQL grammar, but is still accepted by the SPARQL endpoint. -## 7. General Options + + +## 7. `canonicalize` Command + +The `canonicalize` command allows you to apply a specific canonicalization algorithm to RDF files. + +### 7.1. Basic Usage + +Use the following syntax to canonicalize an RDF file using the SHA-256 algorithm under the RDFC 1.0 specification: + +```shell +corese-command canonicalize -i myData.ttl -r rdfc-1.0-sha256 +``` + +This example canonicalizes `myData.ttl` to the `rdfc-1.0-sha256` (See [RDFC1.0](https://www.w3.org/TR/rdf-canon/)) canonical algorithm. The `-i` flag specifies the input file, and the `-r` flag specifies the canonical algorithm. + +### 7.2. Different Types of Input + +The input can be provided in different ways: + +- **File Input:** The input file can be specified with the `-i` flag: + +```shell +corese-command canonicalize -i myData.ttl -r rdfc-1.0-sha256 +``` + +- **URL Input:** URLs can be specified with the `-i` flag: + +```shell +corese-command canonicalize -i 'http://example.org/myData.ttl' -r rdfc-1.0-sha256 +``` + +- **Standard Input:** If no input file is specified with `-i`, the program uses the standard input: + +```shell +cat myData.ttl | corese-command canonicalize -r rdfc-1.0-sha256 -if turtle +``` + +> The input file format is automatically detected for file and URL inputs. If +> the input is provided on the standard input or you want to force the input +> format, you can use the `-f` or `-if` flag. Possible values are: +> +> - `rdfxml`, `rdf` or `application/rdf+xml` +> - `turtle`, `ttl` or `text/turtle` +> - `trig`, `application/trig` +> - `jsonld`, `application/ld+json` +> - `ntriples`, `nt` or `application/n-triples` +> - `nquads`, `nq`, or `application/n-quads` +> - `rdfa`, `html` or `application/xhtml+xml` + +### 7.3. Different Types of Output + +The output can be provided in different ways: + +- **File Output:** The output file can be specified with the `-o` flag: + +```shell +corese-command canonicalize -i myData.ttl -r rdfc-1.0-sha256 -o myResult.ttl +``` + +- **Standard Output:** If no output file is specified with `-o`, the program uses the standard output: + +```shell +corese-command canonicalize -i myData.ttl -r rdfc-1.0-sha256 | other-command +``` + +### 7.4. Canonicalization Algorithms + +The following canonicalization algorithms are available: + +- [RDFC 1.0](https://www.w3.org/TR/rdf-canon/) with SHA-256. +- [RDFC 1.0](https://www.w3.org/TR/rdf-canon/) with SHA-384. + +> The output file format can be specified with the `-r` flag. 
Possible values are: +> +> - `rdfc-1.0` or `rdfc-1.0-sha256` for [RDFC 1.0](https://www.w3.org/TR/rdf-canon/) with SHA-256 +> - `rdfc-1.0-sha384` for [RDFC 1.0](https://www.w3.org/TR/rdf-canon/) with SHA-384 + +## 8. General Options General options are available for all commands. -### 7.1. Configuration file +### 8.1. Configuration file All interface of Corese (Gui, Server, Command) can be configured with a configuration file. The configuration file is a property file (See a example on [GitHub](https://github.com/Wimmics/corese/blob/master/corese-core/src/main/resources/data/corese/property.properties)). @@ -598,7 +714,7 @@ For exampample, you can disable the auto import of owl with the following proper DISABLE_OWL_AUTO_IMPORT = true ``` -### 7.2. Verbose +### 8.2. Verbose The `-v` flag allows you to get more information about the execution of the command. @@ -606,11 +722,11 @@ The `-v` flag allows you to get more information about the execution of the comm corese-command sparql -q 'SELECT * WHERE {?s ?p ?o}' -i myData.ttl -v ``` -### 7.3. Version +### 8.3. Version The `-V` flag allows you to get the version of the command. -### 7.4. Get Help +### 8.4. Get Help For any command, you can use the `-h` or `--help` flag to get a description and the syntax. This is also available for the general `corese-command` and each specific sub-command. @@ -621,7 +737,7 @@ corese-command convert -h corese-command shacl -h ``` -### 7.5. Disabling OWL Auto Import +### 8.5. Disabling OWL Auto Import Corese-Command is configured to automatically import the vocabulary referenced in `owl:imports` statements by default. However, this behavior can be turned off by using the `-w` or `--no-owl-import` flag. From 79864d0e79194503e484c1b0ca22209e0ba36615 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Thu, 18 Apr 2024 11:47:53 +0200 Subject: [PATCH 060/146] Add support of multiple input in convert and canonicalize --- .../java/fr/inria/corese/command/App.java | 4 +- .../corese/command/programs/Convert.java | 39 ++++++++++------- .../{Canonical.java => canonicalize.java} | 43 +++++++++++-------- 3 files changed, 50 insertions(+), 36 deletions(-) rename corese-command/src/main/java/fr/inria/corese/command/programs/{Canonical.java => canonicalize.java} (80%) diff --git a/corese-command/src/main/java/fr/inria/corese/command/App.java b/corese-command/src/main/java/fr/inria/corese/command/App.java index 441efc9c1..f646ba033 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/App.java +++ b/corese-command/src/main/java/fr/inria/corese/command/App.java @@ -1,6 +1,6 @@ package fr.inria.corese.command; -import fr.inria.corese.command.programs.Canonical; +import fr.inria.corese.command.programs.canonicalize; import fr.inria.corese.command.programs.Convert; import fr.inria.corese.command.programs.RemoteSparql; import fr.inria.corese.command.programs.Shacl; @@ -9,7 +9,7 @@ import picocli.CommandLine.Command; @Command(name = "Corese-command", version = App.version, mixinStandardHelpOptions = true, subcommands = { - Convert.class, Sparql.class, Shacl.class, RemoteSparql.class, Canonical.class + Convert.class, Sparql.class, Shacl.class, RemoteSparql.class, canonicalize.class }) public final class App implements Runnable { diff --git a/corese-command/src/main/java/fr/inria/corese/command/programs/Convert.java b/corese-command/src/main/java/fr/inria/corese/command/programs/Convert.java index ce64a12a5..4a205bbdf 100644 --- 
a/corese-command/src/main/java/fr/inria/corese/command/programs/Convert.java +++ b/corese-command/src/main/java/fr/inria/corese/command/programs/Convert.java @@ -31,8 +31,9 @@ public class Convert implements Callable { @Spec CommandSpec spec; - @Option(names = { "-i", "--input-data" }, description = "Path or URL of the file that needs to be converted.") - private String input; + @Option(names = { "-i", + "--input-data" }, description = "Path or URL of the file that needs to be converted.", arity = "1...") + private String[] rdfData; @Option(names = { "-f", "-if", "--input-format" }, description = "RDF serialization format of the input file. Possible values:\u001b[34m ${COMPLETION-CANDIDATES}\u001b[0m.") @@ -107,10 +108,12 @@ public Integer call() { * @throws IllegalArgumentException if input path is same as output path. */ private void checkInputValues() throws IllegalArgumentException { - if (this.input != null - && this.output != null - && this.input.equals(this.output.toString())) { - throw new IllegalArgumentException("Input path cannot be the same as output path."); + if (this.rdfData != null && this.output != null) { + for (String input : this.rdfData) { + if (Path.of(input).compareTo(this.output) == 0) { + throw new IllegalArgumentException("Input path is same as output path: " + input); + } + } } } @@ -122,20 +125,24 @@ private void checkInputValues() throws IllegalArgumentException { * input file. */ private void loadInputFile() throws IllegalArgumentException, IOException { - Optional url = ConvertString.toUrl(this.input); - Optional path = ConvertString.toPath(this.input); - if (input == null) { + if (rdfData == null) { // if input is not provided, load from standard input RdfDataLoader.LoadFromStdin(this.inputFormat, this.graph, this.spec, this.verbose); - } else if (url.isPresent()) { - // if input is a URL, load from the given URL - RdfDataLoader.loadFromURL(url.get(), this.inputFormat, this.graph, this.spec, this.verbose); - } else if (path.isPresent()) { - // if input is provided, load from the given file - RdfDataLoader.loadFromFile(path.get(), this.inputFormat, this.graph, this.spec, this.verbose); } else { - throw new IllegalArgumentException("Input path is not a valid URL or file path: " + this.input); + for (String input : this.rdfData) { + Optional url = ConvertString.toUrl(input); + Optional path = ConvertString.toPath(input); + if (url.isPresent()) { + // if input is a URL, load from the given URL + RdfDataLoader.loadFromURL(url.get(), this.inputFormat, this.graph, this.spec, this.verbose); + } else if (path.isPresent()) { + // if input is provided, load from the given file + RdfDataLoader.loadFromFile(path.get(), this.inputFormat, this.graph, this.spec, this.verbose); + } else { + throw new IllegalArgumentException("Input path is not a valid URL or file path: " + input); + } + } } } diff --git a/corese-command/src/main/java/fr/inria/corese/command/programs/Canonical.java b/corese-command/src/main/java/fr/inria/corese/command/programs/canonicalize.java similarity index 80% rename from corese-command/src/main/java/fr/inria/corese/command/programs/Canonical.java rename to corese-command/src/main/java/fr/inria/corese/command/programs/canonicalize.java index 4345f7300..f56064a36 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/programs/Canonical.java +++ b/corese-command/src/main/java/fr/inria/corese/command/programs/canonicalize.java @@ -22,7 +22,7 @@ import picocli.CommandLine.Spec; @Command(name = "canonicalize", version = App.version, description = 
"Canonicalize an RDF file to a specific format.", mixinStandardHelpOptions = true) -public class Canonical implements Callable { +public class canonicalize implements Callable { private final String DEFAULT_OUTPUT_FILE_NAME = "output"; private final int ERROR_EXIT_CODE_SUCCESS = 0; @@ -31,8 +31,9 @@ public class Canonical implements Callable { @Spec CommandSpec spec; - @Option(names = { "-i", "--input-data" }, description = "Path or URL of the file that needs to be canonicalized.") - private String input; + @Option(names = { "-i", + "--input-data" }, description = "Path or URL of the file that needs to be canonicalized.", arity = "1...") + private String[] rdfData; @Option(names = { "-f", "-if", "--input-format" }, description = "RDF serialization format of the input file. Possible values:\u001b[34m ${COMPLETION-CANDIDATES}\u001b[0m.") @@ -63,7 +64,7 @@ public class Canonical implements Callable { private boolean canonicalAlgoIsDefined = false; private boolean isDefaultOutputName = false; - public Canonical() { + public canonicalize() { } @Override @@ -107,10 +108,12 @@ public Integer call() { * @throws IllegalArgumentException if input path is same as output path. */ private void checkInputValues() throws IllegalArgumentException { - if (this.input != null - && this.output != null - && this.input.equals(this.output.toString())) { - throw new IllegalArgumentException("Input path cannot be the same as output path."); + if (this.rdfData != null && this.output != null) { + for (String input : this.rdfData) { + if (Path.of(input).compareTo(this.output) == 0) { + throw new IllegalArgumentException("Input path is same as output path: " + input); + } + } } } @@ -122,20 +125,24 @@ private void checkInputValues() throws IllegalArgumentException { * input file. 
*/ private void loadInputFile() throws IllegalArgumentException, IOException { - Optional url = ConvertString.toUrl(this.input); - Optional path = ConvertString.toPath(this.input); - if (input == null) { + if (rdfData == null) { // if input is not provided, load from standard input RdfDataLoader.LoadFromStdin(this.inputFormat, this.graph, this.spec, this.verbose); - } else if (url.isPresent()) { - // if input is a URL, load from the given URL - RdfDataLoader.loadFromURL(url.get(), this.inputFormat, this.graph, this.spec, this.verbose); - } else if (path.isPresent()) { - // if input is provided, load from the given file - RdfDataLoader.loadFromFile(path.get(), this.inputFormat, this.graph, this.spec, this.verbose); } else { - throw new IllegalArgumentException("Input path is not a valid URL or file path: " + this.input); + for (String input : this.rdfData) { + Optional url = ConvertString.toUrl(input); + Optional path = ConvertString.toPath(input); + if (url.isPresent()) { + // if input is a URL, load from the given URL + RdfDataLoader.loadFromURL(url.get(), this.inputFormat, this.graph, this.spec, this.verbose); + } else if (path.isPresent()) { + // if input is provided, load from the given file + RdfDataLoader.loadFromFile(path.get(), this.inputFormat, this.graph, this.spec, this.verbose); + } else { + throw new IllegalArgumentException("Input path is not a valid URL or file path: " + input); + } + } } } From fcc1662bdd12a2ddda1d553392f4bb3b7d42b0b9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Thu, 18 Apr 2024 12:04:20 +0200 Subject: [PATCH 061/146] Add support for recursive loading of files in Convert and canonicalize commands --- .../inria/corese/command/programs/Convert.java | 16 ++++++++++++---- .../corese/command/programs/canonicalize.java | 16 ++++++++++++---- 2 files changed, 24 insertions(+), 8 deletions(-) diff --git a/corese-command/src/main/java/fr/inria/corese/command/programs/Convert.java b/corese-command/src/main/java/fr/inria/corese/command/programs/Convert.java index 4a205bbdf..73be86926 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/programs/Convert.java +++ b/corese-command/src/main/java/fr/inria/corese/command/programs/Convert.java @@ -59,6 +59,10 @@ public class Convert implements Callable { "--no-owl-import" }, description = "Disables the automatic importation of ontologies specified in 'owl:imports' statements. 
When this flag is set, the application will not fetch and include referenced ontologies.", required = false, defaultValue = "false") private boolean noOwlImport; + @Option(names = { "-R", + "--recursive" }, description = "If an input is a directory, load all the files in the directory recursively.") + private boolean recursive = false; + private Graph graph = Graph.create(); private boolean outputFormatIsDefined = false; @@ -137,10 +141,14 @@ private void loadInputFile() throws IllegalArgumentException, IOException { // if input is a URL, load from the given URL RdfDataLoader.loadFromURL(url.get(), this.inputFormat, this.graph, this.spec, this.verbose); } else if (path.isPresent()) { - // if input is provided, load from the given file - RdfDataLoader.loadFromFile(path.get(), this.inputFormat, this.graph, this.spec, this.verbose); - } else { - throw new IllegalArgumentException("Input path is not a valid URL or file path: " + input); + if (path.get().toFile().isDirectory()) { + // if input is a directory, load all the files in the directory + RdfDataLoader.loadFromDirectory(path.get(), this.inputFormat, this.graph, this.recursive, + this.spec, this.verbose); + } else { + // if input is provided, load from the given file + RdfDataLoader.loadFromFile(path.get(), this.inputFormat, this.graph, this.spec, this.verbose); + } } } } diff --git a/corese-command/src/main/java/fr/inria/corese/command/programs/canonicalize.java b/corese-command/src/main/java/fr/inria/corese/command/programs/canonicalize.java index f56064a36..f5d208240 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/programs/canonicalize.java +++ b/corese-command/src/main/java/fr/inria/corese/command/programs/canonicalize.java @@ -59,6 +59,10 @@ public class canonicalize implements Callable { "--no-owl-import" }, description = "Disables the automatic importation of ontologies specified in 'owl:imports' statements. 
When this flag is set, the application will not fetch and include referenced ontologies.", required = false, defaultValue = "false") private boolean noOwlImport; + @Option(names = { "-R", + "--recursive" }, description = "If an input is a directory, load all the files in the directory recursively.") + private boolean recursive = false; + private Graph graph = Graph.create(); private boolean canonicalAlgoIsDefined = false; @@ -137,10 +141,14 @@ private void loadInputFile() throws IllegalArgumentException, IOException { // if input is a URL, load from the given URL RdfDataLoader.loadFromURL(url.get(), this.inputFormat, this.graph, this.spec, this.verbose); } else if (path.isPresent()) { - // if input is provided, load from the given file - RdfDataLoader.loadFromFile(path.get(), this.inputFormat, this.graph, this.spec, this.verbose); - } else { - throw new IllegalArgumentException("Input path is not a valid URL or file path: " + input); + if (path.get().toFile().isDirectory()) { + // if input is a directory, load all the files in the directory + RdfDataLoader.loadFromDirectory(path.get(), this.inputFormat, this.graph, this.recursive, + this.spec, this.verbose); + } else { + // if input is provided, load from the given file + RdfDataLoader.loadFromFile(path.get(), this.inputFormat, this.graph, this.spec, this.verbose); + } } } } From 8b766d03dc5bd0d75144f9c043f30a6422e56fb9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Thu, 18 Apr 2024 12:05:22 +0200 Subject: [PATCH 062/146] Add support for multiple input files in convert and canonicalize commands in docs --- .../Getting Started With Corese-command.md | 101 ++++++++++++------ 1 file changed, 66 insertions(+), 35 deletions(-) diff --git a/docs/getting started/Getting Started With Corese-command.md b/docs/getting started/Getting Started With Corese-command.md index 0ca052f1f..01c239dbc 100644 --- a/docs/getting started/Getting Started With Corese-command.md +++ b/docs/getting started/Getting Started With Corese-command.md @@ -21,6 +21,7 @@ Designed to simplify and streamline tasks related to querying, converting, and v 2. [4.2. Different Types of Input](#42-different-types-of-input) 3. [4.3. Different Types of Output](#43-different-types-of-output) 4. [4.4. Summary of Available Formats](#44-summary-of-available-formats) + 5. [4.5. Multiple Input Files](#45-multiple-input-files) 5. [5. The `shacl` Command](#5-the-shacl-command) 1. [5.1. Basic Usage](#51-basic-usage) 2. [5.2. Different Types of Input](#52-different-types-of-input) @@ -44,6 +45,7 @@ Designed to simplify and streamline tasks related to querying, converting, and v 2. [7.2. Different Types of Input](#72-different-types-of-input) 3. [7.3. Different Types of Output](#73-different-types-of-output) 4. [7.4. Canonicalization Algorithms](#74-canonicalization-algorithms) + 5. [7.5. Multiple Input Files](#75-multiple-input-files) 8. [8. General Options](#8-general-options) 1. [8.1. Configuration file](#81-configuration-file) 2. [8.2. Verbose](#82-verbose) @@ -343,6 +345,38 @@ The `convert` command supports the following formats for input and output: | NQUADS | ✅ | ✅ | | RDFA | ✅ | ❌ | +### 4.5. 
Multiple Input Files + +- **Multiple Input:** It's possible to provide multiple input files by repeating the `-i` flag: + +```shell +corese-command convert -i myData1.ttl -i myData2.ttl -r jsonld +``` + +- **Shell Globbing:** It's also possible to use shell globbing to provide multiple input files: + +```shell +corese-command convert -i rdf/*.ttl -r jsonld +``` + +```shell +corese-command convert -i myData?.ttl -r jsonld +``` + +- **Directory Input:** If you want to use a whole directory as input, you can do so. + +```shell +corese-command convert -i ./myDirectory/ -r jsonld +``` + +- **Directory Input Recursive:** If you want to use a whole directory as input, you can do so. The `-R` flag allows you to use the directory recursively. + +```shell +corese-command convert -i ./myDirectory/ -r jsonld -R +``` + +> The command integrates all specified input files into a single dataset for processing. During conversion, these files are collectively transformed into the designated output format, effectively merging all data into one coherent file. + ## 5. The `shacl` Command The `shacl` command allows you to validate RDF data against SHACL shapes. @@ -583,41 +617,6 @@ corese-command remote-sparql -q 'SELECT * WHERE {?s ?p ?o}' -e "http://example.o This option is useful when you want to send a query that is not valid according to the SPARQL grammar, but is still accepted by the SPARQL endpoint. - - ## 7. `canonicalize` Command The `canonicalize` command allows you to apply a specific canonicalization algorithm to RDF files. @@ -694,6 +693,38 @@ The following canonicalization algorithms are available: > - `rdfc-1.0` or `rdfc-1.0-sha256` for [RDFC 1.0](https://www.w3.org/TR/rdf-canon/) with SHA-256 > - `rdfc-1.0-sha384` for [RDFC 1.0](https://www.w3.org/TR/rdf-canon/) with SHA-384 +### 7.5. Multiple Input Files + +- **Multiple Input:** It's possible to provide multiple input files by repeating the `-i` flag: + +```shell +corese-command canonicalize -i myData1.ttl -i myData2.ttl -r rdfc-1.0-sha256 +``` + +- **Shell Globbing:** It's also possible to use shell globbing to provide multiple input files: + +```shell +corese-command canonicalize -i rdf/*.ttl -r rdfc-1.0-sha256 +``` + +```shell +corese-command canonicalize -i myData?.ttl -r rdfc-1.0-sha256 +``` + +- **Directory Input:** If you want to use a whole directory as input, you can do so. + +```shell +corese-command canonicalize -i ./myDirectory/ -r rdfc-1.0-sha256 +``` + +- **Directory Input Recursive:** If you want to use a whole directory as input, you can do so. The `-R` flag allows you to use the directory recursively. + +```shell +corese-command canonicalize -i ./myDirectory/ -r rdfc-1.0-sha256 -R +``` + +> All input files are loaded into the same dataset. Canonicalization algorithms are applied to the entire dataset. + ## 8. General Options General options are available for all commands. 
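Taken together, the `canonicalize` options documented above can be combined in a single call. The sketch below is illustrative only; the `./rdf/` directory and `corese.properties` file are hypothetical, and only flags that appear in this patch series are used:

```shell
# Hypothetical paths; combines -i (directory input), -R (recursive), -r (canonicalization
# algorithm), -o (output file), -c (configuration file) and -v (verbose output).
corese-command canonicalize -i ./rdf/ -R -r rdfc-1.0-sha256 -o canonical.nq -c corese.properties -v
```

Since every input file is loaded into the same dataset before the algorithm runs, the result is a single canonical N-Quads document, whatever the number of files found under the directory.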
From f59f6ea38ffddb03ddd6ffb3b447afdc69671166 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Wed, 24 Apr 2024 15:07:08 +0200 Subject: [PATCH 063/146] Add error handling for invalid SPARQL queries in Sparql.java --- .../main/java/fr/inria/corese/command/programs/Sparql.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/corese-command/src/main/java/fr/inria/corese/command/programs/Sparql.java b/corese-command/src/main/java/fr/inria/corese/command/programs/Sparql.java index be5f47d83..360f28b87 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/programs/Sparql.java +++ b/corese-command/src/main/java/fr/inria/corese/command/programs/Sparql.java @@ -196,7 +196,9 @@ private void loadQuery() throws IOException { } else if (path.isPresent()) { // if query is a path this.query = SparqlQueryLoader.loadFromFile(path.get(), this.spec, this.verbose); - } + }else { + throw new RuntimeException("The query is not a valid SPARQL query, a URL or a file path."); + } } /** From adcd2658cebaa1521ef5f78acb960d7d86ab0e81 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Wed, 24 Apr 2024 15:07:13 +0200 Subject: [PATCH 064/146] Update description of --canonical-algo option in canonicalize.java --- .../java/fr/inria/corese/command/programs/canonicalize.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/corese-command/src/main/java/fr/inria/corese/command/programs/canonicalize.java b/corese-command/src/main/java/fr/inria/corese/command/programs/canonicalize.java index f5d208240..e997214b4 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/programs/canonicalize.java +++ b/corese-command/src/main/java/fr/inria/corese/command/programs/canonicalize.java @@ -44,7 +44,7 @@ public class canonicalize implements Callable { private Path output; @Option(names = { "-r", "-a", "-ca", "-of", - "--canonical-algo" }, required = true, description = "Canonicalization algorithm to use. Possible values:\u001b[34m ${COMPLETION-CANDIDATES}\u001b[0m.") + "--canonical-algo" }, required = true, description = "Canonicalization algorithm to which the input file should be converted. 
Possible values:\u001b[34m ${COMPLETION-CANDIDATES}\u001b[0m.") private EnumCanonicAlgo canonicalAlgo; @Option(names = { "-v", From 2aab76ddf2e977c8fdf42c53a34e7820d2cc6f02 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Wed, 24 Apr 2024 16:55:06 +0200 Subject: [PATCH 065/146] Add RDFC-1.0 canonic format in all Corese Command --- .../corese/command/utils/format/EnumOutputFormat.java | 7 ++++++- .../corese/command/utils/format/EnumResultFormat.java | 11 +++++++++++ .../corese/command/utils/rdf/RdfDataExporter.java | 9 +++++++++ .../java/fr/inria/corese/core/print/ResultFormat.java | 11 +++++++++++ .../fr/inria/corese/sparql/api/ResultFormatDef.java | 2 ++ 5 files changed, 39 insertions(+), 1 deletion(-) diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/format/EnumOutputFormat.java b/corese-command/src/main/java/fr/inria/corese/command/utils/format/EnumOutputFormat.java index 6a5b29cf8..07cb32264 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/utils/format/EnumOutputFormat.java +++ b/corese-command/src/main/java/fr/inria/corese/command/utils/format/EnumOutputFormat.java @@ -24,7 +24,12 @@ public enum EnumOutputFormat { NQUADS(7, "nquads", "nq"), NQ(7, "nq", "nq"), - APPLICATION_NQUADS(7, "application/n-quads", "nq"); + APPLICATION_NQUADS(7, "application/n-quads", "nq"), + + CANONICAL(8, "rdfc-1.0", "nq"), + CANONICAL_SHA256(8, "rdfc-1.0-sha256", "nq"), + + CANONICAL_SHA384(9, "rdfc-1.0-sha384", "nq"); private final int value; private final String name; diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/format/EnumResultFormat.java b/corese-command/src/main/java/fr/inria/corese/command/utils/format/EnumResultFormat.java index 4f62dd159..6b1daa155 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/utils/format/EnumResultFormat.java +++ b/corese-command/src/main/java/fr/inria/corese/command/utils/format/EnumResultFormat.java @@ -30,6 +30,11 @@ public enum EnumResultFormat { NQ(7, "nq", "nq", true), APPLICATION_NQUADS(7, "application/n-quads", "nq", true), + CANONICAL(8, "rdfc-1.0", "nq", true), + CANONICAL_SHA256(8, "rdfc-1.0-sha256", "nq", true), + + CANONICAL_SHA384(9, "rdfc-1.0-sha384", "nq", true), + BIDING_XML(11, "xml", "srx", false), SRX(11, "srx", "srx", false), APPLICATION_SPARQL_RESULTS_XML(11, ResultFormat.SPARQL_RESULTS_XML, "srx", false), @@ -134,6 +139,12 @@ public EnumOutputFormat convertToOutputFormat() { case NQ: case APPLICATION_NQUADS: return EnumOutputFormat.NQUADS; + case CANONICAL: + return EnumOutputFormat.CANONICAL; + case CANONICAL_SHA256: + return EnumOutputFormat.CANONICAL_SHA256; + case CANONICAL_SHA384: + return EnumOutputFormat.CANONICAL_SHA384; default: throw new InvalidParameterException("Output format " + this + " cannot be converted to OutputFormat."); diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/rdf/RdfDataExporter.java b/corese-command/src/main/java/fr/inria/corese/command/utils/rdf/RdfDataExporter.java index 5fdc22a16..bc451acbb 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/utils/rdf/RdfDataExporter.java +++ b/corese-command/src/main/java/fr/inria/corese/command/utils/rdf/RdfDataExporter.java @@ -6,11 +6,13 @@ import fr.inria.corese.command.utils.format.EnumOutputFormat; import fr.inria.corese.core.Graph; +import fr.inria.corese.core.print.CanonicalRdf10Format; import fr.inria.corese.core.print.JSONLDFormat; import fr.inria.corese.core.print.NQuadsFormat; import fr.inria.corese.core.print.NTriplesFormat; 
import fr.inria.corese.core.print.RDFFormat; import fr.inria.corese.core.print.TripleFormat; +import fr.inria.corese.core.print.rdfc10.HashingUtility.HashAlgorithm; import picocli.CommandLine.Model.CommandSpec; /** @@ -114,6 +116,13 @@ private static void exportToOutputStream( case APPLICATION_NQUADS: NQuadsFormat.create(graph).write(outputStream); break; + case CANONICAL: + case CANONICAL_SHA256: + CanonicalRdf10Format.create(graph).write(outputStream); + break; + case CANONICAL_SHA384: + CanonicalRdf10Format.create(graph, HashAlgorithm.SHA_384).write(outputStream); + break; default: throw new IllegalArgumentException("Unsupported output format: " + outputFormat); } diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java b/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java index d47ea2bf7..0a84b480d 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java @@ -8,6 +8,7 @@ import fr.inria.corese.compiler.parser.Pragma; import fr.inria.corese.core.Graph; +import fr.inria.corese.core.print.rdfc10.HashingUtility.HashAlgorithm; import fr.inria.corese.core.transform.Transformer; import fr.inria.corese.core.util.MappingsGraph; import fr.inria.corese.kgram.api.core.Node; @@ -448,6 +449,10 @@ String graphToString(Node node) { return NTriplesFormat.create(getGraph()).toString(); case NQUADS_FORMAT: return NQuadsFormat.create(getGraph()).toString(); + case RDFC10_FORMAT: + return CanonicalRdf10Format.create(getGraph(), HashAlgorithm.SHA_256).toString(); + case RDFC10_SHA384_FORMAT: + return CanonicalRdf10Format.create(getGraph(), HashAlgorithm.SHA_384).toString(); case TURTLE_FORMAT: default: // e.g. HTML @@ -491,6 +496,8 @@ boolean isGraphFormat(int type) { case JSONLD_FORMAT: case NTRIPLES_FORMAT: case NQUADS_FORMAT: + case RDFC10_FORMAT: + case RDFC10_SHA384_FORMAT: // case RDF_FORMAT: return true; default: @@ -545,6 +552,10 @@ String processBasic(Mappings map, int type) { return NTriplesFormat.create(map).toString(); case NQUADS_FORMAT: return NQuadsFormat.create(map).toString(); + case RDFC10_FORMAT: + return CanonicalRdf10Format.create(map, HashAlgorithm.SHA_256).toString(); + case RDFC10_SHA384_FORMAT: + return CanonicalRdf10Format.create(map, HashAlgorithm.SHA_384).toString(); case RDF_FORMAT: // W3C RDF Graph Mappings diff --git a/sparql/src/main/java/fr/inria/corese/sparql/api/ResultFormatDef.java b/sparql/src/main/java/fr/inria/corese/sparql/api/ResultFormatDef.java index 06a3d40c5..783e64b04 100644 --- a/sparql/src/main/java/fr/inria/corese/sparql/api/ResultFormatDef.java +++ b/sparql/src/main/java/fr/inria/corese/sparql/api/ResultFormatDef.java @@ -15,6 +15,8 @@ public interface ResultFormatDef { public static final int JSONLD_FORMAT = 4; public static final int NTRIPLES_FORMAT = 6; public static final int NQUADS_FORMAT = 7; + public static final int RDFC10_FORMAT = 8; + public static final int RDFC10_SHA384_FORMAT = 9; public static final int XML_FORMAT = 11; public static final int RDF_FORMAT = 12; From 9eab9681283b20e16301a2af6db0148918ff9858 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Thu, 2 May 2024 19:18:36 +0200 Subject: [PATCH 066/146] Add support of n-triples, n-quads and canonical rdf in Corese-Server --- .../server/webservice/SPARQLRestAPI.java | 142 +++++++++++++++++- 1 file changed, 141 insertions(+), 1 deletion(-) diff --git 
a/corese-server/src/main/java/fr/inria/corese/server/webservice/SPARQLRestAPI.java b/corese-server/src/main/java/fr/inria/corese/server/webservice/SPARQLRestAPI.java index c826c5ec0..37bb6fe4a 100644 --- a/corese-server/src/main/java/fr/inria/corese/server/webservice/SPARQLRestAPI.java +++ b/corese-server/src/main/java/fr/inria/corese/server/webservice/SPARQLRestAPI.java @@ -1,5 +1,6 @@ package fr.inria.corese.server.webservice; +import java.util.ArrayList; import java.util.List; import java.util.UUID; @@ -65,9 +66,15 @@ public class SPARQLRestAPI implements ResultFormatDef, URLParam { static final String TRIG = ResultFormat.TRIG; static final String TRIG_TEXT = ResultFormat.TRIG_TEXT; static final String NT_TEXT = ResultFormat.NT_TEXT; + static final String N_TRILES = ResultFormat.N_TRIPLES; + static final String N_QUADS = ResultFormat.N_QUADS; static final String TEXT = ResultFormat.TEXT; static final String HTML = ResultFormat.HTML; + // Profiles + private final String CN10_SHA256 = "https://www.w3.org/TR/rdf-canon/#sha-256"; + private final String CN10_SHA384 = "https://www.w3.org/TR/rdf-canon/#sha-384"; + public static final String PROFILE_DEFAULT = "profile.ttl"; public static final String DEFAULT = NSManager.STL + "default"; @@ -373,6 +380,34 @@ String getResult(Mappings map, String format) { return getResultFormat(map, format).toString(); } + /** + * Get the profiles from the Accept header + * + * @param accept The Accept header + * @return The profiles + */ + private ArrayList getProfiles(String accept) { + ArrayList profiles = new ArrayList<>(); + String[] parts = accept.split(";"); + for (String part : parts) { + if (part.contains("profile=")) { + String[] profileParts = part.split("="); + String[] profileUrls = profileParts[1].split(" "); + + for (String profileUrl : profileUrls) { + // Remove the quotes + profileUrl = profileUrl.replace("\"", ""); + + profiles.add(profileUrl); + } + } + } + return profiles; + + // eg: Accept: + // application/n-quads;profile="https://www.w3.org/TR/rdf-canon/#sha-256 https://www.w3.org/TR/rdf-canon/#sha-384" + } + @GET @Produces({ HTML }) public Response getHTMLForGet(@jakarta.ws.rs.core.Context HttpServletRequest request, @@ -541,10 +576,60 @@ public Response getRDFGraphJsonLDForGet(@jakarta.ws.rs.core.Context HttpServletR @QueryParam("mode") List mode, @QueryParam("uri") List uri) { - logger.info("getRDFGraphJsonLDForGet"); + System.out.println("getRDFGraphJsonLDForGet"); return myGetResult(request, name, oper, uri, param, mode, query, access, defaut, named, JSONLD_FORMAT); } + @GET + @Produces({ N_TRILES }) + public Response getRDFGraphNTriplesForGet(@jakarta.ws.rs.core.Context HttpServletRequest request, + @PathParam("name") String name, + @PathParam("oper") String oper, + @QueryParam("query") String query, + @QueryParam("access") String access, + @QueryParam("default-graph-uri") List defaut, + @QueryParam("named-graph-uri") List named, + @QueryParam("param") List param, + @QueryParam("mode") List mode, + @QueryParam("uri") List uri) { + + logger.info("getRDFGraphNTriplesForGet"); + return myGetResult(request, name, oper, uri, param, mode, query, access, defaut, named, NTRIPLES_FORMAT); + } + + @GET + @Produces({ N_QUADS }) + public Response getRDFGraphNQuadsForGet(@jakarta.ws.rs.core.Context HttpServletRequest request, + @PathParam("name") String name, + @PathParam("oper") String oper, + @QueryParam("query") String query, + @QueryParam("access") String access, + @QueryParam("default-graph-uri") List defaut, + @QueryParam("named-graph-uri") 
List named, + @QueryParam("param") List param, + @QueryParam("mode") List mode, + @QueryParam("uri") List uri) { + + logger.info("getRDFGraphNQuadsForGet"); + + // Get the profiles from the Accept header + ArrayList profiles = getProfiles(request.getHeader("Accept")); + + for (String profile : profiles) { + if (profile.equals(this.CN10_SHA256)) { + logger.info("Profile: " + profile); + return myGetResult(request, name, oper, uri, param, mode, query, access, defaut, named, RDFC10_FORMAT); + } + if (profile.equals(this.CN10_SHA384)) { + logger.info("Profile: " + profile); + return myGetResult(request, name, oper, uri, param, mode, query, access, defaut, named, + RDFC10_SHA384_FORMAT); + } + } + + return myGetResult(request, name, oper, uri, param, mode, query, access, defaut, named, NQUADS_FORMAT); + } + // ---------------------------------------------------- // SPARQL QUERY - SELECT and ASK with HTTP POST // ---------------------------------------------------- @@ -855,7 +940,62 @@ public Response getRDFGraphJsonLDForPost(@jakarta.ws.rs.core.Context HttpServlet query = getQuery(query, update, message); logger.info("getRDFGraphJsonLDForPost"); return getResultForPost(request, name, oper, uri, param, mode, query, access, defaut, named, JSONLD_FORMAT); + } + + @POST + @Produces({ N_TRILES }) + @Consumes(MediaType.APPLICATION_FORM_URLENCODED) + public Response getRDFGraphNTriplesForPost(@jakarta.ws.rs.core.Context HttpServletRequest request, + @PathParam("name") String name, + @PathParam("oper") String oper, + @DefaultValue("") @FormParam("query") String query, + @DefaultValue("") @FormParam("update") String update, + @FormParam("access") String access, + @FormParam("default-graph-uri") List defaut, + @FormParam("named-graph-uri") List named, + @FormParam("param") List param, + @FormParam("mode") List mode, + @FormParam("uri") List uri, + String message) { + query = getQuery(query, update, message); + logger.info("getRDFGraphNTriplesForPost"); + return getResultForPost(request, name, oper, uri, param, mode, query, access, defaut, named, NTRIPLES_FORMAT); + } + + @POST + @Produces({ N_QUADS }) + @Consumes(MediaType.APPLICATION_FORM_URLENCODED) + public Response getRDFGraphNQuadsForPost(@jakarta.ws.rs.core.Context HttpServletRequest request, + @PathParam("name") String name, + @PathParam("oper") String oper, + @DefaultValue("") @FormParam("query") String query, + @DefaultValue("") @FormParam("update") String update, + @FormParam("access") String access, + @FormParam("default-graph-uri") List defaut, + @FormParam("named-graph-uri") List named, + @FormParam("param") List param, + @FormParam("mode") List mode, + @FormParam("uri") List uri, + String message) { + query = getQuery(query, update, message); + logger.info("getRDFGraphNQuadsForPost"); + + // Get the profiles from the Accept header + ArrayList profiles = getProfiles(request.getHeader("Accept")); + + for (String profile : profiles) { + if (profile.equals(this.CN10_SHA256)) { + logger.info("Profile: " + profile); + return myGetResult(request, name, oper, uri, param, mode, query, access, defaut, named, RDFC10_FORMAT); + } + if (profile.equals(this.CN10_SHA384)) { + logger.info("Profile: " + profile); + return myGetResult(request, name, oper, uri, param, mode, query, access, defaut, named, + RDFC10_SHA384_FORMAT); + } + } + return myGetResult(request, name, oper, uri, param, mode, query, access, defaut, named, NQUADS_FORMAT); } // ---------------------------------------------------- From 65edf1224fa741169609cffac721a34f4b4b79c4 Mon Sep 17 
00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Fri, 3 May 2024 09:00:18 +0200 Subject: [PATCH 067/146] Update profiles URLs in SPARQLRestAPI --- .../corese/server/webservice/SPARQLRestAPI.java | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/corese-server/src/main/java/fr/inria/corese/server/webservice/SPARQLRestAPI.java b/corese-server/src/main/java/fr/inria/corese/server/webservice/SPARQLRestAPI.java index 37bb6fe4a..67f2d4c35 100644 --- a/corese-server/src/main/java/fr/inria/corese/server/webservice/SPARQLRestAPI.java +++ b/corese-server/src/main/java/fr/inria/corese/server/webservice/SPARQLRestAPI.java @@ -72,8 +72,9 @@ public class SPARQLRestAPI implements ResultFormatDef, URLParam { static final String HTML = ResultFormat.HTML; // Profiles - private final String CN10_SHA256 = "https://www.w3.org/TR/rdf-canon/#sha-256"; - private final String CN10_SHA384 = "https://www.w3.org/TR/rdf-canon/#sha-384"; + private final String CN10_SHA = "https://www.w3.org/TR/rdf-canon"; + private final String CN10_SHA256 = "https://www.w3.org/TR/rdf-canon#sha-256"; + private final String CN10_SHA384 = "https://www.w3.org/TR/rdf-canon#sha-384"; public static final String PROFILE_DEFAULT = "profile.ttl"; public static final String DEFAULT = NSManager.STL + "default"; @@ -405,7 +406,8 @@ private ArrayList getProfiles(String accept) { return profiles; // eg: Accept: - // application/n-quads;profile="https://www.w3.org/TR/rdf-canon/#sha-256 https://www.w3.org/TR/rdf-canon/#sha-384" + // application/n-quads;profile="https://www.w3.org/TR/rdf-canon/#sha-256 + // https://www.w3.org/TR/rdf-canon/#sha-384" } @GET @@ -616,7 +618,7 @@ public Response getRDFGraphNQuadsForGet(@jakarta.ws.rs.core.Context HttpServletR ArrayList profiles = getProfiles(request.getHeader("Accept")); for (String profile : profiles) { - if (profile.equals(this.CN10_SHA256)) { + if (profile.equals(this.CN10_SHA) || profile.equals(this.CN10_SHA256)) { logger.info("Profile: " + profile); return myGetResult(request, name, oper, uri, param, mode, query, access, defaut, named, RDFC10_FORMAT); } @@ -979,12 +981,12 @@ public Response getRDFGraphNQuadsForPost(@jakarta.ws.rs.core.Context HttpServlet String message) { query = getQuery(query, update, message); logger.info("getRDFGraphNQuadsForPost"); - + // Get the profiles from the Accept header ArrayList profiles = getProfiles(request.getHeader("Accept")); for (String profile : profiles) { - if (profile.equals(this.CN10_SHA256)) { + if (profile.equals(this.CN10_SHA) || profile.equals(this.CN10_SHA256)) { logger.info("Profile: " + profile); return myGetResult(request, name, oper, uri, param, mode, query, access, defaut, named, RDFC10_FORMAT); } From 2f47e51498cc549597ba476daabb52d44194a65e Mon Sep 17 00:00:00 2001 From: corby Date: Fri, 3 May 2024 17:45:44 +0200 Subject: [PATCH 068/146] pprint with namespace manager --- .../inria/corese/core/print/ResultFormat.java | 25 ++++++++++++++++--- .../inria/corese/core/print/TripleFormat.java | 14 +++++++++++ 2 files changed, 36 insertions(+), 3 deletions(-) diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java b/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java index 38ba2a6fa..3481e26b2 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java @@ -67,6 +67,7 @@ public class ResultFormat implements ResultFormatDef { private Graph graph; private Binding bind; 
private Context context; + private NSManager nsmanager; int type = UNDEF_FORMAT; private int transformType = UNDEF_FORMAT; private int construct_format = DEFAULT_CONSTRUCT_FORMAT; @@ -170,6 +171,12 @@ static void defContent(String f, int t) { this(g); this.type = type; } + + ResultFormat(Graph g, NSManager nsm, int type) { + this(g); + setNsmanager(nsm); + this.type = type; + } static public ResultFormat create(Mappings m) { return new ResultFormat(m, type(m)); @@ -277,6 +284,10 @@ static public ResultFormat create(Graph g) { static public ResultFormat create(Graph g, int type) { return new ResultFormat(g, type); } + + static public ResultFormat create(Graph g, NSManager nsm, int type) { + return new ResultFormat(g, nsm, type); + } static public ResultFormat create(Graph g, String type) { return new ResultFormat(g, getSyntax(type)); @@ -435,7 +446,7 @@ String graphToString(Node node) { case RDF_XML_FORMAT: return RDFFormat.create(getGraph()).toString(); case TRIG_FORMAT: - return TripleFormat.create(getGraph(), true) + return TripleFormat.create(getGraph(), getNsmanager(), true) .setNbTriple(getNbTriple()).toString(node); case JSONLD_FORMAT: return JSONLDFormat.create(getGraph()).toString(); @@ -447,8 +458,8 @@ String graphToString(Node node) { case TURTLE_FORMAT: default: // e.g. HTML - String str = TripleFormat.create(getGraph()) - .setNbTriple(getNbTriple()).toString(node); + TripleFormat tf = TripleFormat.create(getGraph(), getNsmanager()); + String str = tf.setNbTriple(getNbTriple()).toString(node); if (type() == HTML_FORMAT) { return html(str); } @@ -761,4 +772,12 @@ public ResultFormat setNbTriple(int nbTriple) { return this; } + public NSManager getNsmanager() { + return nsmanager; + } + + public void setNsmanager(NSManager nsmanager) { + this.nsmanager = nsmanager; + } + } diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/TripleFormat.java b/corese-core/src/main/java/fr/inria/corese/core/print/TripleFormat.java index 011beba6f..4e4af89f2 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/TripleFormat.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/TripleFormat.java @@ -8,6 +8,7 @@ import fr.inria.corese.kgram.core.Query; import fr.inria.corese.sparql.api.IDatatype; import fr.inria.corese.sparql.triple.parser.NSManager; +import java.util.List; /** * Turtle & Trig Format @@ -56,6 +57,9 @@ public void disableCompactBlankNodeSyntax() { } public static TripleFormat create(Graph g, NSManager n) { + if (n == null) { + return new TripleFormat(g, nsm()); + } return new TripleFormat(g, n); } @@ -95,6 +99,12 @@ public static TripleFormat create(Graph g, boolean isGraph) { t.setGraph(isGraph); return t; } + + public static TripleFormat create(Graph g, NSManager nsm, boolean isGraph) { + TripleFormat t = TripleFormat.create(g, nsm); + t.setGraph(isGraph); + return t; + } public void setGraph(boolean b) { isGraph = b; @@ -349,6 +359,10 @@ void node(Node node, boolean rec) { sdisplay(dt.toSparql(true, false, false, nsm)); } } + + void blank(Node node) { + List list = graph.getList(node); + } // node is triple reference of edge // node is subject/object From 1e5324228aacd63585cdeb18e62e488be7576ab4 Mon Sep 17 00:00:00 2001 From: corby Date: Fri, 3 May 2024 17:45:57 +0200 Subject: [PATCH 069/146] pprint with namespace manager --- .../main/java/fr/inria/corese/gui/query/MyJPanelQuery.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/corese-gui/src/main/java/fr/inria/corese/gui/query/MyJPanelQuery.java 
b/corese-gui/src/main/java/fr/inria/corese/gui/query/MyJPanelQuery.java index 6e1a4053f..fb7858156 100644 --- a/corese-gui/src/main/java/fr/inria/corese/gui/query/MyJPanelQuery.java +++ b/corese-gui/src/main/java/fr/inria/corese/gui/query/MyJPanelQuery.java @@ -597,7 +597,8 @@ String graphToString(Mappings map) { case Property.RDF_XML: return ResultFormat.create(g, ResultFormat.RDF_XML_FORMAT).toString(); case Property.TURTLE: - return ResultFormat.create(g, ResultFormat.TURTLE_FORMAT).toString(); + return ResultFormat.create(g, map.getQuery().getAST().getNSM(), + ResultFormat.TURTLE_FORMAT).toString(); case Property.TRIG: return ResultFormat.create(g, ResultFormat.TRIG_FORMAT).toString(); case Property.JSON: @@ -605,7 +606,8 @@ String graphToString(Mappings map) { } } // default - return ResultFormat.create(g, ResultFormat.TRIG_FORMAT).toString(); + return ResultFormat.create(g, + map.getQuery().getAST().getNSM(), ResultFormat.TRIG_FORMAT).toString(); // return turtle(g); } From 77eda8eeaca2985d674705d36fa2972b5cce561b Mon Sep 17 00:00:00 2001 From: corby Date: Fri, 3 May 2024 18:15:42 +0200 Subject: [PATCH 070/146] math functions --- .../java/fr/inria/corese/kgram/api/core/ExprType.java | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/sparql/src/main/java/fr/inria/corese/kgram/api/core/ExprType.java b/sparql/src/main/java/fr/inria/corese/kgram/api/core/ExprType.java index 22444a28b..1cde1a587 100644 --- a/sparql/src/main/java/fr/inria/corese/kgram/api/core/ExprType.java +++ b/sparql/src/main/java/fr/inria/corese/kgram/api/core/ExprType.java @@ -155,7 +155,16 @@ public interface ExprType { public static int START = 122; // ^ public static int IN = 123; public static int POWER = 124; - public static int STAR = 125; + public static int STAR = 125; + + public static int COS = 126; + public static int SIN = 127; + public static int TAN = 128; + public static int ARC_COS = 129; + public static int ARC_SIN = 130; + public static int ARC_TAN = 131; + public static int SQRT = 132; + From 1c53ae2f2f5ec1c6847ef2ee6ee0fd24b7a1c52c Mon Sep 17 00:00:00 2001 From: corby Date: Fri, 3 May 2024 18:16:43 +0200 Subject: [PATCH 071/146] math functions as xt:sin xt:cos xt:tan xt:acos xt:asin xt:atan xt:sqrt --- .../corese/sparql/triple/function/core/UnaryFunction.java | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/sparql/src/main/java/fr/inria/corese/sparql/triple/function/core/UnaryFunction.java b/sparql/src/main/java/fr/inria/corese/sparql/triple/function/core/UnaryFunction.java index 92d9fa180..d9fa260c2 100644 --- a/sparql/src/main/java/fr/inria/corese/sparql/triple/function/core/UnaryFunction.java +++ b/sparql/src/main/java/fr/inria/corese/sparql/triple/function/core/UnaryFunction.java @@ -45,6 +45,14 @@ public IDatatype eval(Computer eval, Binding b, Environment env, Producer p) thr case ExprType.ABS: return abs(dt); case ExprType.ROUND: return round(dt); case ExprType.FLOOR: return floor(dt); + + case ExprType.SQRT: return DatatypeMap.newInstance(Math.sqrt(dt.doubleValue())); + case ExprType.SIN: return DatatypeMap.newInstance(Math.sin(dt.doubleValue())); + case ExprType.COS: return DatatypeMap.newInstance(Math.cos(dt.doubleValue())); + case ExprType.TAN: return DatatypeMap.newInstance(Math.tan(dt.doubleValue())); + case ExprType.ARC_SIN: return DatatypeMap.newInstance(Math.asin(dt.doubleValue())); + case ExprType.ARC_COS: return DatatypeMap.newInstance(Math.acos(dt.doubleValue())); + case ExprType.ARC_TAN: return 
DatatypeMap.newInstance(Math.atan(dt.doubleValue())); case ExprType.DATATYPE: return dt.getDatatype(); case ExprType.LANG: return dt.getDataLang(); From b4bc0a01da16d5c99cd19877873a4b434541488e Mon Sep 17 00:00:00 2001 From: corby Date: Fri, 3 May 2024 18:17:01 +0200 Subject: [PATCH 072/146] math functions as xt:sin xt:cos xt:tan xt:acos xt:asin xt:atan xt:sqrt --- .../inria/corese/sparql/triple/parser/Processor.java | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Processor.java b/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Processor.java index 658747ec9..4bcdc22a8 100755 --- a/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Processor.java +++ b/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Processor.java @@ -1072,7 +1072,15 @@ static void deftable(){ defoper(UUID, ExprType.FUUID); defoper(STRUUID, ExprType.STRUUID); - + defoper(EXT+"power", ExprType.POWER); + defoper(EXT+"sqrt", ExprType.SQRT); + defoper(EXT+"sin", ExprType.SIN); + defoper(EXT+"cos", ExprType.COS); + defoper(EXT+"tan", ExprType.TAN); + defoper(EXT+"asin", ExprType.ARC_SIN); + defoper(EXT+"acos", ExprType.ARC_COS); + defoper(EXT+"atan", ExprType.ARC_TAN); + defoper(POWER, ExprType.POWER); defoper(RANDOM, ExprType.RANDOM); defoper(ABS, ExprType.ABS); From cad5067e0fba4833599b0358faef5bc23b16f82d Mon Sep 17 00:00:00 2001 From: corby Date: Fri, 3 May 2024 18:17:07 +0200 Subject: [PATCH 073/146] math functions as xt:sin xt:cos xt:tan xt:acos xt:asin xt:atan xt:sqrt --- .../java/fr/inria/corese/sparql/triple/parser/Term.java | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Term.java b/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Term.java index af3d564cc..23c0d2e54 100755 --- a/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Term.java +++ b/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Term.java @@ -298,6 +298,14 @@ static Term newFunction(String name, String longName) { case ExprType.ISUNDEFINED: case ExprType.ISSKOLEM: case ExprType.ISEXTENSION: + + case ExprType.SQRT: + case ExprType.COS: + case ExprType.SIN: + case ExprType.TAN: + case ExprType.ARC_COS: + case ExprType.ARC_SIN: + case ExprType.ARC_TAN: return new UnaryFunction(name); case ExprType.CONCAT: From 8260599df5245bef928b28a778e0a22423f33e34 Mon Sep 17 00:00:00 2001 From: corby Date: Wed, 8 May 2024 18:24:23 +0200 Subject: [PATCH 074/146] load linked rule is PUBLIC and check authorized namespace --- .../main/java/fr/inria/corese/sparql/triple/parser/Access.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Access.java b/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Access.java index 9e965485e..9129bb4a3 100644 --- a/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Access.java +++ b/sparql/src/main/java/fr/inria/corese/sparql/triple/parser/Access.java @@ -487,6 +487,8 @@ void init() { set(LDSCRIPT, PUBLIC); // authorize server for query + transform when transform is authorized set(LINKED_TRANSFORMATION, PUBLIC); + // public but check authorized path namespace + set(LINKED_RULE, PUBLIC); // read authorized source is allowed set(READ, PUBLIC); } From ad3a5eba80751ad347127f9e223e3e93a34e235f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 16 May 2024 20:33:02 +0000 Subject: [PATCH 075/146] Bump rexml in 
/corese-unit-test/src/test/resources/data/rdf-star-main Bumps [rexml](https://github.com/ruby/rexml) from 3.2.5 to 3.2.8. - [Release notes](https://github.com/ruby/rexml/releases) - [Changelog](https://github.com/ruby/rexml/blob/master/NEWS.md) - [Commits](https://github.com/ruby/rexml/compare/v3.2.5...v3.2.8) --- updated-dependencies: - dependency-name: rexml dependency-type: indirect ... Signed-off-by: dependabot[bot] --- .../src/test/resources/data/rdf-star-main/Gemfile.lock | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/corese-unit-test/src/test/resources/data/rdf-star-main/Gemfile.lock b/corese-unit-test/src/test/resources/data/rdf-star-main/Gemfile.lock index 53732640d..8405f0dfb 100644 --- a/corese-unit-test/src/test/resources/data/rdf-star-main/Gemfile.lock +++ b/corese-unit-test/src/test/resources/data/rdf-star-main/Gemfile.lock @@ -66,7 +66,8 @@ GEM rdf (~> 3.1) rexml (~> 3.2) redcarpet (3.5.1) - rexml (3.2.5) + rexml (3.2.8) + strscan (>= 3.0.9) scanf (1.0.0) sparql (3.1.8) builder (~> 3.2) @@ -80,6 +81,7 @@ GEM sparql-client (3.1.2) net-http-persistent (~> 4.0, >= 4.0.1) rdf (~> 3.1) + strscan (3.1.0) sxp (1.1.0) rdf (~> 3.1) temple (0.8.2) From 31e4b961c9541d15af93022e48fa4a1a3b29c2bb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Thu, 23 May 2024 15:08:07 +0200 Subject: [PATCH 076/146] Add errors message in all corese interfaces for CanonicalizationException --- .../utils/rdf/RdfDataCanonicalizer.java | 5 + .../fr/inria/corese/gui/core/MainFrame.java | 17 +- .../server/webservice/SPARQLRestAPI.java | 2 +- .../server/webservice/SPARQLResult.java | 405 +++++++++--------- 4 files changed, 227 insertions(+), 202 deletions(-) diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/rdf/RdfDataCanonicalizer.java b/corese-command/src/main/java/fr/inria/corese/command/utils/rdf/RdfDataCanonicalizer.java index 2da538b41..d20113060 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/utils/rdf/RdfDataCanonicalizer.java +++ b/corese-command/src/main/java/fr/inria/corese/command/utils/rdf/RdfDataCanonicalizer.java @@ -7,6 +7,7 @@ import fr.inria.corese.command.utils.format.EnumCanonicAlgo; import fr.inria.corese.core.Graph; import fr.inria.corese.core.print.CanonicalRdf10Format; +import fr.inria.corese.core.print.rdfc10.CanonicalRdf10.CanonicalizationException; import fr.inria.corese.core.print.rdfc10.HashingUtility.HashAlgorithm; import picocli.CommandLine.Model.CommandSpec; @@ -96,6 +97,10 @@ private static void canonicalizeToOutputStream( outputStream.flush(); + } catch (CanonicalizationException e) { + throw new IllegalArgumentException("Unable to canonicalize the RDF data. 
" + e.getMessage(), + e); + } catch (Exception e) { throw new IllegalArgumentException("Failed to write to RDF data to output stream", e); } diff --git a/corese-gui/src/main/java/fr/inria/corese/gui/core/MainFrame.java b/corese-gui/src/main/java/fr/inria/corese/gui/core/MainFrame.java index 372a668e5..c07cb3ffa 100755 --- a/corese-gui/src/main/java/fr/inria/corese/gui/core/MainFrame.java +++ b/corese-gui/src/main/java/fr/inria/corese/gui/core/MainFrame.java @@ -46,6 +46,7 @@ import javax.swing.JMenu; import javax.swing.JMenuBar; import javax.swing.JMenuItem; +import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JRadioButton; import javax.swing.JTabbedPane; @@ -73,6 +74,7 @@ import fr.inria.corese.core.load.result.SPARQLResultParser; import fr.inria.corese.core.print.CanonicalRdf10Format; import fr.inria.corese.core.print.ResultFormat; +import fr.inria.corese.core.print.rdfc10.CanonicalRdf10.CanonicalizationException; import fr.inria.corese.core.print.rdfc10.HashingUtility.HashAlgorithm; import fr.inria.corese.core.query.QueryProcess; import fr.inria.corese.core.rule.RuleEngine; @@ -1567,8 +1569,19 @@ void saveGraph(String format) { */ void saveGraphCanonic(HashAlgorithm algo) { Graph graph = myCorese.getGraph(); - CanonicalRdf10Format transformer = new CanonicalRdf10Format(graph, algo); - save(transformer.toString()); + CanonicalRdf10Format transformer = null; + + try { + transformer = new CanonicalRdf10Format(graph, algo); + } catch (CanonicalizationException ex) { + // Create a new alert dialog with the error message and ok button + String errorMessage = "Unable to canonicalize the RDF data. " + ex.getMessage(); + JOptionPane.showMessageDialog(this, errorMessage, "Error", JOptionPane.ERROR_MESSAGE); + } + + if (transformer != null) { + save(transformer.toString()); + } } /** diff --git a/corese-server/src/main/java/fr/inria/corese/server/webservice/SPARQLRestAPI.java b/corese-server/src/main/java/fr/inria/corese/server/webservice/SPARQLRestAPI.java index 67f2d4c35..abd827ace 100644 --- a/corese-server/src/main/java/fr/inria/corese/server/webservice/SPARQLRestAPI.java +++ b/corese-server/src/main/java/fr/inria/corese/server/webservice/SPARQLRestAPI.java @@ -407,7 +407,7 @@ private ArrayList getProfiles(String accept) { // eg: Accept: // application/n-quads;profile="https://www.w3.org/TR/rdf-canon/#sha-256 - // https://www.w3.org/TR/rdf-canon/#sha-384" + // https://www.w3.org/TR/rdf-canon#sha-384" } @GET diff --git a/corese-server/src/main/java/fr/inria/corese/server/webservice/SPARQLResult.java b/corese-server/src/main/java/fr/inria/corese/server/webservice/SPARQLResult.java index 7f83ac790..44e36e6fa 100644 --- a/corese-server/src/main/java/fr/inria/corese/server/webservice/SPARQLResult.java +++ b/corese-server/src/main/java/fr/inria/corese/server/webservice/SPARQLResult.java @@ -1,54 +1,57 @@ package fr.inria.corese.server.webservice; -import fr.inria.corese.server.webservice.message.LinkedResult; +import java.io.UnsupportedEncodingException; +import java.net.URLDecoder; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + import fr.inria.corese.compiler.federate.FederateVisitor; import fr.inria.corese.core.print.ResultFormat; +import fr.inria.corese.core.print.rdfc10.CanonicalRdf10.CanonicalizationException; import fr.inria.corese.kgram.core.Mappings; +import 
fr.inria.corese.server.webservice.message.LinkedResult; import fr.inria.corese.sparql.api.IDatatype; import fr.inria.corese.sparql.api.ResultFormatDef; import fr.inria.corese.sparql.datatype.DatatypeMap; import fr.inria.corese.sparql.exceptions.EngineException; import fr.inria.corese.sparql.triple.function.term.Binding; -import fr.inria.corese.sparql.triple.parser.Dataset; +import fr.inria.corese.sparql.triple.parser.Access; +import fr.inria.corese.sparql.triple.parser.Access.Level; import fr.inria.corese.sparql.triple.parser.Context; +import fr.inria.corese.sparql.triple.parser.Dataset; import fr.inria.corese.sparql.triple.parser.URLParam; -import fr.inria.corese.sparql.triple.parser.Access.Level; -import fr.inria.corese.sparql.triple.parser.Access; -import java.io.UnsupportedEncodingException; -import java.net.URLDecoder; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.List; -import java.util.Optional; -import jakarta.ws.rs.core.Response; import jakarta.servlet.http.HttpServletRequest; +import jakarta.ws.rs.core.Response; import jakarta.ws.rs.core.Response.ResponseBuilder; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; /** * Process sparql query, post process query result, generate query result format */ -public class SPARQLResult implements ResultFormatDef, URLParam { - +public class SPARQLResult implements ResultFormatDef, URLParam { + static private final Logger logger = LogManager.getLogger(SPARQLResult.class); private static final String headerAccept = "Access-Control-Allow-Origin"; private static final String ERROR_ENDPOINT = "Error while querying Corese SPARQL endpoint"; - private static final String OPER = "operation"; - private static final String URL = Context.URL; + private static final String OPER = "operation"; + private static final String URL = Context.URL; static final int ERROR = 500; private static SPARQLResult singleton; - + private HttpServletRequest request; QuerySolverVisitorServer visitor; - + static { - setSingleton(new SPARQLResult()); + setSingleton(new SPARQLResult()); + } + + SPARQLResult() { } - - - SPARQLResult(){} - + SPARQLResult(HttpServletRequest request) { setRequest(request); } @@ -56,39 +59,42 @@ public class SPARQLResult implements ResultFormatDef, URLParam { static TripleStore getTripleStore() { return SPARQLRestAPI.getTripleStore(); } - - static TripleStore getTripleStore (String name) { - if (name == null) { - return getTripleStore(); - } - return Manager.getEndpoint(name); + + static TripleStore getTripleStore(String name) { + if (name == null) { + return getTripleStore(); + } + return Manager.getEndpoint(name); } - - + /** * Specific endpoint function where format can be specified by format parameter - * Content-Type is set according to format parameter and what is returned by ResultFormat + * Content-Type is set according to format parameter and what is returned by + * ResultFormat * - * @name is a) the name of a specific triple store, b) undefined for standard sparql endpoint + * @name is a) the name of a specific triple store, b) undefined for standard + * sparql endpoint * @oper is sparql | federate | symbolic name defined in urlprofile.ttl * @uri is optional list of URI. 
use case: URL of shacl shape * @param is optional parameter in format: param=key~val;val - * @mode is such as mode=debug;link;log + * @mode is such as mode=debug;link;log * @access is a key that may give access to protected features - * @defaut and @named are graph name URI - * @format is json|xml to specify return format when there is no http header content - * @type is format specified by content negotiation http header (consider type otherwise format) + * @defaut and @named are graph name URI + * @format is json|xml to specify return format when there is no http header + * content + * @type is format specified by content negotiation http header (consider type + * otherwise format) * @transform is list of transformation such as st:map */ - public Response getResultFormat(String name, String oper, + public Response getResultFormat(String name, String oper, List uri, List param, List mode, - String query, String access, + String query, String access, List defaut, List named, - String format, int type, List transform) { - - try { + String format, int type, List transform) { + + try { logger.info("Endpoint URL: " + getRequest().getRequestURL()); - + query = getQuery(query, mode); if (query == null) { throw new EngineException("Undefined query parameter "); @@ -97,26 +103,33 @@ public Response getResultFormat(String name, String oper, beforeRequest(getRequest(), query); Dataset ds = createDataset(getRequest(), defaut, named, access); - + beforeParameter(ds, oper, uri, param, mode, transform); Mappings map = getTripleStore(name).query(getRequest(), query, ds); complete(map, ds.getContext()); afterParameter(ds, map); - - ResultFormat rf = getFormat(map, ds, format, type, transform); + + ResultFormat rf = getFormat(map, ds, format, type, transform); String res = rf.toString(); - + ResponseBuilder rb = Response.status(Response.Status.OK).header(headerAccept, "*"); - + if (format != null) { // real content type of result, possibly different from @Produces rb = rb.header("Content-Type", rf.getContentType()); } Response resp = rb.entity(res).build(); - - afterRequest(getRequest(), resp, query, map, res, ds); - + + afterRequest(getRequest(), resp, query, map, res, ds); + return resp; + } catch (CanonicalizationException ex) { + String errorMessage = "Unable to canonicalize the RDF data. " + ex.getMessage(); + logger.error(errorMessage); + return Response.status(ERROR) + .header(headerAccept, "*") + .entity(errorMessage) + .build(); } catch (EngineException ex) { logger.error("query:"); logger.error(query); @@ -125,14 +138,14 @@ public Response getResultFormat(String name, String oper, return Response.status(ERROR).header(headerAccept, "*").entity(message).build(); } } - + String getQuery(String query, List mode) { if (query == null && mode != null) { query = getContext().getDefaultValue(mode, QUERY); } return query; } - + /** * Post processing */ @@ -144,28 +157,28 @@ void complete(Mappings map, Context c) { } } if (c.hasValue(EXPLAIN)) { - Binding bind = (Binding) map.getBinding(); - if (bind != null && bind.getTrace().length()>0) { + Binding bind = (Binding) map.getBinding(); + if (bind != null && bind.getTrace().length() > 0) { LinkedResult lr = linkedResult(c, "explain"); lr.write(bind.getTrace().toString()); map.addLink(lr.getURL()); } } } - - /** - * Creates a Dataset based on a set of default or named graph URIs. - * For *strong* SPARQL compliance, use dataset.complete() before returning the dataset. + + /** + * Creates a Dataset based on a set of default or named graph URIs. 
+ * For *strong* SPARQL compliance, use dataset.complete() before returning the + * dataset. * * @return a dataset - */ + */ Dataset createDataset(HttpServletRequest request, List defaut, List named, String access) { Dataset ds = null; - if (((defaut != null) && (!defaut.isEmpty())) + if (((defaut != null) && (!defaut.isEmpty())) || ((named != null) && (!named.isEmpty()))) { ds = Dataset.instance(defaut, named); - } - else { + } else { ds = new Dataset(); } boolean b = SPARQLRestAPI.hasKey(request, access); @@ -177,24 +190,23 @@ Dataset createDataset(HttpServletRequest request, List defaut, List uri, + Dataset beforeParameter(Dataset ds, String oper, List uri, List param, List mode, List transform) { if (oper != null) { ds.getContext().set(OPER, oper); List federation = new ArrayList<>(); switch (oper) { - + case FEDERATE: // From SPARQLService: var name is bound to d2kab // URL = http://corese.inria.fr/d2kab/federate @@ -202,49 +214,49 @@ Dataset beforeParameter(Dataset ds, String oper, List uri, // From SPARQL endpoint (alternative) mode and uri are bound // http://corese.inria.fr/sparql?mode=federate&uri=http://ns.inria.fr/federation/d2kab mode = leverage(mode); - //uri = leverage(uri); + // uri = leverage(uri); // declare federate mode for TripleStore query() mode.add(FEDERATE); // federation URL defined in /webapp/data/demo/fedprofile.ttl federation.add(ds.getContext().get(URL).getLabel()); defineFederation(ds, federation); - // additional parameters attached to URL in urlparameter.ttl + // additional parameters attached to URL in urlparameter.ttl break; - + case COMPILE: - // /test/compile?uri=http://myendpoint/sparql + // /test/compile?uri=http://myendpoint/sparql mode = leverage(mode); mode.add(FEDERATE); mode.add(COMPILE); federation.addAll(uri); defineFederation(ds, federation); break; - + case SPARQL: // URL = http://corese.inria.fr/id/sparql // when id is a federation: union of query results of endpoint of id federation // otherwise query triple store with name=id String surl = ds.getContext().get(URL).getLabel(); - + if (surl.contains("/federate/sparql")) { // federate query with graph index mode = leverage(mode); mode.add(FEDERATE); // authorize service clause -// Level level = Access.getQueryAccessLevel(true, true); -// ds.getCreateContext().setLevel(level); + // Level level = Access.getQueryAccessLevel(true, true); + // ds.getCreateContext().setLevel(level); logger.info("Federate query with graph index"); break; } - + String furl = surl; - + if (FederateVisitor.getFederation(furl) == null) { furl = surl.replace("/sparql", "/federate"); } - + if (FederateVisitor.getFederation(furl) != null) { - // federation is defined + // federation is defined mode = leverage(mode); mode.add(FEDERATE); mode.add(SPARQL); @@ -253,58 +265,58 @@ Dataset beforeParameter(Dataset ds, String oper, List uri, defineFederation(ds, federation); } break; - + // default: // other operations considered as sparql endpoint - // with server name if any + // with server name if any default: - // /map/sparql - + // /map/sparql + } - // get additional parameters attached to URL in urlprofile.ttl - //context(ds.getContext(), getContext()); + // get additional parameters attached to URL in urlprofile.ttl + // context(ds.getContext(), getContext()); } - - // get additional parameters attached to URL in urlprofile.ttl + + // get additional parameters attached to URL in urlprofile.ttl context(ds.getContext(), getContext()); - - // get default parameters attached to joker mode * in urlprofile.ttl - 
//ds.getContext().context(getContext(), STAR); - - if (uri!=null && !uri.isEmpty()) { - // list of URI given as parameter uri= + + // get default parameters attached to joker mode * in urlprofile.ttl + // ds.getContext().context(getContext(), STAR); + + if (uri != null && !uri.isEmpty()) { + // list of URI given as parameter uri= ds.getContext().set(URI, DatatypeMap.listResource(uri)); } - + if (param != null) { for (String kw : param) { // decode param=key~val;val ds.getContext().mode(getContext(), PARAM, decode(kw)); } } - + if (mode != null) { for (String kw : mode) { // decode mode=map ds.getContext().mode(getContext(), MODE, decode(kw)); } } - + if (!ds.getContext().hasValue(USER)) { // mode=user means skip mode=* - // get default parameters attached to joker mode * in urlprofile.ttl + // get default parameters attached to joker mode * in urlprofile.ttl ds.getContext().context(getContext(), STAR); } - - if (transform != null && ! transform.isEmpty()) { + + if (transform != null && !transform.isEmpty()) { ds.getContext().set(URLParam.TRANSFORM, DatatypeMap.newStringList(transform)); } - + beforeParameter(ds); - + return ds; } - + /** * urlprofile.ttl may predefine parameters for endpoint URL eg /psparql * complete Context accordingly as if it were URL parameters @@ -312,9 +324,6 @@ Dataset beforeParameter(Dataset ds, String oper, List uri, void context(Context c, Context gc) { c.context(gc, c.get(URL).getLabel()); } - - - /** * Server Context build from urlprofile.ttl @@ -323,13 +332,12 @@ void context(Context c, Context gc) { Context getContext() { return Profile.getProfile().getContext(); } - - + void defineFederation(Dataset ds, List federation) { ds.setUriList(federation); - //ds.getContext().set(FEDERATION, DatatypeMap.listResource(federation)); + // ds.getContext().set(FEDERATION, DatatypeMap.listResource(federation)); } - + String decode(String value) { try { return URLDecoder.decode(value, StandardCharsets.UTF_8.toString()); @@ -337,11 +345,11 @@ String decode(String value) { return value; } } - + List leverage(List name) { return (name == null) ? new ArrayList<>() : name; } - + /** * Record dataset from named in context for documentation purpose */ @@ -354,8 +362,8 @@ void beforeParameter(Dataset ds) { if (named.size() > 0) { ds.getContext().set(NAMED_GRAPH, named); } - } - + } + void afterParameter(Dataset ds, Mappings map) { if (ds.getContext().hasValue(TRACE)) { System.out.println("SPARQL endpoint"); @@ -368,42 +376,41 @@ void afterParameter(Dataset ds, Mappings map) { System.out.println(ft); } } - + QuerySolverVisitorServer getVisitor() { return visitor; } - + SPARQLResult setVisitor(QuerySolverVisitorServer vis) { visitor = vis; return this; } - + /** - * Visitor call LDScript event @beforeRequest @public function - * profile.ttl must load function definitions, + * Visitor call LDScript event @beforeRequest @public function + * profile.ttl must load function definitions, * e.g. 
* */ void beforeRequest(HttpServletRequest request, String query) { getVisitor().beforeRequest(request, query); } - + void afterRequest(HttpServletRequest request, String query, Mappings map) { getVisitor().afterRequest(request, query, map); } - + void afterRequest(HttpServletRequest request, Response resp, String query, Mappings map, String res, Dataset ds) { afterRequest(map, ds, res); getVisitor().afterRequest(request, resp, query, map, res); } - + void afterRequest(Mappings map, Dataset ds, String res) { if (ds.getContext().hasValue(TRACE)) { - System.out.println("service result: \n"+res); + System.out.println("service result: \n" + res); } } - - + ResultFormat getFormat(Mappings map, Dataset ds, String format, int type, List transformList) { // predefined parameter associated to URL/mode in urlparameter.ttl transformList = selectTransformation(ds.getContext(), getValue(ds.getContext(), TRANSFORM, transformList)); @@ -413,75 +420,77 @@ ResultFormat getFormat(Mappings map, Dataset ds, String format, int type, List transformList) { - logger.info("Transform: " + transformList); - - boolean link = ds.getContext().hasAnyValue(LINK, LINK_REST); - ResultFormat std ; - LinkedResult lr = null; - - if (link) { - lr = linkedResult(ds.getContext(), "std"); - // prepare (and return) std result with link to transform - // map will record link url of transform in function getFormatTransformList - // result format will be generated when returning HTTP result - std = getFormatSimple(map, ds, format, type); - // record url of std result document in case transform generate link to std result (cf mapper) - ds.getContext().add(Context.STL_LINK, DatatypeMap.newResource(lr.getURL())); - } - else { - // return transform result - // record std result in href document in case transform generate link href - int mytype = (type==ResultFormat.HTML_FORMAT) ? ResultFormat.UNDEF_FORMAT : type; - std = getFormatSimple(map, ds, format, mytype); - } - - Optional res = getFormatTransformList(map, ds, format, type, transformList); - if (res.isPresent()) { - // no link: return transformation result - return res.get(); - } - - if (link) { - // do it only now because map has recorded transform link - // generate std result document in case transform manage link (cf mapper) - lr.write(std.toString()); - } - // link: return query result - return std; + logger.info("Transform: " + transformList); + + boolean link = ds.getContext().hasAnyValue(LINK, LINK_REST); + ResultFormat std; + LinkedResult lr = null; + + if (link) { + lr = linkedResult(ds.getContext(), "std"); + // prepare (and return) std result with link to transform + // map will record link url of transform in function getFormatTransformList + // result format will be generated when returning HTTP result + std = getFormatSimple(map, ds, format, type); + // record url of std result document in case transform generate link to std + // result (cf mapper) + ds.getContext().add(Context.STL_LINK, DatatypeMap.newResource(lr.getURL())); + } else { + // return transform result + // record std result in href document in case transform generate link href + int mytype = (type == ResultFormat.HTML_FORMAT) ? 
ResultFormat.UNDEF_FORMAT : type; + std = getFormatSimple(map, ds, format, mytype); + } + + Optional res = getFormatTransformList(map, ds, format, type, transformList); + if (res.isPresent()) { + // no link: return transformation result + return res.get(); + } + + if (link) { + // do it only now because map has recorded transform link + // generate std result document in case transform manage link (cf mapper) + lr.write(std.toString()); + } + // link: return query result + return std; } - + /** * URLs of one request share the same key file name */ LinkedResult linkedResult(Context c, String name) { return new LinkedResult(name, "", c.getCreateKey()); } - + /** * Process transformations * When mode=link, add url of transformation result in map query result link - * and return empty + * and return empty * Otherwise return result of (first) transformation */ - Optional getFormatTransformList(Mappings map, Dataset ds, String format, int type, List transformList) { + Optional getFormatTransformList(Mappings map, Dataset ds, String format, int type, + List transformList) { ResultFormat fst = null; Context c = ds.getContext(); // prepare the list of linked result URL before all // each result may then contain link to these URLs List linkedResult = getLinkedResult(map, c, transformList); int i = 0; - + for (String transform : transformList) { ResultFormat res = getFormatTransform(map, ds, format, type, transform); if (fst == null) { @@ -494,31 +503,33 @@ Optional getFormatTransformList(Mappings map, Dataset ds, String f if (c.hasAnyValue(LINK, LINK_REST)) { // mode=link - // save transformation result in document and record URL of document in map result link + // save transformation result in document and record URL of document in map + // result link LinkedResult lr = linkedResult.get(i++); - lr.write(res.toString()); - logger.info(String.format("Transformation %s result in: %s", + lr.write(res.toString()); + logger.info(String.format("Transformation %s result in: %s", c.nsm().toPrefix(transform), lr.getURL())); } else { // no link: return result of first transformation return Optional.of(res); } } - + if (c.hasValue(LINK_REST)) { - // return result of first transformation (it may have generated links to other transformations) + // return result of first transformation (it may have generated links to other + // transformations) return Optional.of(fst); - } - else { + } else { // query result will be returned with link url to transformation result return Optional.empty(); } } - + /** * Prepare LinkedResult place holder list with file name and URL - * Each LinkedResult will be used to store a result in a document accessible by URL - * PRAGMA: map record link url + * Each LinkedResult will be used to store a result in a document accessible by + * URL + * PRAGMA: map record link url * It will be considered by ResultFormat std in * function getFormatTransform above * @@ -526,7 +537,7 @@ Optional getFormatTransformList(Mappings map, Dataset ds, String f List getLinkedResult(Mappings map, Context c, List transformList) { if (c.hasAnyValue(LINK, LINK_REST)) { List list = new ArrayList<>(); - + for (String name : transformList) { LinkedResult lr = linkedResult(c, getName(name)); list.add(lr); @@ -536,7 +547,7 @@ List getLinkedResult(Mappings map, Context c, List transfo } return null; } - + ResultFormat getFormatTransform(Mappings map, Dataset ds, String format, int type, String transform) { ResultFormat ft; if (type == UNDEF_FORMAT) { @@ -544,13 +555,13 @@ ResultFormat getFormatTransform(Mappings map, 
Dataset ds, String format, int typ } else { ft = ResultFormat.create(map, type, transform).init(ds); } - if (map.getBinding()!=null && ft.getBind()==null) { + if (map.getBinding() != null && ft.getBind() == null) { // share ldscript binding environment with transformer - ft.setBind((Binding)map.getBinding()); + ft.setBind((Binding) map.getBinding()); } return ft; } - + ResultFormat getFormatSimple(Mappings map, Dataset ds, String format, int type) { if (type == UNDEF_FORMAT) { return ResultFormat.create(map, format).init(ds); @@ -558,7 +569,7 @@ ResultFormat getFormatSimple(Mappings map, Dataset ds, String format, int type) return ResultFormat.create(map, type).init(ds); } } - + /** * predefined parameter associated to url/mode in urlprofile.ttl */ @@ -568,7 +579,7 @@ List getValue(Context ct, String name, List value) { } return ct.getStringList(name); } - + /** * select authorized transformations */ @@ -581,9 +592,7 @@ List selectTransformation(Context ct, List list) { // check authorized transformation return Access.selectNamespace(Access.Feature.LINKED_TRANSFORMATION, ct.getLevel(), alist); } - - - + /** * trans;trans -> list of trans * st:all -> st:xml st:json @@ -594,29 +603,27 @@ List prepare(List transformList) { System.out.println("server context: " + getContext()); } List list = new ArrayList<>(); - + for (String name : transformList) { if (name.contains(";")) { for (String key : name.split(";")) { getContext().prepare(key, list); } - } - else { + } else { getContext().prepare(name, list); } } return list; } - - + String getName(String transform) { if (transform.contains("#")) { - return transform.substring(1+transform.indexOf("#")); + return transform.substring(1 + transform.indexOf("#")); } - return transform.substring(1+transform.lastIndexOf("/")); + return transform.substring(1 + transform.lastIndexOf("/")); } - + public static SPARQLResult getSingleton() { return singleton; } From 92e3074a827a86f6629a083a399a401907e16edb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Thu, 23 May 2024 16:00:48 +0200 Subject: [PATCH 077/146] Increment version number to 4.5.1 --- CHANGELOG.md | 2 ++ README.md | 22 ++++++------- .../flatpak/fr.inria.corese.CoreseCommand.yml | 4 +-- .../fr.inria.corese.CoreseCommand.appdata.xml | 31 +++++++++++++------ .../java/fr/inria/corese/command/App.java | 2 +- .../command/utils/http/SparqlHttpClient.java | 2 +- .../flatpak/fr.inria.corese.CoreseGui.yml | 4 +-- .../fr.inria.corese.CoreseGui.appdata.xml | 12 +++++-- .../fr/inria/corese/gui/core/MainFrame.java | 2 +- corese-server/build-docker/README.md | 1 + corese-server/build-docker/corese/Dockerfile | 2 +- .../build-docker/corese/corese-server.sh | 2 +- .../webservice/EmbeddedJettyServer.java | 2 +- .../Corese-library with Python.md | 8 ++--- pom.xml | 2 +- 15 files changed, 60 insertions(+), 38 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7cea61022..ad99305c8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,8 @@ # Corese Changelog +## 4.5.1 – + ## 4.5.0 – 2023/12/14 ### Added diff --git a/README.md b/README.md index 0fc360d6b..53271bcb7 100644 --- a/README.md +++ b/README.md @@ -41,21 +41,21 @@ There are several interfaces for Corese: fr.inria.corese corese-core - 4.5.0 + 4.5.1 fr.inria.corese corese-jena - 4.5.0 + 4.5.1 fr.inria.corese corese-rdf4j - 4.5.0 + 4.5.1 ``` @@ -74,8 +74,8 @@ docker run --name my-corese \ - Alternatively, download [Corese-server jar file](https://project.inria.fr/corese/jar/). 
```sh -wget "https://github.com/Wimmics/corese/releases/download/release-4.5.0/corese-server-4.5.0.jar" -java -jar "-Dfile.encoding=UTF8" "corese-server-4.5.0.jar" +wget "https://github.com/Wimmics/corese/releases/download/release-4.5.1/corese-server-4.5.1.jar" +java -jar "-Dfile.encoding=UTF8" "corese-server-4.5.1.jar" ``` - Documentation: @@ -94,8 +94,8 @@ java -jar "-Dfile.encoding=UTF8" "corese-server-4.5.0.jar" - Or download [Corese-gui jar file](https://project.inria.fr/corese/jar/). ```sh -wget "https://github.com/Wimmics/corese/releases/download/release-4.5.0/corese-gui-4.5.0.jar" -java -jar "-Dfile.encoding=UTF8" "corese-gui-4.5.0.jar" +wget "https://github.com/Wimmics/corese/releases/download/release-4.5.1/corese-gui-4.5.1.jar" +java -jar "-Dfile.encoding=UTF8" "corese-gui-4.5.1.jar" ``` ### Corese-Command @@ -110,8 +110,8 @@ java -jar "-Dfile.encoding=UTF8" "corese-gui-4.5.0.jar" - Or download [Corese-command jar file](https://project.inria.fr/corese/jar/). ```sh -wget "https://github.com/Wimmics/corese/releases/download/release-4.5.0/corese-command-4.5.0.jar" -java -jar "-Dfile.encoding=UTF8" "corese-command-4.5.0.jar" +wget "https://github.com/Wimmics/corese/releases/download/release-4.5.1/corese-command-4.5.1.jar" +java -jar "-Dfile.encoding=UTF8" "corese-command-4.5.1.jar" ``` - Alternatively, use the installation script for Linux and MacOS systems. @@ -135,8 +135,8 @@ curl -sSL https://files.inria.fr/corese/distrib/script/uninstall-corese-command. - Download [Corese-python jar file](https://project.inria.fr/corese/jar/). ```sh -wget "https://github.com/Wimmics/corese/releases/download/release-4.5.0/corese-library-python-4.5.0.jar" -java -jar "-Dfile.encoding=UTF8" "corese-library-python-4.5.0.jar" +wget "https://github.com/Wimmics/corese/releases/download/release-4.5.1/corese-library-python-4.5.1.jar" +java -jar "-Dfile.encoding=UTF8" "corese-library-python-4.5.1.jar" ``` - Documentation: [Getting Started With Corese-python](/docs/corese-python/Corese-library%20with%20Python.md) diff --git a/corese-command/metadata/flatpak/fr.inria.corese.CoreseCommand.yml b/corese-command/metadata/flatpak/fr.inria.corese.CoreseCommand.yml index 8b2cd79e2..b749b8c78 100644 --- a/corese-command/metadata/flatpak/fr.inria.corese.CoreseCommand.yml +++ b/corese-command/metadata/flatpak/fr.inria.corese.CoreseCommand.yml @@ -31,7 +31,7 @@ modules: sources: - type: file - url: https://github.com/Wimmics/corese/releases/download/release-4.5.0/corese-command-4.5.0.jar + url: https://github.com/Wimmics/corese/releases/download/release-4.5.1/corese-command-4.5.1.jar sha256: 2b15c46a9643eafb1119db9579e4f259e576647d9f322f437089960120960393 x-checker-data: type: json @@ -42,6 +42,6 @@ modules: - type: git dest: git_repo url: https://github.com/Wimmics/corese - tag: metadata-4.5.0 + tag: metadata-4.5.1 - type: file path: run.sh \ No newline at end of file diff --git a/corese-command/metadata/fr.inria.corese.CoreseCommand.appdata.xml b/corese-command/metadata/fr.inria.corese.CoreseCommand.appdata.xml index 73b86c1de..2ac0af659 100644 --- a/corese-command/metadata/fr.inria.corese.CoreseCommand.appdata.xml +++ b/corese-command/metadata/fr.inria.corese.CoreseCommand.appdata.xml @@ -109,6 +109,10 @@ + + + +
    @@ -142,27 +146,36 @@

    - Enhanced 'convert' and 'sparql' to accept URL and standard input. - - 'sparql' and 'convert' now support standard output and multiple file inputs. - - Expanded 'sparql' to handle directories, recursive directories, and various query types (SELECT, CONSTRUCT, ASK, etc.). + - 'sparql' and 'convert' now support standard output and multiple file + inputs. + - Expanded 'sparql' to handle directories, recursive directories, and various + query types (SELECT, CONSTRUCT, ASK, etc.). - User can choose result format in 'sparql', including markdown. - Added mime type as a format name. – Disabled owl:imports auto-import. - - Docker users can pass custom options and adjust log level for Corese-server. + - Docker users can pass custom options and adjust log level for + Corese-server. - Standardized format names in Corese-command. - Removed 'owlProfile' and 'ldscript'; to return after refactoring. - Fixed warning related to sun.reflect.Reflection.getCallerClass.

    - - Amélioration des commandes 'convert' et 'sparql' pour accepter les URL et l'entrée standard. - - 'sparql' et 'convert' supportent désormais la sortie standard et de multiples fichiers en entrée. - - Extension de 'sparql' pour gérer les répertoires, les sous-répertoires et divers types de requêtes (SELECT, CONSTRUCT, ASK, etc.). - - L'utilisateur peut choisir le format du résultat dans 'sparql', y compris en markdown. + - Amélioration des commandes 'convert' et 'sparql' pour accepter les URL et + l'entrée standard. + - 'sparql' et 'convert' supportent désormais la sortie standard et de multiples + fichiers en entrée. + - Extension de 'sparql' pour gérer les répertoires, les sous-répertoires et + divers types de requêtes (SELECT, CONSTRUCT, ASK, etc.). + - L'utilisateur peut choisir le format du résultat dans 'sparql', y compris en + markdown. - Ajout du type MIME comme nom de format. – Désactivation de l'auto-importation owl:imports. - - Les utilisateurs de Docker peuvent passer des options personnalisées et ajuster le niveau de journalisation pour Corese-server. + - Les utilisateurs de Docker peuvent passer des options personnalisées et + ajuster le niveau de journalisation pour Corese-server. - Standardisation des noms de format dans Corese-command. - - Retrait des commandes 'owlProfile' et 'ldscript'; reviendront après refonte. + - Retrait des commandes 'owlProfile' et 'ldscript'; reviendront après + refonte. - Correction d'un avertissement lié à sun.reflect.Reflection.getCallerClass.

    diff --git a/corese-command/src/main/java/fr/inria/corese/command/App.java b/corese-command/src/main/java/fr/inria/corese/command/App.java index f646ba033..137f100db 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/App.java +++ b/corese-command/src/main/java/fr/inria/corese/command/App.java @@ -14,7 +14,7 @@ public final class App implements Runnable { - public final static String version = "4.5.0"; + public final static String version = "4.5.1"; public static void main(String[] args) { int exitCode = new CommandLine(new App()).execute(args); diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/http/SparqlHttpClient.java b/corese-command/src/main/java/fr/inria/corese/command/utils/http/SparqlHttpClient.java index dd0e51d93..9fbeef342 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/utils/http/SparqlHttpClient.java +++ b/corese-command/src/main/java/fr/inria/corese/command/utils/http/SparqlHttpClient.java @@ -39,7 +39,7 @@ public class SparqlHttpClient { private int redirectCount = 0; private int maxRedirects = 5; - private final String USERAGENT = "Corese-Command/4.5.0"; + private final String USERAGENT = "Corese-Command/4.5.1"; ///////////////// // Constructor // diff --git a/corese-gui/metadata/flatpak/fr.inria.corese.CoreseGui.yml b/corese-gui/metadata/flatpak/fr.inria.corese.CoreseGui.yml index 56a521fbd..b711212aa 100644 --- a/corese-gui/metadata/flatpak/fr.inria.corese.CoreseGui.yml +++ b/corese-gui/metadata/flatpak/fr.inria.corese.CoreseGui.yml @@ -32,7 +32,7 @@ modules: sources: - type: file - url: https://github.com/Wimmics/corese/releases/download/release-4.5.0/corese-gui-4.5.0.jar + url: https://github.com/Wimmics/corese/releases/download/release-4.5.1/corese-gui-4.5.1.jar sha256: cb3270d2ceccc9e8194c7d657eeca3c037e957de994f7eca3bbd1cf58fdbea89 x-checker-data: type: json @@ -43,6 +43,6 @@ modules: - type: git dest: git_repo url: https://github.com/Wimmics/corese - tag: metadata-4.5.0 + tag: metadata-4.5.1 - type: file path: run.sh diff --git a/corese-gui/metadata/fr.inria.corese.CoreseGui.appdata.xml b/corese-gui/metadata/fr.inria.corese.CoreseGui.appdata.xml index a0a713a82..6f1cc91d9 100644 --- a/corese-gui/metadata/fr.inria.corese.CoreseGui.appdata.xml +++ b/corese-gui/metadata/fr.inria.corese.CoreseGui.appdata.xml @@ -3,7 +3,7 @@ fr.inria.corese.CoreseGui Corese-Gui - + Unlock the power of the Semantic Web Débloquez la puissance du Web sémantique @@ -26,7 +26,8 @@ the creation, manipulation, analysis, serialization, and querying of RDF data.

    - Moreover, the GUI brings advanced capabilities to your fingertips, incorporating extended + Moreover, the GUI brings advanced capabilities to your fingertips, incorporating + extended functionalities such as STTL SPARQL, SPARQL Rule, and LDScript. This enhances your data processing and reasoning workflows, making Corese-GUI an indispensable tool for both novices and experts in the Semantic Web domain. @@ -124,6 +125,10 @@ + + + +

      @@ -134,7 +139,8 @@
      • Mise à jour du message de chargement dans Corese-GUI.
      • -
      • Correction de la requête fédérée avec des déclarations PREFIX échouant dans certaines conditions.
      • +
      • Correction de la requête fédérée avec des déclarations PREFIX échouant dans + certaines conditions.
      • Mise à jour de la bibliothèque json à 20231013.
      diff --git a/corese-gui/src/main/java/fr/inria/corese/gui/core/MainFrame.java b/corese-gui/src/main/java/fr/inria/corese/gui/core/MainFrame.java index c07cb3ffa..ce152dd78 100755 --- a/corese-gui/src/main/java/fr/inria/corese/gui/core/MainFrame.java +++ b/corese-gui/src/main/java/fr/inria/corese/gui/core/MainFrame.java @@ -112,7 +112,7 @@ public class MainFrame extends JFrame implements ActionListener { private static MainFrame singleton; private static final long serialVersionUID = 1L; private static final int LOAD = 1; - private static final String TITLE = "Corese 4.5.0 - Inria UCA I3S - 2023-10-14"; + private static final String TITLE = "Corese 4.5.1 - Inria UCA I3S - 2023-10-14"; // On déclare notre conteneur d'onglets protected static JTabbedPane conteneurOnglets; // Compteur pour le nombre d'onglets query créés diff --git a/corese-server/build-docker/README.md b/corese-server/build-docker/README.md index 4865bf822..0950105d8 100644 --- a/corese-server/build-docker/README.md +++ b/corese-server/build-docker/README.md @@ -17,6 +17,7 @@ Corese also implements the LDScript and STTL SPARQL extensions. The Docker image tag includes the Corese version installed in the image. The following versions are currently available: +- corese:4.5.1 - corese:4.5.0 - corese:4.4.1 - corese:4.4.0 diff --git a/corese-server/build-docker/corese/Dockerfile b/corese-server/build-docker/corese/Dockerfile index 5ce4463ba..2993c23f8 100644 --- a/corese-server/build-docker/corese/Dockerfile +++ b/corese-server/build-docker/corese/Dockerfile @@ -7,7 +7,7 @@ ENV CORESE="/usr/local/corese" RUN mkdir -p $CORESE WORKDIR $CORESE -RUN wget https://github.com/Wimmics/corese/releases/download/release-4.5.0/corese-server-4.5.0.jar +RUN wget https://github.com/Wimmics/corese/releases/download/release-4.5.1/corese-server-4.5.1.jar COPY log4j2.xml $CORESE/log4j2.xml COPY corese-default-profile.ttl $CORESE/corese-default-profile.ttl diff --git a/corese-server/build-docker/corese/corese-server.sh b/corese-server/build-docker/corese/corese-server.sh index 5c045370f..38c6e0243 100644 --- a/corese-server/build-docker/corese/corese-server.sh +++ b/corese-server/build-docker/corese/corese-server.sh @@ -1,7 +1,7 @@ #!/bin/bash CORESE=/usr/local/corese -JAR=$CORESE/corese-server-4.5.0.jar +JAR=$CORESE/corese-server-4.5.1.jar PROFILE=$CORESE/config/corese-profile.ttl PROPERTIES=$CORESE/config/corese-properties.properties OPTIONS=${OPTIONS:-} diff --git a/corese-server/src/main/java/fr/inria/corese/server/webservice/EmbeddedJettyServer.java b/corese-server/src/main/java/fr/inria/corese/server/webservice/EmbeddedJettyServer.java index c691e9806..39dd29f1d 100644 --- a/corese-server/src/main/java/fr/inria/corese/server/webservice/EmbeddedJettyServer.java +++ b/corese-server/src/main/java/fr/inria/corese/server/webservice/EmbeddedJettyServer.java @@ -188,7 +188,7 @@ public static void main(String args[]) throws Exception { port = Integer.parseInt(cmd.getOptionValue("p")); } if (cmd.hasOption("v")) { - logger.info("version 4.5.0"); + logger.info("version 4.5.1"); System.exit(0); } if (cmd.hasOption("e")) { diff --git a/docs/corese-python/Corese-library with Python.md b/docs/corese-python/Corese-library with Python.md index 396ca669a..3e9fdbd2d 100644 --- a/docs/corese-python/Corese-library with Python.md +++ b/docs/corese-python/Corese-library with Python.md @@ -4,8 +4,8 @@ 1. Install java and python 2. Install python dependencies `pip install --user py4j` -3. 
Download [corese-library-python-4.5.0.jar](https://github.com/Wimmics/corese/releases/download/release-4.5.0/corese-library-python-4.5.0.jar) -4. Place in the same directory `corese-library-python-4.5.0.jar` and your code `myCode.py` +3. Download [corese-library-python-4.5.1.jar](https://github.com/Wimmics/corese/releases/download/release-4.5.1/corese-library-python-4.5.1.jar) +4. Place in the same directory `corese-library-python-4.5.1.jar` and your code `myCode.py` 5. Run with `python myCode.py` ## 2. Template @@ -22,7 +22,7 @@ from py4j.java_gateway import JavaGateway # Start java gateway java_process = subprocess.Popen( - ['java', '-jar', '-Dfile.encoding=UTF-8', 'corese-library-python-4.5.0.jar']) + ['java', '-jar', '-Dfile.encoding=UTF-8', 'corese-library-python-4.5.1.jar']) sleep(1) gateway = JavaGateway() @@ -59,7 +59,7 @@ from py4j.java_gateway import JavaGateway # Start java gateway java_process = subprocess.Popen( - ['java', '-jar', '-Dfile.encoding=UTF-8', 'corese-library-python-4.5.0.jar']) + ['java', '-jar', '-Dfile.encoding=UTF-8', 'corese-library-python-4.5.1.jar']) sleep(1) gateway = JavaGateway() diff --git a/pom.xml b/pom.xml index 1916253de..4aa12d3e8 100644 --- a/pom.xml +++ b/pom.xml @@ -118,7 +118,7 @@ b = build number. For more details: https://docs.oracle.com/middleware/1212/core/MAVEN/maven_version.htm#MAVEN400 --> - 4.5.0 + 4.5.1 -Xdoclint:none From deab970a17a234603a961cf96759ac29bb23b4e6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Fri, 24 May 2024 10:23:06 +0200 Subject: [PATCH 078/146] feat: Add nightly build workflow This commit adds a new workflow file, `.github/workflows/nightly-build.yml`, which sets up a nightly build for the project. The workflow is triggered on pushes to the `development` branch and can also be manually triggered using the workflow dispatch event. The workflow runs on the latest version of Ubuntu and performs the following steps: - Checks out the repository at the `development` branch - Sets up JDK 11 using the Temurin distribution - Builds the project using Maven - Renames the generated JAR files with a suffix based on the short commit hash - Archives the build artifacts - Notifies success or failure of the build This workflow will help automate the process of generating nightly builds for the project. --- .github/workflows/nightly-build.yml | 58 +++++++++++++++++++++++++++++ 1 file changed, 58 insertions(+) create mode 100644 .github/workflows/nightly-build.yml diff --git a/.github/workflows/nightly-build.yml b/.github/workflows/nightly-build.yml new file mode 100644 index 000000000..246768c7e --- /dev/null +++ b/.github/workflows/nightly-build.yml @@ -0,0 +1,58 @@ +name: Nightly Build + +on: + push: + branches: + - development + workflow_dispatch: + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + with: + ref: development + + - name: Set up JDK 11 + uses: actions/setup-java@v4 + with: + distribution: 'temurin' + java-version: '11' + + - name: Build with Maven + run: mvn clean install + + - name: Get short commit hash + id: vars + run: echo "::set-output name=short_commit::$(git rev-parse --short HEAD)" + + - name: Rename Artifacts + run: | + for file in $(find . 
-name '*.jar' | grep -v 'original'); do + base=$(basename $file) + new_base=$(echo $base | sed "s/4\.5\.1/${{ steps.vars.outputs.short_commit }}-nightly/") + mv $file $(dirname $file)/$new_base + done + + - name: Archive Build Artifacts + uses: actions/upload-artifact@v4 + with: + name: nightly-build + path: | + corese-core/target/corese-core-*-nightly.jar + corese-command/target/corese-command-*-nightly.jar + corese-gui/target/corese-gui-*-nightly.jar + corese-jena/target/corese-jena-*-nightly.jar + corese-rdf4j/target/corese-rdf4j-*-nightly.jar + corese-server/target/corese-server-*-nightly.jar + + - name: Notify Success + if: ${{ success() }} + run: echo "Nightly build completed successfully!" + + - name: Notify Failure + if: ${{ failure() }} + run: echo "Nightly build failed." From ccebccbe16b01df9138d0b605ef0795feda05067 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Fri, 24 May 2024 10:47:51 +0200 Subject: [PATCH 079/146] feat: Automate nightly builds with workflow --- .github/workflows/nightly-build.yml | 23 +++++++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/.github/workflows/nightly-build.yml b/.github/workflows/nightly-build.yml index 246768c7e..3c75ab0f0 100644 --- a/.github/workflows/nightly-build.yml +++ b/.github/workflows/nightly-build.yml @@ -27,13 +27,13 @@ jobs: - name: Get short commit hash id: vars - run: echo "::set-output name=short_commit::$(git rev-parse --short HEAD)" + run: echo "SHORT_COMMIT=$(git rev-parse --short HEAD)" >> $GITHUB_ENV - name: Rename Artifacts run: | for file in $(find . -name '*.jar' | grep -v 'original'); do base=$(basename $file) - new_base=$(echo $base | sed "s/4\.5\.1/${{ steps.vars.outputs.short_commit }}-nightly/") + new_base=$(echo $base | sed "s/4\.5\.1/${SHORT_COMMIT}-nightly/") mv $file $(dirname $file)/$new_base done @@ -49,6 +49,25 @@ jobs: corese-rdf4j/target/corese-rdf4j-*-nightly.jar corese-server/target/corese-server-*-nightly.jar + - name: Delete previous prerelease (on success) + if: ${{ success() }} + run: | + latest_release=$(gh release list --limit 1 --prerelease --json tagName -q '.[0].tagName') + if [ -n "$latest_release" ]; then + gh release delete $latest_release -y + fi + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Create new prerelease + if: ${{ success() }} + run: | + tag_name="nightly-${SHORT_COMMIT}" + gh release create $tag_name -p -t "Nightly Build ${SHORT_COMMIT}" -n "This is an automated nightly build. It is a development version and not stable." --target development + gh release upload $tag_name corese-core/target/corese-core-*-nightly.jar corese-command/target/corese-command-*-nightly.jar corese-gui/target/corese-gui-*-nightly.jar corese-jena/target/corese-jena-*-nightly.jar corese-rdf4j/target/corese-rdf4j-*-nightly.jar corese-server/target/corese-server-*-nightly.jar + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Notify Success if: ${{ success() }} run: echo "Nightly build completed successfully!" 
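
For orientation, the prerelease rotation that this workflow automates (drop the previous nightly prerelease, publish a new one from the development branch, attach the renamed jars) can be condensed into a single step. The fragment below is only a sketch, not part of the patch series: the step name is illustrative, it reuses the gh invocations introduced above, and it assumes the default GITHUB_TOKEN is allowed to manage releases and that the renamed *-nightly.jar files are still in the module target/ directories.

    # Sketch only — a condensed version of the release handling above, written as one
    # step that would slot into the build job's `steps:` after "Rename Artifacts".
    # Assumes: GITHUB_TOKEN may manage releases; renamed *-nightly.jar files exist.
    - name: Rotate nightly prerelease (sketch)
      env:
        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      run: |
        short_commit=$(git rev-parse --short HEAD)
        tag_name="nightly-${short_commit}"
        # Delete the previous nightly prerelease, if one exists.
        previous=$(gh release list --limit 1 --exclude-drafts \
          --json tagName,isPrerelease -q '.[] | select(.isPrerelease) | .tagName')
        if [ -n "$previous" ]; then
          gh release delete "$previous" -y
        fi
        # Publish the new prerelease from the development branch and attach the jars.
        gh release create "$tag_name" -p -t "Nightly Build ${short_commit}" \
          -n "Automated nightly build of the development branch; not a stable release." \
          --target development
        gh release upload "$tag_name" ./corese-*/target/corese-*-nightly.jar
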
From 000ac97a685d513ff8406c0c75556f89f7bcdffd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Fri, 24 May 2024 10:59:53 +0200 Subject: [PATCH 080/146] Automate nightly builds with workflow --- .github/workflows/nightly-build.yml | 38 ++++++++++++++++++++++------- 1 file changed, 29 insertions(+), 9 deletions(-) diff --git a/.github/workflows/nightly-build.yml b/.github/workflows/nightly-build.yml index 3c75ab0f0..45c4e4929 100644 --- a/.github/workflows/nightly-build.yml +++ b/.github/workflows/nightly-build.yml @@ -37,7 +37,7 @@ jobs: mv $file $(dirname $file)/$new_base done - - name: Archive Build Artifacts + - name: Upload Build Artifacts uses: actions/upload-artifact@v4 with: name: nightly-build @@ -49,29 +49,49 @@ jobs: corese-rdf4j/target/corese-rdf4j-*-nightly.jar corese-server/target/corese-server-*-nightly.jar - - name: Delete previous prerelease (on success) - if: ${{ success() }} + delete_previous_release: + runs-on: ubuntu-latest + needs: build + steps: + - name: Delete previous prerelease + if: ${{ needs.build.result == 'success' }} run: | latest_release=$(gh release list --limit 1 --prerelease --json tagName -q '.[0].tagName') if [ -n "$latest_release" ]; then gh release delete $latest_release -y + else + echo "No previous prerelease found." fi env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + create_prerelease: + runs-on: ubuntu-latest + needs: [build, delete_previous_release] + steps: + - name: Download Build Artifacts + uses: actions/download-artifact@v4 + with: + name: nightly-build + path: ./artifacts + - name: Create new prerelease - if: ${{ success() }} + if: ${{ needs.build.result == 'success' }} run: | - tag_name="nightly-${SHORT_COMMIT}" - gh release create $tag_name -p -t "Nightly Build ${SHORT_COMMIT}" -n "This is an automated nightly build. It is a development version and not stable." --target development - gh release upload $tag_name corese-core/target/corese-core-*-nightly.jar corese-command/target/corese-command-*-nightly.jar corese-gui/target/corese-gui-*-nightly.jar corese-jena/target/corese-jena-*-nightly.jar corese-rdf4j/target/corese-rdf4j-*-nightly.jar corese-server/target/corese-server-*-nightly.jar + tag_name="nightly-${{ env.SHORT_COMMIT }}" + gh release create $tag_name -p -t "Nightly Build ${{ env.SHORT_COMMIT }}" -n "This is an automated nightly build. It is a development version and not stable." --target development + gh release upload $tag_name ./artifacts/corese-core-*-nightly.jar ./artifacts/corese-command-*-nightly.jar ./artifacts/corese-gui-*-nightly.jar ./artifacts/corese-jena-*-nightly.jar ./artifacts/corese-rdf4j-*-nightly.jar ./artifacts/corese-server-*-nightly.jar env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + notify: + runs-on: ubuntu-latest + needs: [build, create_prerelease] + steps: - name: Notify Success - if: ${{ success() }} + if: ${{ needs.build.result == 'success' }} run: echo "Nightly build completed successfully!" - name: Notify Failure - if: ${{ failure() }} + if: ${{ needs.build.result != 'success' }} run: echo "Nightly build failed." 
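
One detail worth keeping in mind with the job split introduced above: values appended to $GITHUB_ENV in the build job are scoped to that job, so SHORT_COMMIT is not visible in delete_previous_prerelease or create_prerelease. The usual way to pass such a value between jobs is a job-level output fed from $GITHUB_OUTPUT and read through needs.<job>.outputs.<name>. The workflow below is a minimal sketch of that pattern only; its name and job/step names are illustrative, not taken from this repository.

    # Sketch only: passing a value computed in one job to a later job via job outputs.
    # Names are illustrative, not the workflow's own.
    name: Cross-job value passing (sketch)
    on: workflow_dispatch

    jobs:
      compute:
        runs-on: ubuntu-latest
        outputs:
          short_commit: ${{ steps.vars.outputs.short_commit }}
        steps:
          - uses: actions/checkout@v4
          - name: Record short commit hash as a step output
            id: vars
            run: echo "short_commit=$(git rev-parse --short HEAD)" >> "$GITHUB_OUTPUT"

      publish:
        runs-on: ubuntu-latest
        needs: compute
        steps:
          - name: Read the value produced by the compute job
            run: echo "Would tag nightly-${{ needs.compute.outputs.short_commit }}"

The ::set-output workflow command used elsewhere in this series is the deprecated form of the same mechanism; writing to $GITHUB_OUTPUT is its current replacement.
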
From 824196d94677f9af350ebe192ee680d0df002126 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Fri, 24 May 2024 11:10:50 +0200 Subject: [PATCH 081/146] Fix delete_previous_prerelease in workflow --- .github/workflows/nightly-build.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/nightly-build.yml b/.github/workflows/nightly-build.yml index 45c4e4929..9f8ebabbc 100644 --- a/.github/workflows/nightly-build.yml +++ b/.github/workflows/nightly-build.yml @@ -49,16 +49,16 @@ jobs: corese-rdf4j/target/corese-rdf4j-*-nightly.jar corese-server/target/corese-server-*-nightly.jar - delete_previous_release: + delete_previous_prerelease: runs-on: ubuntu-latest needs: build steps: - name: Delete previous prerelease if: ${{ needs.build.result == 'success' }} run: | - latest_release=$(gh release list --limit 1 --prerelease --json tagName -q '.[0].tagName') - if [ -n "$latest_release" ]; then - gh release delete $latest_release -y + latest_prerelease=$(gh release list --limit 1 --exclude-drafts --json tagName,isPrerelease -q '.[] | select(.isPrerelease) | .tagName') + if [ -n "$latest_prerelease" ]; then + gh release delete $latest_prerelease -y else echo "No previous prerelease found." fi @@ -67,7 +67,7 @@ jobs: create_prerelease: runs-on: ubuntu-latest - needs: [build, delete_previous_release] + needs: [build, delete_previous_prerelease] steps: - name: Download Build Artifacts uses: actions/download-artifact@v4 From 4b9c2e4c5175b4baf24fec385802e7ac3c1f3177 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Fri, 24 May 2024 11:18:13 +0200 Subject: [PATCH 082/146] chore: Update nightly-build workflow to checkout 'development' branch This commit updates the 'nightly-build.yml' workflow to include a step that checks out the 'development' branch before performing the build. This ensures that the latest code changes on the 'development' branch are included in the nightly build. Closes #123 --- .github/workflows/nightly-build.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/.github/workflows/nightly-build.yml b/.github/workflows/nightly-build.yml index 9f8ebabbc..2f9baf343 100644 --- a/.github/workflows/nightly-build.yml +++ b/.github/workflows/nightly-build.yml @@ -53,6 +53,11 @@ jobs: runs-on: ubuntu-latest needs: build steps: + - name: Checkout Repository + uses: actions/checkout@v4 + with: + ref: development + - name: Delete previous prerelease if: ${{ needs.build.result == 'success' }} run: | @@ -69,6 +74,11 @@ jobs: runs-on: ubuntu-latest needs: [build, delete_previous_prerelease] steps: + - name: Checkout Repository + uses: actions/checkout@v4 + with: + ref: development + - name: Download Build Artifacts uses: actions/download-artifact@v4 with: From 15558b8dd3eb36954b08709c62380bd470186bcb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Fri, 24 May 2024 11:48:15 +0200 Subject: [PATCH 083/146] chore: Update nightly-build workflow to include date in artifact names This commit modifies the 'nightly-build.yml' workflow to include the current date in the names of the generated artifact files. The date is obtained using the `date` command and is stored in the `DATE` environment variable. This change ensures that each nightly build has a unique identifier based on the date. 
Closes #123 --- .github/workflows/nightly-build.yml | 32 ++++++++++++++++------------- 1 file changed, 18 insertions(+), 14 deletions(-) diff --git a/.github/workflows/nightly-build.yml b/.github/workflows/nightly-build.yml index 2f9baf343..5538e37b5 100644 --- a/.github/workflows/nightly-build.yml +++ b/.github/workflows/nightly-build.yml @@ -29,11 +29,14 @@ jobs: id: vars run: echo "SHORT_COMMIT=$(git rev-parse --short HEAD)" >> $GITHUB_ENV + - name: Get date + run: echo "DATE=$(date +'%Y%m%d')" >> $GITHUB_ENV + - name: Rename Artifacts run: | for file in $(find . -name '*.jar' | grep -v 'original'); do base=$(basename $file) - new_base=$(echo $base | sed "s/4\.5\.1/${SHORT_COMMIT}-nightly/") + new_base=$(echo $base | sed "s/4\.5\.1/${{ env.SHORT_COMMIT }}-nightly-${{ env.DATE }}/") mv $file $(dirname $file)/$new_base done @@ -42,12 +45,12 @@ jobs: with: name: nightly-build path: | - corese-core/target/corese-core-*-nightly.jar - corese-command/target/corese-command-*-nightly.jar - corese-gui/target/corese-gui-*-nightly.jar - corese-jena/target/corese-jena-*-nightly.jar - corese-rdf4j/target/corese-rdf4j-*-nightly.jar - corese-server/target/corese-server-*-nightly.jar + corese-core/target/corese-core-*-nightly-*.jar + corese-command/target/corese-command-*-nightly-*.jar + corese-gui/target/corese-gui-*-nightly-*.jar + corese-jena/target/corese-jena-*-nightly-*.jar + corese-rdf4j/target/corese-rdf4j-*-nightly-*.jar + corese-server/target/corese-server-*-nightly-*.jar delete_previous_prerelease: runs-on: ubuntu-latest @@ -58,14 +61,15 @@ jobs: with: ref: development - - name: Delete previous prerelease + - name: Delete previous nightly tag if: ${{ needs.build.result == 'success' }} run: | - latest_prerelease=$(gh release list --limit 1 --exclude-drafts --json tagName,isPrerelease -q '.[] | select(.isPrerelease) | .tagName') - if [ -n "$latest_prerelease" ]; then - gh release delete $latest_prerelease -y + latest_nightly_tag=$(gh release list --limit 1 --exclude-drafts --json tagName,isPrerelease -q '.[] | select(.isPrerelease) | .tagName') + if [ -n "$latest_nightly_tag" ]; then + gh release delete $latest_nightly_tag -y + gh api --method DELETE "/repos/${{ github.repository }}/git/refs/tags/$latest_nightly_tag" else - echo "No previous prerelease found." + echo "No previous nightly tag found." fi env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -88,9 +92,9 @@ jobs: - name: Create new prerelease if: ${{ needs.build.result == 'success' }} run: | - tag_name="nightly-${{ env.SHORT_COMMIT }}" + tag_name="nightly-${{ env.DATE }}-${{ env.SHORT_COMMIT }}" gh release create $tag_name -p -t "Nightly Build ${{ env.SHORT_COMMIT }}" -n "This is an automated nightly build. It is a development version and not stable." 
--target development - gh release upload $tag_name ./artifacts/corese-core-*-nightly.jar ./artifacts/corese-command-*-nightly.jar ./artifacts/corese-gui-*-nightly.jar ./artifacts/corese-jena-*-nightly.jar ./artifacts/corese-rdf4j-*-nightly.jar ./artifacts/corese-server-*-nightly.jar + gh release upload $tag_name ./artifacts/corese-core-*-nightly-*.jar ./artifacts/corese-command-*-nightly-*.jar ./artifacts/corese-gui-*-nightly-*.jar ./artifacts/corese-jena-*-nightly-*.jar ./artifacts/corese-rdf4j-*-nightly-*.jar ./artifacts/corese-server-*-nightly-*.jar env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From c0bb2d170c71d7945a9150456d2232299eb898bc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Fri, 24 May 2024 11:55:35 +0200 Subject: [PATCH 084/146] chore: Update nightly-build workflow to include date in artifact names --- .github/workflows/nightly-build.yml | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/.github/workflows/nightly-build.yml b/.github/workflows/nightly-build.yml index 5538e37b5..72f13d38d 100644 --- a/.github/workflows/nightly-build.yml +++ b/.github/workflows/nightly-build.yml @@ -10,6 +10,10 @@ jobs: build: runs-on: ubuntu-latest + outputs: + short_commit: ${{ steps.vars.outputs.short_commit }} + date: ${{ steps.date.outputs.date }} + steps: - name: Checkout Repository uses: actions/checkout@v4 @@ -27,16 +31,17 @@ jobs: - name: Get short commit hash id: vars - run: echo "SHORT_COMMIT=$(git rev-parse --short HEAD)" >> $GITHUB_ENV + run: echo "::set-output name=short_commit::$(git rev-parse --short HEAD)" - name: Get date - run: echo "DATE=$(date +'%Y%m%d')" >> $GITHUB_ENV + id: date + run: echo "::set-output name=date::$(date +'%Y%m%d')" - name: Rename Artifacts run: | for file in $(find . -name '*.jar' | grep -v 'original'); do base=$(basename $file) - new_base=$(echo $base | sed "s/4\.5\.1/${{ env.SHORT_COMMIT }}-nightly-${{ env.DATE }}/") + new_base=$(echo $base | sed "s/4\.5\.1/${{ steps.vars.outputs.short_commit }}-nightly-${{ steps.date.outputs.date }}/") mv $file $(dirname $file)/$new_base done @@ -92,8 +97,8 @@ jobs: - name: Create new prerelease if: ${{ needs.build.result == 'success' }} run: | - tag_name="nightly-${{ env.DATE }}-${{ env.SHORT_COMMIT }}" - gh release create $tag_name -p -t "Nightly Build ${{ env.SHORT_COMMIT }}" -n "This is an automated nightly build. It is a development version and not stable." --target development + tag_name="nightly-${{ needs.build.outputs.date }}-${{ needs.build.outputs.short_commit }}" + gh release create $tag_name -p -t "Nightly Build ${{ needs.build.outputs.short_commit }}" -n "This is an automated nightly build. It is a development version and not stable." 
--target development gh release upload $tag_name ./artifacts/corese-core-*-nightly-*.jar ./artifacts/corese-command-*-nightly-*.jar ./artifacts/corese-gui-*-nightly-*.jar ./artifacts/corese-jena-*-nightly-*.jar ./artifacts/corese-rdf4j-*-nightly-*.jar ./artifacts/corese-server-*-nightly-*.jar env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From a49a5529f2bcbe8a5fe0c426d42879578275c4bd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Fri, 24 May 2024 12:14:36 +0200 Subject: [PATCH 085/146] chore: Update nightly-build workflow to include date in artifact names --- .github/workflows/nightly-build.yml | 37 +++++++++++++++-------------- 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/.github/workflows/nightly-build.yml b/.github/workflows/nightly-build.yml index 72f13d38d..647dd61c5 100644 --- a/.github/workflows/nightly-build.yml +++ b/.github/workflows/nightly-build.yml @@ -31,31 +31,32 @@ jobs: - name: Get short commit hash id: vars - run: echo "::set-output name=short_commit::$(git rev-parse --short HEAD)" + run: echo "short_commit=$(git rev-parse --short HEAD)" >> $GITHUB_ENV - name: Get date id: date - run: echo "::set-output name=date::$(date +'%Y%m%d')" + run: echo "date=$(date +'%Y%m%d')" >> $GITHUB_ENV - name: Rename Artifacts run: | - for file in $(find . -name '*.jar' | grep -v 'original'); do - base=$(basename $file) - new_base=$(echo $base | sed "s/4\.5\.1/${{ steps.vars.outputs.short_commit }}-nightly-${{ steps.date.outputs.date }}/") - mv $file $(dirname $file)/$new_base - done + mv $(find ./corese-core/target/ -name 'corese-core-*-jar-with-dependencies.jar') ./corese-core/target/corese-core-${{ env.short_commit }}-nightly.jar + mv $(find ./corese-command/target/ -name 'corese-command-*.jar') ./corese-command/target/corese-command-${{ env.short_commit }}-nightly.jar + mv $(find ./corese-gui/target/ -name 'corese-gui-*.jar') ./corese-gui/target/corese-gui-${{ env.short_commit }}-nightly.jar + mv $(find ./corese-jena/target/ -name 'corese-jena-*.jar') ./corese-jena/target/corese-jena-${{ env.short_commit }}-nightly.jar + mv $(find ./corese-rdf4j/target/ -name 'corese-rdf4j-*.jar') ./corese-rdf4j/target/corese-rdf4j-${{ env.short_commit }}-nightly.jar + mv $(find ./corese-server/target/ -name 'corese-server-*.jar') ./corese-server/target/corese-server-${{ env.short_commit }}-nightly.jar - name: Upload Build Artifacts uses: actions/upload-artifact@v4 with: name: nightly-build path: | - corese-core/target/corese-core-*-nightly-*.jar - corese-command/target/corese-command-*-nightly-*.jar - corese-gui/target/corese-gui-*-nightly-*.jar - corese-jena/target/corese-jena-*-nightly-*.jar - corese-rdf4j/target/corese-rdf4j-*-nightly-*.jar - corese-server/target/corese-server-*-nightly-*.jar + ./corese-core/target/corese-core-${{ env.short_commit }}-nightly.jar + ./corese-command/target/corese-command-${{ env.short_commit }}-nightly.jar + ./corese-gui/target/corese-gui-${{ env.short_commit }}-nightly.jar + ./corese-jena/target/corese-jena-${{ env.short_commit }}-nightly.jar + ./corese-rdf4j/target/corese-rdf4j-${{ env.short_commit }}-nightly.jar + ./corese-server/target/corese-server-${{ env.short_commit }}-nightly.jar delete_previous_prerelease: runs-on: ubuntu-latest @@ -81,7 +82,7 @@ jobs: create_prerelease: runs-on: ubuntu-latest - needs: [build, delete_previous_prerelease] + needs: delete_previous_prerelease steps: - name: Checkout Repository uses: actions/checkout@v4 @@ -97,15 +98,15 @@ jobs: - name: Create new prerelease if: ${{ 
needs.build.result == 'success' }} run: | - tag_name="nightly-${{ needs.build.outputs.date }}-${{ needs.build.outputs.short_commit }}" - gh release create $tag_name -p -t "Nightly Build ${{ needs.build.outputs.short_commit }}" -n "This is an automated nightly build. It is a development version and not stable." --target development - gh release upload $tag_name ./artifacts/corese-core-*-nightly-*.jar ./artifacts/corese-command-*-nightly-*.jar ./artifacts/corese-gui-*-nightly-*.jar ./artifacts/corese-jena-*-nightly-*.jar ./artifacts/corese-rdf4j-*-nightly-*.jar ./artifacts/corese-server-*-nightly-*.jar + tag_name="nightly-${{ env.date }}-${{ env.short_commit }}" + gh release create $tag_name -p -t "Nightly Build ${{ env.short_commit }}" -n "This is an automated nightly build. It is a development version and not stable." --target development + gh release upload $tag_name ./artifacts/corese-core-${{ env.short_commit }}-nightly.jar ./artifacts/corese-command-${{ env.short_commit }}-nightly.jar ./artifacts/corese-gui-${{ env.short_commit }}-nightly.jar ./artifacts/corese-jena-${{ env.short_commit }}-nightly.jar ./artifacts/corese-rdf4j-${{ env.short_commit }}-nightly.jar ./artifacts/corese-server-${{ env.short_commit }}-nightly.jar env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} notify: runs-on: ubuntu-latest - needs: [build, create_prerelease] + needs: create_prerelease steps: - name: Notify Success if: ${{ needs.build.result == 'success' }} From b55a4848757ff72a07112d1aa475add08f45c373 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Fri, 24 May 2024 14:05:58 +0200 Subject: [PATCH 086/146] chore: Remove unuse conditions --- .github/workflows/nightly-build.yml | 211 +++++++++++++--------------- 1 file changed, 99 insertions(+), 112 deletions(-) diff --git a/.github/workflows/nightly-build.yml b/.github/workflows/nightly-build.yml index 647dd61c5..4ed61a70e 100644 --- a/.github/workflows/nightly-build.yml +++ b/.github/workflows/nightly-build.yml @@ -1,117 +1,104 @@ name: Nightly Build on: - push: - branches: - - development - workflow_dispatch: + push: + branches: + - development + workflow_dispatch: jobs: - build: - runs-on: ubuntu-latest - - outputs: - short_commit: ${{ steps.vars.outputs.short_commit }} - date: ${{ steps.date.outputs.date }} - - steps: - - name: Checkout Repository - uses: actions/checkout@v4 - with: - ref: development - - - name: Set up JDK 11 - uses: actions/setup-java@v4 - with: - distribution: 'temurin' - java-version: '11' - - - name: Build with Maven - run: mvn clean install - - - name: Get short commit hash - id: vars - run: echo "short_commit=$(git rev-parse --short HEAD)" >> $GITHUB_ENV - - - name: Get date - id: date - run: echo "date=$(date +'%Y%m%d')" >> $GITHUB_ENV - - - name: Rename Artifacts - run: | - mv $(find ./corese-core/target/ -name 'corese-core-*-jar-with-dependencies.jar') ./corese-core/target/corese-core-${{ env.short_commit }}-nightly.jar - mv $(find ./corese-command/target/ -name 'corese-command-*.jar') ./corese-command/target/corese-command-${{ env.short_commit }}-nightly.jar - mv $(find ./corese-gui/target/ -name 'corese-gui-*.jar') ./corese-gui/target/corese-gui-${{ env.short_commit }}-nightly.jar - mv $(find ./corese-jena/target/ -name 'corese-jena-*.jar') ./corese-jena/target/corese-jena-${{ env.short_commit }}-nightly.jar - mv $(find ./corese-rdf4j/target/ -name 'corese-rdf4j-*.jar') ./corese-rdf4j/target/corese-rdf4j-${{ env.short_commit }}-nightly.jar - mv $(find ./corese-server/target/ -name 
'corese-server-*.jar') ./corese-server/target/corese-server-${{ env.short_commit }}-nightly.jar - - - name: Upload Build Artifacts - uses: actions/upload-artifact@v4 - with: - name: nightly-build - path: | - ./corese-core/target/corese-core-${{ env.short_commit }}-nightly.jar - ./corese-command/target/corese-command-${{ env.short_commit }}-nightly.jar - ./corese-gui/target/corese-gui-${{ env.short_commit }}-nightly.jar - ./corese-jena/target/corese-jena-${{ env.short_commit }}-nightly.jar - ./corese-rdf4j/target/corese-rdf4j-${{ env.short_commit }}-nightly.jar - ./corese-server/target/corese-server-${{ env.short_commit }}-nightly.jar - - delete_previous_prerelease: - runs-on: ubuntu-latest - needs: build - steps: - - name: Checkout Repository - uses: actions/checkout@v4 - with: - ref: development - - - name: Delete previous nightly tag - if: ${{ needs.build.result == 'success' }} - run: | - latest_nightly_tag=$(gh release list --limit 1 --exclude-drafts --json tagName,isPrerelease -q '.[] | select(.isPrerelease) | .tagName') - if [ -n "$latest_nightly_tag" ]; then - gh release delete $latest_nightly_tag -y - gh api --method DELETE "/repos/${{ github.repository }}/git/refs/tags/$latest_nightly_tag" - else - echo "No previous nightly tag found." - fi - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - create_prerelease: - runs-on: ubuntu-latest - needs: delete_previous_prerelease - steps: - - name: Checkout Repository - uses: actions/checkout@v4 - with: - ref: development - - - name: Download Build Artifacts - uses: actions/download-artifact@v4 - with: - name: nightly-build - path: ./artifacts - - - name: Create new prerelease - if: ${{ needs.build.result == 'success' }} - run: | - tag_name="nightly-${{ env.date }}-${{ env.short_commit }}" - gh release create $tag_name -p -t "Nightly Build ${{ env.short_commit }}" -n "This is an automated nightly build. It is a development version and not stable." --target development - gh release upload $tag_name ./artifacts/corese-core-${{ env.short_commit }}-nightly.jar ./artifacts/corese-command-${{ env.short_commit }}-nightly.jar ./artifacts/corese-gui-${{ env.short_commit }}-nightly.jar ./artifacts/corese-jena-${{ env.short_commit }}-nightly.jar ./artifacts/corese-rdf4j-${{ env.short_commit }}-nightly.jar ./artifacts/corese-server-${{ env.short_commit }}-nightly.jar - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - notify: - runs-on: ubuntu-latest - needs: create_prerelease - steps: - - name: Notify Success - if: ${{ needs.build.result == 'success' }} - run: echo "Nightly build completed successfully!" - - - name: Notify Failure - if: ${{ needs.build.result != 'success' }} - run: echo "Nightly build failed." 
+ build: + runs-on: ubuntu-latest + + outputs: + short_commit: ${{ steps.vars.outputs.short_commit }} + date: ${{ steps.date.outputs.date }} + + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + with: + ref: development + + - name: Set up JDK 11 + uses: actions/setup-java@v4 + with: + distribution: 'temurin' + java-version: '11' + + - name: Build with Maven + run: mvn clean install + + - name: Get short commit hash + id: vars + run: echo "short_commit=$(git rev-parse --short HEAD)" >> $GITHUB_ENV + + - name: Get date + id: date + run: echo "date=$(date +'%Y%m%d')" >> $GITHUB_ENV + + - name: Rename Artifacts + run: | + mv $(find ./corese-core/target/ -name 'corese-core-*-jar-with-dependencies.jar') ./corese-core/target/corese-core-${{ env.short_commit }}-nightly.jar + mv $(find ./corese-command/target/ -name 'corese-command-*.jar') ./corese-command/target/corese-command-${{ env.short_commit }}-nightly.jar + mv $(find ./corese-gui/target/ -name 'corese-gui-*.jar') ./corese-gui/target/corese-gui-${{ env.short_commit }}-nightly.jar + mv $(find ./corese-jena/target/ -name 'corese-jena-*.jar') ./corese-jena/target/corese-jena-${{ env.short_commit }}-nightly.jar + mv $(find ./corese-rdf4j/target/ -name 'corese-rdf4j-*.jar') ./corese-rdf4j/target/corese-rdf4j-${{ env.short_commit }}-nightly.jar + mv $(find ./corese-server/target/ -name 'corese-server-*.jar') ./corese-server/target/corese-server-${{ env.short_commit }}-nightly.jar + + - name: Upload Build Artifacts + uses: actions/upload-artifact@v4 + with: + name: nightly-build + path: | + ./corese-core/target/corese-core-${{ env.short_commit }}-nightly.jar + ./corese-command/target/corese-command-${{ env.short_commit }}-nightly.jar + ./corese-gui/target/corese-gui-${{ env.short_commit }}-nightly.jar + ./corese-jena/target/corese-jena-${{ env.short_commit }}-nightly.jar + ./corese-rdf4j/target/corese-rdf4j-${{ env.short_commit }}-nightly.jar + ./corese-server/target/corese-server-${{ env.short_commit }}-nightly.jar + + delete_previous_prerelease: + runs-on: ubuntu-latest + needs: build + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + with: + ref: development + + - name: Delete previous nightly tag + if: ${{ needs.build.result == 'success' }} + run: | + latest_nightly_tag=$(gh release list --limit 1 --exclude-drafts --json tagName,isPrerelease -q '.[] | select(.isPrerelease) | .tagName') + if [ -n "$latest_nightly_tag" ]; then + gh release delete $latest_nightly_tag -y + gh api --method DELETE "/repos/${{ github.repository }}/git/refs/tags/$latest_nightly_tag" + else + echo "No previous nightly tag found." + fi + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + create_prerelease: + runs-on: ubuntu-latest + needs: delete_previous_prerelease + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + with: + ref: development + + - name: Download Build Artifacts + uses: actions/download-artifact@v4 + with: + name: nightly-build + path: ./artifacts + + - name: Create new prerelease + run: | + tag_name="nightly-${{ env.date }}-${{ env.short_commit }}" + gh release create $tag_name -p -t "Nightly Build ${{ env.short_commit }}" -n "This is an automated nightly build. It is a development version and not stable." 
--target development + gh release upload $tag_name ./artifacts/corese-core-${{ env.short_commit }}-nightly.jar ./artifacts/corese-command-${{ env.short_commit }}-nightly.jar ./artifacts/corese-gui-${{ env.short_commit }}-nightly.jar ./artifacts/corese-jena-${{ env.short_commit }}-nightly.jar ./artifacts/corese-rdf4j-${{ env.short_commit }}-nightly.jar ./artifacts/corese-server-${{ env.short_commit }}-nightly.jar + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From 549d4330f3cc7c10f3872ec6be1cba91f9c96c81 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Fri, 24 May 2024 15:03:51 +0200 Subject: [PATCH 087/146] chore: Update nightly-build workflow to include date in artifact names --- .github/workflows/nightly-build.yml | 47 +++++++++++------------------ 1 file changed, 18 insertions(+), 29 deletions(-) diff --git a/.github/workflows/nightly-build.yml b/.github/workflows/nightly-build.yml index 4ed61a70e..c671e477e 100644 --- a/.github/workflows/nightly-build.yml +++ b/.github/workflows/nightly-build.yml @@ -10,10 +10,6 @@ jobs: build: runs-on: ubuntu-latest - outputs: - short_commit: ${{ steps.vars.outputs.short_commit }} - date: ${{ steps.date.outputs.date }} - steps: - name: Checkout Repository uses: actions/checkout@v4 @@ -29,34 +25,27 @@ jobs: - name: Build with Maven run: mvn clean install - - name: Get short commit hash - id: vars - run: echo "short_commit=$(git rev-parse --short HEAD)" >> $GITHUB_ENV - - - name: Get date - id: date - run: echo "date=$(date +'%Y%m%d')" >> $GITHUB_ENV - - name: Rename Artifacts run: | - mv $(find ./corese-core/target/ -name 'corese-core-*-jar-with-dependencies.jar') ./corese-core/target/corese-core-${{ env.short_commit }}-nightly.jar - mv $(find ./corese-command/target/ -name 'corese-command-*.jar') ./corese-command/target/corese-command-${{ env.short_commit }}-nightly.jar - mv $(find ./corese-gui/target/ -name 'corese-gui-*.jar') ./corese-gui/target/corese-gui-${{ env.short_commit }}-nightly.jar - mv $(find ./corese-jena/target/ -name 'corese-jena-*.jar') ./corese-jena/target/corese-jena-${{ env.short_commit }}-nightly.jar - mv $(find ./corese-rdf4j/target/ -name 'corese-rdf4j-*.jar') ./corese-rdf4j/target/corese-rdf4j-${{ env.short_commit }}-nightly.jar - mv $(find ./corese-server/target/ -name 'corese-server-*.jar') ./corese-server/target/corese-server-${{ env.short_commit }}-nightly.jar + short_commit=$(git rev-parse --short HEAD) + mv $(find ./corese-core/target/ -name 'corese-core-*-jar-with-dependencies.jar') ./corese-core/target/corese-core-${short_commit}-nightly.jar + mv $(find ./corese-command/target/ -name 'corese-command-*.jar') ./corese-command/target/corese-command-${short_commit}-nightly.jar + mv $(find ./corese-gui/target/ -name 'corese-gui-*.jar') ./corese-gui/target/corese-gui-${short_commit}-nightly.jar + mv $(find ./corese-jena/target/ -name 'corese-jena-*.jar') ./corese-jena/target/corese-jena-${short_commit}-nightly.jar + mv $(find ./corese-rdf4j/target/ -name 'corese-rdf4j-*.jar') ./corese-rdf4j/target/corese-rdf4j-${short_commit}-nightly.jar + mv $(find ./corese-server/target/ -name 'corese-server-*.jar') ./corese-server/target/corese-server-${short_commit}-nightly.jar - name: Upload Build Artifacts uses: actions/upload-artifact@v4 with: name: nightly-build path: | - ./corese-core/target/corese-core-${{ env.short_commit }}-nightly.jar - ./corese-command/target/corese-command-${{ env.short_commit }}-nightly.jar - ./corese-gui/target/corese-gui-${{ env.short_commit }}-nightly.jar - 
./corese-jena/target/corese-jena-${{ env.short_commit }}-nightly.jar - ./corese-rdf4j/target/corese-rdf4j-${{ env.short_commit }}-nightly.jar - ./corese-server/target/corese-server-${{ env.short_commit }}-nightly.jar + ./corese-core/target/corese-core-*-nightly.jar + ./corese-command/target/corese-command-*-nightly.jar + ./corese-gui/target/corese-gui-*-nightly.jar + ./corese-jena/target/corese-jena-*-nightly.jar + ./corese-rdf4j/target/corese-rdf4j-*-nightly.jar + ./corese-server/target/corese-server-*-nightly.jar delete_previous_prerelease: runs-on: ubuntu-latest @@ -68,7 +57,6 @@ jobs: ref: development - name: Delete previous nightly tag - if: ${{ needs.build.result == 'success' }} run: | latest_nightly_tag=$(gh release list --limit 1 --exclude-drafts --json tagName,isPrerelease -q '.[] | select(.isPrerelease) | .tagName') if [ -n "$latest_nightly_tag" ]; then @@ -76,7 +64,6 @@ jobs: gh api --method DELETE "/repos/${{ github.repository }}/git/refs/tags/$latest_nightly_tag" else echo "No previous nightly tag found." - fi env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -97,8 +84,10 @@ jobs: - name: Create new prerelease run: | - tag_name="nightly-${{ env.date }}-${{ env.short_commit }}" - gh release create $tag_name -p -t "Nightly Build ${{ env.short_commit }}" -n "This is an automated nightly build. It is a development version and not stable." --target development - gh release upload $tag_name ./artifacts/corese-core-${{ env.short_commit }}-nightly.jar ./artifacts/corese-command-${{ env.short_commit }}-nightly.jar ./artifacts/corese-gui-${{ env.short_commit }}-nightly.jar ./artifacts/corese-jena-${{ env.short_commit }}-nightly.jar ./artifacts/corese-rdf4j-${{ env.short_commit }}-nightly.jar ./artifacts/corese-server-${{ env.short_commit }}-nightly.jar + short_commit=$(git rev-parse --short HEAD) + date=$(date +'%Y%m%d') + tag_name="nightly-${date}-${short_commit}" + gh release create $tag_name -p -t "Nightly Build ${short_commit}" -n "This is an automated nightly build. It is a development version and not stable." 
--target development + gh release upload $tag_name ./artifacts/corese-core-${short_commit}-nightly.jar ./artifacts/corese-command-${short_commit}-nightly.jar ./artifacts/corese-gui-${short_commit}-nightly.jar ./artifacts/corese-jena-${short_commit}-nightly.jar ./artifacts/corese-rdf4j-${short_commit}-nightly.jar ./artifacts/corese-server-${short_commit}-nightly.jar env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From e3ae9c2acd95a57c517b1cc71b31dfad0cad0710 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Fri, 24 May 2024 15:12:23 +0200 Subject: [PATCH 088/146] chore: Fix syntax error --- .github/workflows/nightly-build.yml | 156 ++++++++++++++-------------- 1 file changed, 78 insertions(+), 78 deletions(-) diff --git a/.github/workflows/nightly-build.yml b/.github/workflows/nightly-build.yml index c671e477e..a554833ad 100644 --- a/.github/workflows/nightly-build.yml +++ b/.github/workflows/nightly-build.yml @@ -1,93 +1,93 @@ name: Nightly Build on: - push: - branches: - - development - workflow_dispatch: + push: + branches: + - development + workflow_dispatch: jobs: - build: - runs-on: ubuntu-latest + build: + runs-on: ubuntu-latest - steps: - - name: Checkout Repository - uses: actions/checkout@v4 - with: - ref: development + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + with: + ref: development - - name: Set up JDK 11 - uses: actions/setup-java@v4 - with: - distribution: 'temurin' - java-version: '11' + - name: Set up JDK 11 + uses: actions/setup-java@v4 + with: + distribution: 'temurin' + java-version: '11' - - name: Build with Maven - run: mvn clean install + - name: Build with Maven + run: mvn clean install - - name: Rename Artifacts - run: | - short_commit=$(git rev-parse --short HEAD) - mv $(find ./corese-core/target/ -name 'corese-core-*-jar-with-dependencies.jar') ./corese-core/target/corese-core-${short_commit}-nightly.jar - mv $(find ./corese-command/target/ -name 'corese-command-*.jar') ./corese-command/target/corese-command-${short_commit}-nightly.jar - mv $(find ./corese-gui/target/ -name 'corese-gui-*.jar') ./corese-gui/target/corese-gui-${short_commit}-nightly.jar - mv $(find ./corese-jena/target/ -name 'corese-jena-*.jar') ./corese-jena/target/corese-jena-${short_commit}-nightly.jar - mv $(find ./corese-rdf4j/target/ -name 'corese-rdf4j-*.jar') ./corese-rdf4j/target/corese-rdf4j-${short_commit}-nightly.jar - mv $(find ./corese-server/target/ -name 'corese-server-*.jar') ./corese-server/target/corese-server-${short_commit}-nightly.jar + - name: Rename Artifacts + run: | + short_commit=$(git rev-parse --short HEAD) + mv $(find ./corese-core/target/ -name 'corese-core-*-jar-with-dependencies.jar') ./corese-core/target/corese-core-${short_commit}-nightly.jar + mv $(find ./corese-command/target/ -name 'corese-command-*.jar') ./corese-command/target/corese-command-${short_commit}-nightly.jar + mv $(find ./corese-gui/target/ -name 'corese-gui-*.jar') ./corese-gui/target/corese-gui-${short_commit}-nightly.jar + mv $(find ./corese-jena/target/ -name 'corese-jena-*.jar') ./corese-jena/target/corese-jena-${short_commit}-nightly.jar + mv $(find ./corese-rdf4j/target/ -name 'corese-rdf4j-*.jar') ./corese-rdf4j/target/corese-rdf4j-${short_commit}-nightly.jar + mv $(find ./corese-server/target/ -name 'corese-server-*.jar') ./corese-server/target/corese-server-${short_commit}-nightly.jar - - name: Upload Build Artifacts - uses: actions/upload-artifact@v4 - with: - name: nightly-build - path: | - 
./corese-core/target/corese-core-*-nightly.jar - ./corese-command/target/corese-command-*-nightly.jar - ./corese-gui/target/corese-gui-*-nightly.jar - ./corese-jena/target/corese-jena-*-nightly.jar - ./corese-rdf4j/target/corese-rdf4j-*-nightly.jar - ./corese-server/target/corese-server-*-nightly.jar + - name: Upload Build Artifacts + uses: actions/upload-artifact@v4 + with: + name: nightly-build + path: | + ./corese-core/target/corese-core-*-nightly.jar + ./corese-command/target/corese-command-*-nightly.jar + ./corese-gui/target/corese-gui-*-nightly.jar + ./corese-jena/target/corese-jena-*-nightly.jar + ./corese-rdf4j/target/corese-rdf4j-*-nightly.jar + ./corese-server/target/corese-server-*-nightly.jar - delete_previous_prerelease: - runs-on: ubuntu-latest - needs: build - steps: - - name: Checkout Repository - uses: actions/checkout@v4 - with: - ref: development + delete_previous_prerelease: + runs-on: ubuntu-latest + needs: build + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + with: + ref: development - - name: Delete previous nightly tag - run: | - latest_nightly_tag=$(gh release list --limit 1 --exclude-drafts --json tagName,isPrerelease -q '.[] | select(.isPrerelease) | .tagName') - if [ -n "$latest_nightly_tag" ]; then - gh release delete $latest_nightly_tag -y - gh api --method DELETE "/repos/${{ github.repository }}/git/refs/tags/$latest_nightly_tag" - else - echo "No previous nightly tag found." - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Delete previous nightly tag + run: | + latest_nightly_tag=$(gh release list --limit 1 --exclude-drafts --json tagName,isPrerelease -q '.[] | select(.isPrerelease) | .tagName') + if [ -n "$latest_nightly_tag" ]; then + gh release delete "$latest_nightly_tag" -y + gh api --method DELETE "/repos/${{ github.repository }}/git/refs/tags/$latest_nightly_tag" + else + echo "No previous nightly tag found." + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - create_prerelease: - runs-on: ubuntu-latest - needs: delete_previous_prerelease - steps: - - name: Checkout Repository - uses: actions/checkout@v4 - with: - ref: development + create_prerelease: + runs-on: ubuntu-latest + needs: delete_previous_prerelease + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + with: + ref: development - - name: Download Build Artifacts - uses: actions/download-artifact@v4 - with: - name: nightly-build - path: ./artifacts + - name: Download Build Artifacts + uses: actions/download-artifact@v4 + with: + name: nightly-build + path: ./artifacts - - name: Create new prerelease - run: | - short_commit=$(git rev-parse --short HEAD) - date=$(date +'%Y%m%d') - tag_name="nightly-${date}-${short_commit}" - gh release create $tag_name -p -t "Nightly Build ${short_commit}" -n "This is an automated nightly build. It is a development version and not stable." --target development - gh release upload $tag_name ./artifacts/corese-core-${short_commit}-nightly.jar ./artifacts/corese-command-${short_commit}-nightly.jar ./artifacts/corese-gui-${short_commit}-nightly.jar ./artifacts/corese-jena-${short_commit}-nightly.jar ./artifacts/corese-rdf4j-${short_commit}-nightly.jar ./artifacts/corese-server-${short_commit}-nightly.jar - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Create new prerelease + run: | + short_commit=$(git rev-parse --short HEAD) + date=$(date +'%Y%m%d') + tag_name="nightly-${date}-${short_commit}" + gh release create "$tag_name" -p -t "Nightly Build ${short_commit}" -n "This is an automated nightly build. 
It is a development version and not stable." --target development + gh release upload "$tag_name" ./artifacts/corese-core-${short_commit}-nightly.jar ./artifacts/corese-command-${short_commit}-nightly.jar ./artifacts/corese-gui-${short_commit}-nightly.jar ./artifacts/corese-jena-${short_commit}-nightly.jar ./artifacts/corese-rdf4j-${short_commit}-nightly.jar ./artifacts/corese-server-${short_commit}-nightly.jar + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From a3c727ee2017cacc6639ef34abcdf68a6b0aad2a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Fri, 24 May 2024 15:27:23 +0200 Subject: [PATCH 089/146] chore: Fix typo in bash algo --- .github/workflows/nightly-build.yml | 127 ++++++++++++++-------------- 1 file changed, 64 insertions(+), 63 deletions(-) diff --git a/.github/workflows/nightly-build.yml b/.github/workflows/nightly-build.yml index a554833ad..89a0a65c2 100644 --- a/.github/workflows/nightly-build.yml +++ b/.github/workflows/nightly-build.yml @@ -11,83 +11,84 @@ jobs: runs-on: ubuntu-latest steps: - - name: Checkout Repository - uses: actions/checkout@v4 - with: - ref: development + - name: Checkout Repository + uses: actions/checkout@v4 + with: + ref: development - - name: Set up JDK 11 - uses: actions/setup-java@v4 - with: - distribution: 'temurin' - java-version: '11' + - name: Set up JDK 11 + uses: actions/setup-java@v4 + with: + distribution: 'temurin' + java-version: '11' - - name: Build with Maven - run: mvn clean install + - name: Build with Maven + run: mvn clean install - - name: Rename Artifacts - run: | - short_commit=$(git rev-parse --short HEAD) - mv $(find ./corese-core/target/ -name 'corese-core-*-jar-with-dependencies.jar') ./corese-core/target/corese-core-${short_commit}-nightly.jar - mv $(find ./corese-command/target/ -name 'corese-command-*.jar') ./corese-command/target/corese-command-${short_commit}-nightly.jar - mv $(find ./corese-gui/target/ -name 'corese-gui-*.jar') ./corese-gui/target/corese-gui-${short_commit}-nightly.jar - mv $(find ./corese-jena/target/ -name 'corese-jena-*.jar') ./corese-jena/target/corese-jena-${short_commit}-nightly.jar - mv $(find ./corese-rdf4j/target/ -name 'corese-rdf4j-*.jar') ./corese-rdf4j/target/corese-rdf4j-${short_commit}-nightly.jar - mv $(find ./corese-server/target/ -name 'corese-server-*.jar') ./corese-server/target/corese-server-${short_commit}-nightly.jar + - name: Rename Artifacts + run: | + short_commit=$(git rev-parse --short HEAD) + mv $(find ./corese-core/target/ -name 'corese-core-*-jar-with-dependencies.jar') ./corese-core/target/corese-core-${short_commit}-nightly.jar + mv $(find ./corese-command/target/ -name 'corese-command-*.jar') ./corese-command/target/corese-command-${short_commit}-nightly.jar + mv $(find ./corese-gui/target/ -name 'corese-gui-*.jar') ./corese-gui/target/corese-gui-${short_commit}-nightly.jar + mv $(find ./corese-jena/target/ -name 'corese-jena-*.jar') ./corese-jena/target/corese-jena-${short_commit}-nightly.jar + mv $(find ./corese-rdf4j/target/ -name 'corese-rdf4j-*.jar') ./corese-rdf4j/target/corese-rdf4j-${short_commit}-nightly.jar + mv $(find ./corese-server/target/ -name 'corese-server-*.jar') ./corese-server/target/corese-server-${short_commit}-nightly.jar - - name: Upload Build Artifacts - uses: actions/upload-artifact@v4 - with: - name: nightly-build - path: | - ./corese-core/target/corese-core-*-nightly.jar - ./corese-command/target/corese-command-*-nightly.jar - ./corese-gui/target/corese-gui-*-nightly.jar - 
./corese-jena/target/corese-jena-*-nightly.jar - ./corese-rdf4j/target/corese-rdf4j-*-nightly.jar - ./corese-server/target/corese-server-*-nightly.jar + - name: Upload Build Artifacts + uses: actions/upload-artifact@v4 + with: + name: nightly-build + path: | + ./corese-core/target/corese-core-*-nightly.jar + ./corese-command/target/corese-command-*-nightly.jar + ./corese-gui/target/corese-gui-*-nightly.jar + ./corese-jena/target/corese-jena-*-nightly.jar + ./corese-rdf4j/target/corese-rdf4j-*-nightly.jar + ./corese-server/target/corese-server-*-nightly.jar delete_previous_prerelease: runs-on: ubuntu-latest needs: build steps: - - name: Checkout Repository - uses: actions/checkout@v4 - with: - ref: development + - name: Checkout Repository + uses: actions/checkout@v4 + with: + ref: development - - name: Delete previous nightly tag - run: | - latest_nightly_tag=$(gh release list --limit 1 --exclude-drafts --json tagName,isPrerelease -q '.[] | select(.isPrerelease) | .tagName') - if [ -n "$latest_nightly_tag" ]; then - gh release delete "$latest_nightly_tag" -y - gh api --method DELETE "/repos/${{ github.repository }}/git/refs/tags/$latest_nightly_tag" - else - echo "No previous nightly tag found." - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Delete previous nightly tag + run: | + latest_nightly_tag=$(gh release list --limit 1 --exclude-drafts --json tagName,isPrerelease -q '.[] | select(.isPrerelease) | .tagName') + if [ -n "$latest_nightly_tag" ]; then + gh release delete "$latest_nightly_tag" -y + gh api --method DELETE "/repos/${{ github.repository }}/git/refs/tags/$latest_nightly_tag" + else + echo "No previous nightly tag found." + fi + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} create_prerelease: runs-on: ubuntu-latest needs: delete_previous_prerelease steps: - - name: Checkout Repository - uses: actions/checkout@v4 - with: - ref: development + - name: Checkout Repository + uses: actions/checkout@v4 + with: + ref: development - - name: Download Build Artifacts - uses: actions/download-artifact@v4 - with: - name: nightly-build - path: ./artifacts + - name: Download Build Artifacts + uses: actions/download-artifact@v4 + with: + name: nightly-build + path: ./artifacts - - name: Create new prerelease - run: | - short_commit=$(git rev-parse --short HEAD) - date=$(date +'%Y%m%d') - tag_name="nightly-${date}-${short_commit}" - gh release create "$tag_name" -p -t "Nightly Build ${short_commit}" -n "This is an automated nightly build. It is a development version and not stable." --target development - gh release upload "$tag_name" ./artifacts/corese-core-${short_commit}-nightly.jar ./artifacts/corese-command-${short_commit}-nightly.jar ./artifacts/corese-gui-${short_commit}-nightly.jar ./artifacts/corese-jena-${short_commit}-nightly.jar ./artifacts/corese-rdf4j-${short_commit}-nightly.jar ./artifacts/corese-server-${short_commit}-nightly.jar - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Create new prerelease + run: | + short_commit=$(git rev-parse --short HEAD) + date=$(date +'%Y%m%d') + tag_name="nightly-${date}-${short_commit}" + gh release create "$tag_name" -p -t "Nightly Build ${short_commit}" -n "This is an automated nightly build. It is a development version and not stable." 
--target development + gh release upload "$tag_name" ./artifacts/corese-core-${short_commit}-nightly.jar ./artifacts/corese-command-${short_commit}-nightly.jar ./artifacts/corese-gui-${short_commit}-nightly.jar ./artifacts/corese-jena-${short_commit}-nightly.jar ./artifacts/corese-rdf4j-${short_commit}-nightly.jar ./artifacts/corese-server-${short_commit}-nightly.jar + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From f29b7ae9697d2e765639d1011759a22e21ad55d7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20C=C3=A9r=C3=A8s?= Date: Fri, 24 May 2024 16:06:30 +0200 Subject: [PATCH 090/146] Fix Create new prerelease workfow --- .github/workflows/nightly-build.yml | 24 +++++++++--------------- 1 file changed, 9 insertions(+), 15 deletions(-) diff --git a/.github/workflows/nightly-build.yml b/.github/workflows/nightly-build.yml index 89a0a65c2..2600bd6ff 100644 --- a/.github/workflows/nightly-build.yml +++ b/.github/workflows/nightly-build.yml @@ -25,27 +25,21 @@ jobs: - name: Build with Maven run: mvn clean install - - name: Rename Artifacts + - name: Rename and Copy Artifacts run: | short_commit=$(git rev-parse --short HEAD) - mv $(find ./corese-core/target/ -name 'corese-core-*-jar-with-dependencies.jar') ./corese-core/target/corese-core-${short_commit}-nightly.jar - mv $(find ./corese-command/target/ -name 'corese-command-*.jar') ./corese-command/target/corese-command-${short_commit}-nightly.jar - mv $(find ./corese-gui/target/ -name 'corese-gui-*.jar') ./corese-gui/target/corese-gui-${short_commit}-nightly.jar - mv $(find ./corese-jena/target/ -name 'corese-jena-*.jar') ./corese-jena/target/corese-jena-${short_commit}-nightly.jar - mv $(find ./corese-rdf4j/target/ -name 'corese-rdf4j-*.jar') ./corese-rdf4j/target/corese-rdf4j-${short_commit}-nightly.jar - mv $(find ./corese-server/target/ -name 'corese-server-*.jar') ./corese-server/target/corese-server-${short_commit}-nightly.jar + cp $(find ./corese-core/target/ -name 'corese-core-*-jar-with-dependencies.jar') ./corese-core-${short_commit}-nightly.jar + cp $(find ./corese-command/target/ -name 'corese-command-*.jar') ./corese-command-${short_commit}-nightly.jar + cp $(find ./corese-gui/target/ -name 'corese-gui-*.jar') ./corese-gui-${short_commit}-nightly.jar + cp $(find ./corese-jena/target/ -name 'corese-jena-*.jar') ./corese-jena-${short_commit}-nightly.jar + cp $(find ./corese-rdf4j/target/ -name 'corese-rdf4j-*.jar') ./corese-rdf4j-${short_commit}-nightly.jar + cp $(find ./corese-server/target/ -name 'corese-server-*.jar') ./corese-server-${short_commit}-nightly.jar - name: Upload Build Artifacts uses: actions/upload-artifact@v4 with: name: nightly-build - path: | - ./corese-core/target/corese-core-*-nightly.jar - ./corese-command/target/corese-command-*-nightly.jar - ./corese-gui/target/corese-gui-*-nightly.jar - ./corese-jena/target/corese-jena-*-nightly.jar - ./corese-rdf4j/target/corese-rdf4j-*-nightly.jar - ./corese-server/target/corese-server-*-nightly.jar + path: ./corese-*-nightly.jar delete_previous_prerelease: runs-on: ubuntu-latest @@ -89,6 +83,6 @@ jobs: date=$(date +'%Y%m%d') tag_name="nightly-${date}-${short_commit}" gh release create "$tag_name" -p -t "Nightly Build ${short_commit}" -n "This is an automated nightly build. It is a development version and not stable." 
--target development - gh release upload "$tag_name" ./artifacts/corese-core-${short_commit}-nightly.jar ./artifacts/corese-command-${short_commit}-nightly.jar ./artifacts/corese-gui-${short_commit}-nightly.jar ./artifacts/corese-jena-${short_commit}-nightly.jar ./artifacts/corese-rdf4j-${short_commit}-nightly.jar ./artifacts/corese-server-${short_commit}-nightly.jar + gh release upload "$tag_name" ./artifacts/corese-*-nightly.jar env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From 0f62c5cebe04bea72a9710a0c99f53fda8ccbac5 Mon Sep 17 00:00:00 2001 From: Anna Bobasheva <33026767+AnnaBobasheva@users.noreply.github.com> Date: Tue, 16 Apr 2024 16:02:26 +0200 Subject: [PATCH 091/146] first commit of the doc development Update .gitignore Update .gitignore removed README.md Update sphinx.yml Update sphinx.yml Create sphinx.yml Create corese.svg --- .github/sphinx.yml | 30 +++++ docs/.gitignore | 5 + docs/Makefile | 20 +++ docs/README.md | 29 ---- docs/make.bat | 35 +++++ docs/requirements.txt | 5 + docs/source/_static/corese.svg | 200 ++++++++++++++++++++++++++++ docs/source/_static/css/custom.css | 113 ++++++++++++++++ docs/source/_templates/class.rst | 22 +++ docs/source/_templates/function.rst | 13 ++ docs/source/api.rst | 85 ++++++++++++ docs/source/conf.py | 79 +++++++++++ docs/source/index.rst | 103 ++++++++++++++ docs/source/install.rst | 20 +++ docs/source/user_guide.rst | 47 +++++++ 15 files changed, 777 insertions(+), 29 deletions(-) create mode 100644 .github/sphinx.yml create mode 100644 docs/.gitignore create mode 100644 docs/Makefile delete mode 100644 docs/README.md create mode 100644 docs/make.bat create mode 100644 docs/requirements.txt create mode 100644 docs/source/_static/corese.svg create mode 100644 docs/source/_static/css/custom.css create mode 100644 docs/source/_templates/class.rst create mode 100644 docs/source/_templates/function.rst create mode 100644 docs/source/api.rst create mode 100644 docs/source/conf.py create mode 100644 docs/source/index.rst create mode 100644 docs/source/install.rst create mode 100644 docs/source/user_guide.rst diff --git a/.github/sphinx.yml b/.github/sphinx.yml new file mode 100644 index 000000000..037ecdb4a --- /dev/null +++ b/.github/sphinx.yml @@ -0,0 +1,30 @@ +name: "Sphinx: Render docs" + +on: + push: + branches: + - dev_documentation + +jobs: + build: + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - name: Checkout dev_documentation branch + uses: actions/checkout@v4 + with: + ref: dev_documentation + - name: Build HTML + uses: ammaraskar/sphinx-action@master + - name: Upload artifacts + uses: actions/upload-artifact@v4 + with: + name: html-docs + path: docs/build/html/ + - name: Deploy + uses: peaceiris/actions-gh-pages@v3 + if: github.ref == 'refs/heads/doc_development' + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: docs/build/html diff --git a/docs/.gitignore b/docs/.gitignore new file mode 100644 index 000000000..ae0c4ba2c --- /dev/null +++ b/docs/.gitignore @@ -0,0 +1,5 @@ +# Generated files for docs +generated/ +bak/ +build/ + diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 000000000..d0c3cbf10 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = source +BUILDDIR = build + +# Put it first so that "make" without argument is like "make help". 
+help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/README.md b/docs/README.md deleted file mode 100644 index 4b3ed72f6..000000000 --- a/docs/README.md +++ /dev/null @@ -1,29 +0,0 @@ -# Corese documentation - -The purpose of this page is to gather the documentation of Corese. - -## Corese-library - -- [Getting Started With Corese-library](getting%20started/Getting%20Started%20With%20Corese-library.md) -- [Use Corese-library and RDF4J](rdf4j/RDF4J%20API%20in%20Corese.md) - -## Corese-server - -- [Getting Started With Corese-server](getting%20started/Getting%20Started%20With%20Corese-server.md) -- [Use Corese-server with Python](corese-python/Corese-server%20with%20Python.md) - -## Corese-Command - -- [Getting Started With Corese-command](./getting%20started/Getting%20Started%20With%20Corese-command.md) - -## Corese-Python - -- [Getting Started With Corese-python](corese-python/Corese-library%20with%20Python.md) - -## Storage Systems - -- [Configuring and Connecting to Different Storage Systems in Corese](storage/Configuring%20and%20Connecting%20to%20Different%20Storage%20Systems%20in%20Corese.md) - -## Federated and Distributed Queries - -- [Federated and Distributed Queries](federation/Federated%20and%20Distributed%20Queries.md) diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 000000000..747ffb7b3 --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=source +set BUILDDIR=build + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. 
+ echo.If you don't have Sphinx installed, grab it from + echo.https://www.sphinx-doc.org/ + exit /b 1 +) + +if "%1" == "" goto help + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 000000000..538a134a0 --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,5 @@ +pydata_sphinx_theme==0.14.4 +babel>=2.9 +packaging>=21.0 +docutils<0.21,>=0.18.1 +sphinx-design diff --git a/docs/source/_static/corese.svg b/docs/source/_static/corese.svg new file mode 100644 index 000000000..1e2386bb0 --- /dev/null +++ b/docs/source/_static/corese.svg @@ -0,0 +1,200 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + C + RESE + + + + + + diff --git a/docs/source/_static/css/custom.css b/docs/source/_static/css/custom.css new file mode 100644 index 000000000..c240b07fa --- /dev/null +++ b/docs/source/_static/css/custom.css @@ -0,0 +1,113 @@ +/** Styling **************************************************************/ + +.bd-article { + padding-left: 0px !important; + padding-top: 0px !important; +} + +.bd-content { + justify-content: center; +} + +.bd-article-container { + padding-top: 0%; +} + +.bd-main .bd-content .bd-article-container { + max-width: 100%; + /*padding-top: 0%;*/ +} + +.bd-main .bd-content { + display: flex; + height: 100%; + justify-content: flex-end; + gap: 60px; + /* margin: 0 20px; */ +} + +#skrub-prepping-tables-for-machine-learning > img { + background: rgba(0,0,0,0) +} + +.card-body { + padding-left: 32px; + padding-right: 32px; +} + +.card-title { + margin-bottom: 1.0rem; + text-align: left; +} + +.card-body p { + text-align: left; +} + +.schema-container { + display: flex; + justify-content: center; + max-width: 70em; + /* Vertical spacing as a function of the viewport height, to use better the page on very big displays */ + margin-top: calc(10px + 1vh); + margin-bottom: calc(10px + 2vh); +} + +.row justify-content-md-center { + gap: 30px; +} + +.sk-landing-header text-white text-monospace { + margin-top: -20px; +} + + +/** Landing header *******************************/ + + +div.sk-landing-container { + max-width: 1400px; + margin-top: 35px; + min-height: 130px; + } + +div.sk-landing-container .text-white { + text-shadow: 0px 0px 8px rgb(42, 98, 128); +} + +ul.sk-landing-header-body { + margin-top: auto; + margin-bottom: auto; + font-size: 1.2rem; + font-weight: 500; + color: black; +} + +div.sk-landing-bg-more-info dd { + padding-left: 0; +} + +div.sk-landing-bg-more-info dd { + padding-left: 0; +} + +div.sk-landing-bg { + background-image: linear-gradient(160deg, rgba(0,48,70,.75) 0%, rgba(0,118,179,.75) 17%, rgba(255,239,193,.75) 59%, rgba(255,149,40,.75) 100%); + margin-top: -18px !important; + margin-bottom: 45px; +} + +div.sk-landing-bg-more-info { + background-color: #f8f8f8; + font-size: 0.96rem; + } + +.sk-landing-header { + font-size: 3.2rem; + margin-bottom: 5px; +} + +.sk-landing-subheader { + letter-spacing: 0.17rem; + margin-top: 5px; +} \ No newline at end of file diff --git a/docs/source/_templates/class.rst b/docs/source/_templates/class.rst new file mode 100644 index 000000000..a96363b95 --- /dev/null +++ b/docs/source/_templates/class.rst @@ -0,0 +1,22 @@ +:mod:`{{module}}`.{{objname}} +{{ underline }}============== + +.. rst-class:: side_comment + + Usage examples at the bottom of this page. + +.. currentmodule:: {{ module }} + +.. 
autoclass:: {{ objname }} + :inherited-members: + + {% block methods %} + + {% endblock %} + +.. include:: {{module}}.{{objname}}.examples + +.. raw:: html + +
      + \ No newline at end of file diff --git a/docs/source/_templates/function.rst b/docs/source/_templates/function.rst new file mode 100644 index 000000000..f133d5a7c --- /dev/null +++ b/docs/source/_templates/function.rst @@ -0,0 +1,13 @@ +:mod:`{{module}}`.{{objname}} +{{ underline }}==================== + +.. currentmodule:: {{ module }} + +.. autofunction:: {{ objname }} + +.. include:: {{module}}.{{objname}}.examples + +.. raw:: html + +
      + \ No newline at end of file diff --git a/docs/source/api.rst b/docs/source/api.rst new file mode 100644 index 000000000..6562ae2df --- /dev/null +++ b/docs/source/api.rst @@ -0,0 +1,85 @@ +====================== +API +====================== + +.. module:: corese + +Introduction +------------ + +The CORESE API provides a set of functions and classes for interacting with the CORESE system. This documentation serves as a reference guide for developers who want to use the CORESE API in their applications. + +Installation +------------ + +To use the CORESE API, you need to install the CORESE library. You can install it using pip: + +.. code-block:: bash + + $ pip install corese + +Usage +----- + +To use the CORESE API in your Python code, you need to import the `corese` module: + +.. code-block:: python + + import corese + +Functions +--------- + +.. function:: corese.function_name(arg1, arg2, ...) + + Description of the function. + + :param arg1: Description of the first argument. + :type arg1: Type of the first argument. + :param arg2: Description of the second argument. + :type arg2: Type of the second argument. + :return: Description of the return value. + :rtype: Type of the return value. + +Classes +------- + +.. class:: corese.ClassName + + Description of the class. + + .. method:: method_name(arg1, arg2, ...) + + Description of the method. + + :param arg1: Description of the first argument. + :type arg1: Type of the first argument. + :param arg2: Description of the second argument. + :type arg2: Type of the second argument. + :return: Description of the return value. + :rtype: Type of the return value. + +Examples +-------- + +Here are some examples of how to use the CORESE API: + +.. code-block:: python + + import corese + + # Example 1 + ... + + # Example 2 + ... + + # Example 3 + ... + +References +---------- + +For more information, please refer to the official CORESE documentation: + +- `CORESE Documentation `_ diff --git a/docs/source/conf.py b/docs/source/conf.py new file mode 100644 index 000000000..ee1b599c4 --- /dev/null +++ b/docs/source/conf.py @@ -0,0 +1,79 @@ +# Configuration file for the Sphinx documentation builder. +# +# For the full list of built-in configuration values, see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Project information ----------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. +import pathlib +import sys + +sys.path.insert(0, pathlib.Path(__file__).parents[2].resolve().as_posix()) +#sys.path.insert(0, pathlib.Path(__file__).parents[2].joinpath('code').resolve().as_posix()) + +project = 'CORESE' +copyright = '2024, WIMMICS' +author = 'WIMMICS' +release = '4.5' + +# -- General configuration --------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration + +extensions = [ + 'sphinx.ext.duration', + 'sphinx.ext.doctest', + 'sphinx.ext.autodoc', + 'sphinx.ext.autosummary', + 'sphinx_design' + ] + +templates_path = ['_templates'] +exclude_patterns = [] + +# The suffix(es) of source filenames. 
+source_suffix = ['.rst', '.md'] + +# -- Options for HTML output ------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output + +html_theme = 'pydata_sphinx_theme' +html_static_path = ['_static'] + +html_css_files = [ + "css/custom.css", +] +html_js_files = [] + +# Project logo, to place at the top of the sidebar. +html_logo = "_static/corese.svg" + +# Icon to put in the browser tab. +html_favicon = "_static/corese.svg" + +# Modify the title to get good social-media links +html_title = "CORESE" +html_short_title = "CORESE" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "logo": { + "image_relative": "_static/corese.svg", + "image_light": "_static/corese.svg", + "image_dark": "_static/corese.svg", + }, + "navbar_center": ["navbar-nav"], + "icon_links": [ + { + "name": "GitHub", + "url": "https://github.com/Wimmics/corese", + "icon": "fab fa-github-square", + } + ], + + } + diff --git a/docs/source/index.rst b/docs/source/index.rst new file mode 100644 index 000000000..f41df019c --- /dev/null +++ b/docs/source/index.rst @@ -0,0 +1,103 @@ +.. CORESE documentation master file, created by + sphinx-quickstart on Tue Apr 16 14:51:03 2024. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + + +.. image:: _static/corese.svg + :align: center + :width: 400px + +.. centered:: Software platform for the Semantic Web of Linked Data + +Corese is a software platform implementing and extending the standards of the Semantic Web. It allows to create, manipulate, parse, serialize, query, reason and validate RDF data. + + + +.. Define named hyperlinks for the references of W3C standards +.. _RDF: https://www.w3.org/RDF/ +.. _RDFS: https://www.w3.org/2001/sw/wiki/RDFS +.. _SPARQL1.1 Query & Update: https://www.w3.org/2001/sw/wiki/SPARQL +.. _OWL RL: https://www.w3.org/2005/rules/wiki/OWLRL +.. _SHACL: https://www.w3.org/TR/shacl/ + +.. TODO: Ask Remi if the documents below should be copied + +.. _STTL SPARQL: https://files.inria.fr/corese/doc/sttl.html +.. _SPARQL Rule: https://files.inria.fr/corese/doc/rule.html +.. _LDScript: https://files.inria.fr/corese/doc/ldscript.html + + + +.. DELETEME:Corese implements W3C standarts `RDF`_, `RDFS`_, `SPARQL1.1 Query & Update`_, `OWL RL`_, `SHACL`_ … It also implements extensions like `STTL SPARQL`_, `SPARQL Rule`_ and `LDScript`_. + +.. ############################################################################# +.. The statements below are to produce the grid of cards in the home page + TODO: Add the link to the pages of the documentation +.. grid:: 2 + + .. grid-item-card:: + :shadow: sm + :class-card: sd-rounded-3 + + Corese implements W3C standards and extensions + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + * W3C standards + * `RDF`_ + * `RDFS`_ + * `SPARQL1.1 Query & Update`_ + * `OWL RL`_ + * `SHACL`_ + * Extensions + * `STTL SPARQL`_ + * `SPARQL Rule`_ + * `LDScript`_ + + .. grid-item-card:: + :shadow: sm + :class-card: sd-rounded-3 + + Corese offers several interfaces + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + * **Corese-library__**: Java library to process RDF data and use Corese features via an API. + * **Corese-server**: Tool to easily create, configure and manage SPARQL endpoints. + * **Corese-gui**: Graphical interface that allows an easy and visual use of Corese features. 
+ * **Corese-Python (beta)**: Python wrapper for accessing and manipulating RDF data with Corese features using py4j. + * **Corese-Command (beta)**: Command Line Interface for Corese that allows users to interact with Corese features from the terminal. + +.. raw:: html + +

      Contributions and discussions

+ +.. _discussion forum: https://github.com/Wimmics/corese/discussions/ +.. _issue reports: https://github.com/Wimmics/corese/issues/ +.. _pull requests: https://github.com/Wimmics/corese/pulls/ + +For support questions, comments, and any ideas for improvements you’d like to discuss, please use our `discussion forum`_. We welcome everyone to contribute to `issue reports`_, suggest new features, and create `pull requests`_. + +.. TODO: Talk to Remi about General Information section + +.. ############################################################################# +.. The statements below are to produce the title of the page in the tab + and a menu with the links to the pages of the documentation + +.. raw html below is used to hide the title of the page but retain it in the + tab title. https://github.com/sphinx-doc/sphinx/issues/8356 +.. raw:: html + +
      + +CORESE documentation +=================================== + +.. raw:: html + +
      + +.. toctree:: + :hidden: + + install + user_guide + api + diff --git a/docs/source/install.rst b/docs/source/install.rst new file mode 100644 index 000000000..966b961e5 --- /dev/null +++ b/docs/source/install.rst @@ -0,0 +1,20 @@ +Installation +================================ + +1. Download the latest version of CORESE from the official website. + +2. Extract the downloaded file to your desired location. + +3. Navigate to the extracted folder. You should see a file named `corese.jar`. + +4. To run CORESE, open a terminal window and navigate to the directory containing `corese.jar`. + +5. Run the following command to start CORESE: + + .. code-block:: bash + + java -jar corese.jar + +6. If everything is set up correctly, you should see a message indicating that CORESE has started successfully. + +Please note that these instructions are for a basic setup. For more advanced configurations, please refer to the official CORESE documentation. \ No newline at end of file diff --git a/docs/source/user_guide.rst b/docs/source/user_guide.rst new file mode 100644 index 000000000..3ef66e42a --- /dev/null +++ b/docs/source/user_guide.rst @@ -0,0 +1,47 @@ +User Guide +================= + +Introduction +------------ + +CORESE is a Semantic Web Factory that implements W3C RDF, RDFS and SPARQL recommendations. It provides a rule-based inference engine and a query engine for SPARQL. + +Getting Started +--------------- + +To start CORESE, navigate to the directory containing `corese.jar` and run the following command: + +.. code-block:: bash + + java -jar corese.jar + +This will start the CORESE engine. + +Loading Data +------------ + +To load RDF data into CORESE, use the following command: + +.. code-block:: bash + + java -jar corese.jar -load + +Querying Data +------------- + +To query the loaded data, you can use the SPARQL query language. Here is an example of a simple SPARQL query: + +.. code-block:: sparql + + SELECT ?s ?p ?o WHERE { + ?s ?p ?o . + } + +This query will return all triples in the loaded data. + +For more complex queries, please refer to the SPARQL specification. + +Conclusion +---------- + +This guide provides a basic introduction to using CORESE. For more detailed information, please refer to the official CORESE documentation. 
\ No newline at end of file From e8c8f15956f43ceb4e4003a5797a0aecfce2a763 Mon Sep 17 00:00:00 2001 From: Anna Bobasheva <33026767+AnnaBobasheva@users.noreply.github.com> Date: Wed, 24 Apr 2024 17:50:35 +0200 Subject: [PATCH 092/146] Added markdown parser Update requirements.txt Update requirements.txt Delete .github/sphinx.yml Create sphinx.yml Update install.rst --- .github/{ => workflows}/sphinx.yml | 10 ++++------ docs/requirements.txt | 2 ++ docs/source/conf.py | 10 ++++++++-- docs/source/index.rst | 2 +- docs/source/install.rst | 3 ++- 5 files changed, 17 insertions(+), 10 deletions(-) rename .github/{ => workflows}/sphinx.yml (81%) diff --git a/.github/sphinx.yml b/.github/workflows/sphinx.yml similarity index 81% rename from .github/sphinx.yml rename to .github/workflows/sphinx.yml index 037ecdb4a..98005afa7 100644 --- a/.github/sphinx.yml +++ b/.github/workflows/sphinx.yml @@ -1,9 +1,6 @@ name: "Sphinx: Render docs" -on: - push: - branches: - - dev_documentation +on: push jobs: build: @@ -11,10 +8,10 @@ jobs: permissions: contents: write steps: - - name: Checkout dev_documentation branch + - name: Checkout doc_development branch uses: actions/checkout@v4 with: - ref: dev_documentation + ref: doc_development - name: Build HTML uses: ammaraskar/sphinx-action@master - name: Upload artifacts @@ -28,3 +25,4 @@ jobs: with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: docs/build/html + diff --git a/docs/requirements.txt b/docs/requirements.txt index 538a134a0..900c31208 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -2,4 +2,6 @@ pydata_sphinx_theme==0.14.4 babel>=2.9 packaging>=21.0 docutils<0.21,>=0.18.1 +Jinja2>=3.0 sphinx-design +myst-parser \ No newline at end of file diff --git a/docs/source/conf.py b/docs/source/conf.py index ee1b599c4..1911df8f0 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -11,6 +11,7 @@ import pathlib import sys +sys.path.insert(0, pathlib.Path(__file__).parents[1].resolve().as_posix()) sys.path.insert(0, pathlib.Path(__file__).parents[2].resolve().as_posix()) #sys.path.insert(0, pathlib.Path(__file__).parents[2].joinpath('code').resolve().as_posix()) @@ -27,14 +28,19 @@ 'sphinx.ext.doctest', 'sphinx.ext.autodoc', 'sphinx.ext.autosummary', - 'sphinx_design' + 'sphinx_design', # to render panels + 'myst_parser', # to parse markdown ] templates_path = ['_templates'] exclude_patterns = [] # The suffix(es) of source filenames. -source_suffix = ['.rst', '.md'] +source_suffix = { + '.rst': 'restructuredtext', + '.txt': 'markdown', + '.md': 'markdown', +} # -- Options for HTML output ------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output diff --git a/docs/source/index.rst b/docs/source/index.rst index f41df019c..2fb234de3 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -100,4 +100,4 @@ CORESE documentation install user_guide api - + Demo \ No newline at end of file diff --git a/docs/source/install.rst b/docs/source/install.rst index 966b961e5..cec1b3b50 100644 --- a/docs/source/install.rst +++ b/docs/source/install.rst @@ -17,4 +17,5 @@ Installation 6. If everything is set up correctly, you should see a message indicating that CORESE has started successfully. -Please note that these instructions are for a basic setup. For more advanced configurations, please refer to the official CORESE documentation. \ No newline at end of file +Please note that these instructions are for a basic setup. 
For more advanced configurations, please refer to the official CORESE documentation. + From 53546090c91f8d56cb647f379ea32bd11159c014 Mon Sep 17 00:00:00 2001 From: Anna Bobasheva <33026767+AnnaBobasheva@users.noreply.github.com> Date: Mon, 6 May 2024 11:44:13 +0200 Subject: [PATCH 093/146] Added Java API DOC generation Update sphinx.yml Update sphinx.yml Update sphinx.yml Update sphinx.yml Update sphinx.yml Update sphinx.yml Update sphinx.yml Update sphinx.yml Revert "Update sphinx.yml" This reverts commit 3a0914b7226811c8f5277fe8d5b29a6aa73d3e3b. Update sphinx.yml Update requirements.txt Revert "Update requirements.txt" This reverts commit 31c03c2a878d56541246633dd5ed4e3b2234486a. Update requirements.txt Update sphinx.yml Update sphinx.yml Update sphinx.yml Update requirements.txt Update requirements.txt Update Makefile Update requirements.txt --- .github/workflows/sphinx.yml | 10 +- docs/.gitignore | 4 +- docs/Makefile | 2 +- docs/requirements.txt | 9 +- docs/source/Doxyfile | 2846 ++++++++++++++++++++++++++++++++++ docs/source/api.rst | 85 - docs/source/conf.py | 52 + docs/source/index.rst | 2 +- 8 files changed, 2918 insertions(+), 92 deletions(-) create mode 100644 docs/source/Doxyfile delete mode 100644 docs/source/api.rst diff --git a/.github/workflows/sphinx.yml b/.github/workflows/sphinx.yml index 98005afa7..028a72996 100644 --- a/.github/workflows/sphinx.yml +++ b/.github/workflows/sphinx.yml @@ -8,17 +8,25 @@ jobs: permissions: contents: write steps: + - name: Checkout doc_development branch uses: actions/checkout@v4 with: ref: doc_development - - name: Build HTML + + - name: Build Sphinx Docs uses: ammaraskar/sphinx-action@master + with: + pre-build-command: "apt-get install doxygen -y" + build-command: "make html" + docs-folder: "docs/" + - name: Upload artifacts uses: actions/upload-artifact@v4 with: name: html-docs path: docs/build/html/ + - name: Deploy uses: peaceiris/actions-gh-pages@v3 if: github.ref == 'refs/heads/doc_development' diff --git a/docs/.gitignore b/docs/.gitignore index ae0c4ba2c..30128b35e 100644 --- a/docs/.gitignore +++ b/docs/.gitignore @@ -2,4 +2,6 @@ generated/ bak/ build/ - +java_api/ +*.exe +*.dll diff --git a/docs/Makefile b/docs/Makefile index d0c3cbf10..d03ac4e1a 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -12,7 +12,7 @@ BUILDDIR = build help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -.PHONY: help Makefile +.PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). diff --git a/docs/requirements.txt b/docs/requirements.txt index 900c31208..f3dac7969 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,7 +1,10 @@ -pydata_sphinx_theme==0.14.4 +sphinx>=7.1.2 +pydata_sphinx_theme>=0.14.4 babel>=2.9 packaging>=21.0 docutils<0.21,>=0.18.1 Jinja2>=3.0 -sphinx-design -myst-parser \ No newline at end of file +sphinx-design==0.5.0 +myst-parser==2.0.0 +breathe==4.35.0 +exhale==0.3.7 diff --git a/docs/source/Doxyfile b/docs/source/Doxyfile new file mode 100644 index 000000000..9a74e5991 --- /dev/null +++ b/docs/source/Doxyfile @@ -0,0 +1,2846 @@ +# Doxyfile 1.10.0 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project. +# +# All text after a double hash (##) is considered a comment and is placed in +# front of the TAG it is preceding. +# +# All text after a single hash (#) is considered a comment and will be ignored. 
+# The format is: +# TAG = value [value, ...] +# For lists, items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (\" \"). +# +# Note: +# +# Use doxygen to compare the used configuration file with the template +# configuration file: +# doxygen -x [configFile] +# Use doxygen to compare the used configuration file with the template +# configuration file without replacing the environment variables or CMake type +# replacement variables: +# doxygen -x_noenv [configFile] + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the configuration +# file that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# https://www.gnu.org/software/libiconv/ for the list of possible encodings. +# The default value is: UTF-8. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by +# double-quotes, unless you are using Doxywizard) that should identify the +# project for which the documentation is generated. This name is used in the +# title of most generated pages and in a few other places. +# The default value is: My Project. + +PROJECT_NAME = "corese" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. This +# could be handy for archiving the generated documentation or if some version +# control system is used. + +PROJECT_NUMBER = + +# Using the PROJECT_BRIEF tag one can provide an optional one line description +# for a project that appears at the top of each page and should give viewer a +# quick idea about the purpose of the project. Keep the description short. + +PROJECT_BRIEF = + +# With the PROJECT_LOGO tag one can specify a logo or an icon that is included +# in the documentation. The maximum height of the logo should not exceed 55 +# pixels and the maximum width should not exceed 200 pixels. Doxygen will copy +# the logo to the output directory. + +PROJECT_LOGO = + +# With the PROJECT_ICON tag one can specify an icon that is included in the tabs +# when the HTML document is shown. Doxygen will copy the logo to the output +# directory. + +PROJECT_ICON = + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path +# into which the generated documentation will be written. If a relative path is +# entered, it will be relative to the location where doxygen was started. If +# left blank the current directory will be used. + +#OUTPUT_DIRECTORY = "./_doxygen" +OUTPUT_DIRECTORY = ../build + +# If the CREATE_SUBDIRS tag is set to YES then doxygen will create up to 4096 +# sub-directories (in 2 levels) under the output directory of each output format +# and will distribute the generated files over these directories. Enabling this +# option can be useful when feeding doxygen a huge amount of source files, where +# putting all generated files in the same directory would otherwise causes +# performance problems for the file system. Adapt CREATE_SUBDIRS_LEVEL to +# control the number of sub-directories. +# The default value is: NO. + +CREATE_SUBDIRS = NO + +# Controls the number of sub-directories that will be created when +# CREATE_SUBDIRS tag is set to YES. 
Level 0 represents 16 directories, and every +# level increment doubles the number of directories, resulting in 4096 +# directories at level 8 which is the default and also the maximum value. The +# sub-directories are organized in 2 levels, the first level always has a fixed +# number of 16 directories. +# Minimum value: 0, maximum value: 8, default value: 8. +# This tag requires that the tag CREATE_SUBDIRS is set to YES. + +CREATE_SUBDIRS_LEVEL = 8 + +# If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII +# characters to appear in the names of generated files. If set to NO, non-ASCII +# characters will be escaped, for example _xE3_x81_x84 will be used for Unicode +# U+3044. +# The default value is: NO. + +ALLOW_UNICODE_NAMES = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Bulgarian, +# Catalan, Chinese, Chinese-Traditional, Croatian, Czech, Danish, Dutch, English +# (United States), Esperanto, Farsi (Persian), Finnish, French, German, Greek, +# Hindi, Hungarian, Indonesian, Italian, Japanese, Japanese-en (Japanese with +# English messages), Korean, Korean-en (Korean with English messages), Latvian, +# Lithuanian, Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, +# Romanian, Russian, Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, +# Swedish, Turkish, Ukrainian and Vietnamese. +# The default value is: English. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member +# descriptions after the members that are listed in the file and class +# documentation (similar to Javadoc). Set to NO to disable this. +# The default value is: YES. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief +# description of a member or function before the detailed description +# +# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. +# The default value is: YES. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator that is +# used to form the text in various listings. Each string in this list, if found +# as the leading text of the brief description, will be stripped from the text +# and the result, after processing the whole list, is used as the annotated +# text. Otherwise, the brief description is used as-is. If left blank, the +# following values are used ($name is automatically replaced with the name of +# the entity):The $name class, The $name widget, The $name file, is, provides, +# specifies, contains, represents, a, an and the. + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# doxygen will generate a detailed section even if there is only a brief +# description. +# The default value is: NO. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. 
+# The default value is: NO. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path +# before files name in the file list and in the header files. If set to NO the +# shortest path that makes the file name unique will be used +# The default value is: YES. + +FULL_PATH_NAMES = YES + +# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path. +# Stripping is only done if one of the specified strings matches the left-hand +# part of the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the path to +# strip. +# +# Note that you can specify absolute paths here, but also relative paths, which +# will be relative from the directory where doxygen is started. +# This tag requires that the tag FULL_PATH_NAMES is set to YES. + +STRIP_FROM_PATH = ".." + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the +# path mentioned in the documentation of a class, which tells the reader which +# header file to include in order to use a class. If left blank only the name of +# the header file containing the class definition is used. Otherwise one should +# specify the list of include paths that are normally passed to the compiler +# using the -I flag. + +STRIP_FROM_INC_PATH = + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but +# less readable) file names. This can be useful is your file systems doesn't +# support long names like on DOS, Mac, or CD-ROM. +# The default value is: NO. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the +# first line (until the first dot) of a Javadoc-style comment as the brief +# description. If set to NO, the Javadoc-style will behave just like regular Qt- +# style comments (thus requiring an explicit @brief command for a brief +# description.) +# The default value is: NO. + +JAVADOC_AUTOBRIEF = YES + +# If the JAVADOC_BANNER tag is set to YES then doxygen will interpret a line +# such as +# /*************** +# as being the beginning of a Javadoc-style comment "banner". If set to NO, the +# Javadoc-style will behave just like regular comments and it will not be +# interpreted by doxygen. +# The default value is: NO. + +JAVADOC_BANNER = NO + +# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first +# line (until the first dot) of a Qt-style comment as the brief description. If +# set to NO, the Qt-style will behave just like regular Qt-style comments (thus +# requiring an explicit \brief command for a brief description.) +# The default value is: NO. + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a +# multi-line C++ special comment block (i.e. a block of //! or /// comments) as +# a brief description. This used to be the default behavior. The new default is +# to treat a multi-line C++ comment block as a detailed description. Set this +# tag to YES if you prefer the old behavior instead. +# +# Note that setting this tag to YES also means that rational rose comments are +# not recognized any more. +# The default value is: NO. + +MULTILINE_CPP_IS_BRIEF = NO + +# By default Python docstrings are displayed as preformatted text and doxygen's +# special commands cannot be used. By setting PYTHON_DOCSTRING to NO the +# doxygen's special commands can be used and the contents of the docstring +# documentation blocks is shown as doxygen documentation. 
+# The default value is: YES. + +PYTHON_DOCSTRING = NO + +# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the +# documentation from any documented member that it re-implements. +# The default value is: YES. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new +# page for each member. If set to NO, the documentation of a member will be part +# of the file/class/namespace that contains it. +# The default value is: NO. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen +# uses this value to replace tabs by spaces in code fragments. +# Minimum value: 1, maximum value: 16, default value: 4. + +TAB_SIZE = 4 + +# This tag can be used to specify a number of aliases that act as commands in +# the documentation. An alias has the form: +# name=value +# For example adding +# "sideeffect=@par Side Effects:^^" +# will allow you to put the command \sideeffect (or @sideeffect) in the +# documentation, which will result in a user-defined paragraph with heading +# "Side Effects:". Note that you cannot put \n's in the value part of an alias +# to insert newlines (in the resulting output). You can put ^^ in the value part +# of an alias to insert a newline as if a physical newline was in the original +# file. When you need a literal { or } or , in the value part of an alias you +# have to escape them by means of a backslash (\), this can lead to conflicts +# with the commands \{ and \} for these it is advised to use the version @{ and +# @} or use a double escape (\\{ and \\}) + +ALIASES = + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources +# only. Doxygen will then generate output that is more tailored for C. For +# instance, some of the names that are used will be different. The list of all +# members will be omitted, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or +# Python sources only. Doxygen will then generate output that is more tailored +# for that language. For instance, namespaces will be presented as packages, +# qualified scopes will look different, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_JAVA = YES + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources. Doxygen will then generate output that is tailored for Fortran. +# The default value is: NO. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for VHDL. +# The default value is: NO. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Set the OPTIMIZE_OUTPUT_SLICE tag to YES if your project consists of Slice +# sources only. Doxygen will then generate output that is more tailored for that +# language. For instance, namespaces will be presented as modules, types will be +# separated into more groups, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_SLICE = NO + +# Doxygen selects the parser to use depending on the extension of the files it +# parses. With this tag you can assign which parser to use for a given +# extension. Doxygen has a built-in mapping, but you can override or extend it +# using this tag. 
The format is ext=language, where ext is a file extension, and +# language is one of the parsers supported by doxygen: IDL, Java, JavaScript, +# Csharp (C#), C, C++, Lex, D, PHP, md (Markdown), Objective-C, Python, Slice, +# VHDL, Fortran (fixed format Fortran: FortranFixed, free formatted Fortran: +# FortranFree, unknown formatted Fortran: Fortran. In the later case the parser +# tries to guess whether the code is fixed or free formatted code, this is the +# default for Fortran type files). For instance to make doxygen treat .inc files +# as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. +# +# Note: For files without extension you can use no_extension as a placeholder. +# +# Note that for custom extensions you also need to set FILE_PATTERNS otherwise +# the files are not read by doxygen. When specifying no_extension you should add +# * to the FILE_PATTERNS. +# +# Note see also the list of default file extension mappings. + +EXTENSION_MAPPING = + +# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments +# according to the Markdown format, which allows for more readable +# documentation. See https://daringfireball.net/projects/markdown/ for details. +# The output of markdown processing is further processed by doxygen, so you can +# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in +# case of backward compatibilities issues. +# The default value is: YES. + +MARKDOWN_SUPPORT = YES + +# When the TOC_INCLUDE_HEADINGS tag is set to a non-zero value, all headings up +# to that level are automatically included in the table of contents, even if +# they do not have an id attribute. +# Note: This feature currently applies only to Markdown headings. +# Minimum value: 0, maximum value: 99, default value: 5. +# This tag requires that the tag MARKDOWN_SUPPORT is set to YES. + +TOC_INCLUDE_HEADINGS = 5 + +# The MARKDOWN_ID_STYLE tag can be used to specify the algorithm used to +# generate identifiers for the Markdown headings. Note: Every identifier is +# unique. +# Possible values are: DOXYGEN use a fixed 'autotoc_md' string followed by a +# sequence number starting at 0 and GITHUB use the lower case version of title +# with any whitespace replaced by '-' and punctuation characters removed. +# The default value is: DOXYGEN. +# This tag requires that the tag MARKDOWN_SUPPORT is set to YES. + +MARKDOWN_ID_STYLE = DOXYGEN + +# When enabled doxygen tries to link words that correspond to documented +# classes, or namespaces to their corresponding documentation. Such a link can +# be prevented in individual cases by putting a % sign in front of the word or +# globally by setting AUTOLINK_SUPPORT to NO. +# The default value is: YES. + +AUTOLINK_SUPPORT = YES + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should set this +# tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); +# versus func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. +# The default value is: NO. + +BUILTIN_STL_SUPPORT = NO + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. +# The default value is: NO. 
+ +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip (see: +# https://www.riverbankcomputing.com/software/sip/intro) sources only. Doxygen +# will parse them like normal C++ but will assume all classes use public instead +# of private inheritance when no explicit protection keyword is present. +# The default value is: NO. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate +# getter and setter methods for a property. Setting this option to YES will make +# doxygen to replace the get and set methods by a property in the documentation. +# This will only work if the methods are indeed getting or setting a simple +# type. If this is not the case, or you want to show the methods anyway, you +# should set this option to NO. +# The default value is: YES. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. +# The default value is: NO. + +DISTRIBUTE_GROUP_DOC = NO + +# If one adds a struct or class to a group and this option is enabled, then also +# any nested class or struct is added to the same group. By default this option +# is disabled and one has to add nested compounds explicitly via \ingroup. +# The default value is: NO. + +GROUP_NESTED_COMPOUNDS = NO + +# Set the SUBGROUPING tag to YES to allow class member groups of the same type +# (for instance a group of public functions) to be put as a subgroup of that +# type (e.g. under the Public Functions section). Set it to NO to prevent +# subgrouping. Alternatively, this can be done per class using the +# \nosubgrouping command. +# The default value is: YES. + +SUBGROUPING = YES + +# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions +# are shown inside the group in which they are included (e.g. using \ingroup) +# instead of on a separate page (for HTML and Man pages) or section (for LaTeX +# and RTF). +# +# Note that this feature does not work in combination with +# SEPARATE_MEMBER_PAGES. +# The default value is: NO. + +INLINE_GROUPED_CLASSES = NO + +# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions +# with only public data fields or simple typedef fields will be shown inline in +# the documentation of the scope in which they are defined (i.e. file, +# namespace, or group documentation), provided this scope is documented. If set +# to NO, structs, classes, and unions are shown on a separate page (for HTML and +# Man pages) or section (for LaTeX and RTF). +# The default value is: NO. + +INLINE_SIMPLE_STRUCTS = NO + +# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or +# enum is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically be +# useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. +# The default value is: NO. + +TYPEDEF_HIDES_STRUCT = NO + +# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. 
This +# cache is used to resolve symbols given their name and scope. Since this can be +# an expensive process and often the same symbol appears multiple times in the +# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small +# doxygen will become slower. If the cache is too large, memory is wasted. The +# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range +# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536 +# symbols. At the end of a run doxygen will report the cache usage and suggest +# the optimal cache size from a speed point of view. +# Minimum value: 0, maximum value: 9, default value: 0. + +LOOKUP_CACHE_SIZE = 0 + +# The NUM_PROC_THREADS specifies the number of threads doxygen is allowed to use +# during processing. When set to 0 doxygen will based this on the number of +# cores available in the system. You can set it explicitly to a value larger +# than 0 to get more control over the balance between CPU load and processing +# speed. At this moment only the input processing can be done using multiple +# threads. Since this is still an experimental feature the default is set to 1, +# which effectively disables parallel processing. Please report any issues you +# encounter. Generating dot graphs in parallel is controlled by the +# DOT_NUM_THREADS setting. +# Minimum value: 0, maximum value: 32, default value: 1. + +NUM_PROC_THREADS = 1 + +# If the TIMESTAMP tag is set different from NO then each generated page will +# contain the date or date and time when the page was generated. Setting this to +# NO can help when comparing the output of multiple runs. +# Possible values are: YES, NO, DATETIME and DATE. +# The default value is: NO. + +TIMESTAMP = NO + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in +# documentation are documented, even if no documentation was available. Private +# class members and static file members will be hidden unless the +# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES. +# Note: This will also disable the warnings about undocumented members that are +# normally produced when WARNINGS is set to YES. +# The default value is: NO. + +EXTRACT_ALL = NO + +# If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will +# be included in the documentation. +# The default value is: NO. + +EXTRACT_PRIVATE = NO + +# If the EXTRACT_PRIV_VIRTUAL tag is set to YES, documented private virtual +# methods of a class will be included in the documentation. +# The default value is: NO. + +EXTRACT_PRIV_VIRTUAL = NO + +# If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal +# scope will be included in the documentation. +# The default value is: NO. + +EXTRACT_PACKAGE = NO + +# If the EXTRACT_STATIC tag is set to YES, all static members of a file will be +# included in the documentation. +# The default value is: NO. + +EXTRACT_STATIC = NO + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined +# locally in source files will be included in the documentation. If set to NO, +# only classes defined in header files are included. Does not have any effect +# for Java sources. +# The default value is: YES. + +EXTRACT_LOCAL_CLASSES = YES + +# This flag is only useful for Objective-C code. 
If set to YES, local methods, +# which are defined in the implementation section but not in the interface are +# included in the documentation. If set to NO, only methods in the interface are +# included. +# The default value is: NO. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base name of +# the file that contains the anonymous namespace. By default anonymous namespace +# are hidden. +# The default value is: NO. + +EXTRACT_ANON_NSPACES = NO + +# If this flag is set to YES, the name of an unnamed parameter in a declaration +# will be determined by the corresponding definition. By default unnamed +# parameters remain unnamed in the output. +# The default value is: YES. + +RESOLVE_UNNAMED_PARAMS = YES + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all +# undocumented members inside documented classes or files. If set to NO these +# members will be included in the various overviews, but no documentation +# section is generated. This option has no effect if EXTRACT_ALL is enabled. +# The default value is: NO. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. If set +# to NO, these classes will be included in the various overviews. This option +# will also hide undocumented C++ concepts if enabled. This option has no effect +# if EXTRACT_ALL is enabled. +# The default value is: NO. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend +# declarations. If set to NO, these declarations will be included in the +# documentation. +# The default value is: NO. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any +# documentation blocks found inside the body of a function. If set to NO, these +# blocks will be appended to the function's detailed documentation block. +# The default value is: NO. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation that is typed after a +# \internal command is included. If the tag is set to NO then the documentation +# will be excluded. Set it to YES to include the internal documentation. +# The default value is: NO. + +INTERNAL_DOCS = NO + +# With the correct setting of option CASE_SENSE_NAMES doxygen will better be +# able to match the capabilities of the underlying filesystem. In case the +# filesystem is case sensitive (i.e. it supports files in the same directory +# whose names only differ in casing), the option must be set to YES to properly +# deal with such files in case they appear in the input. For filesystems that +# are not case sensitive the option should be set to NO to properly deal with +# output files written for symbols that only differ in casing, such as for two +# classes, one named CLASS and the other named Class, and to also support +# references to files without having to specify the exact matching casing. On +# Windows (including Cygwin) and MacOS, users should typically set this option +# to NO, whereas on Linux or other Unix flavors it should typically be set to +# YES. +# Possible values are: SYSTEM, NO and YES. +# The default value is: SYSTEM. 
+ +CASE_SENSE_NAMES = SYSTEM + +# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with +# their full class and namespace scopes in the documentation. If set to YES, the +# scope will be hidden. +# The default value is: NO. + +HIDE_SCOPE_NAMES = YES + +# If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will +# append additional text to a page's title, such as Class Reference. If set to +# YES the compound reference will be hidden. +# The default value is: NO. + +HIDE_COMPOUND_REFERENCE= NO + +# If the SHOW_HEADERFILE tag is set to YES then the documentation for a class +# will show which file needs to be included to use the class. +# The default value is: YES. + +SHOW_HEADERFILE = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of +# the files that are included by a file in the documentation of that file. +# The default value is: YES. + +SHOW_INCLUDE_FILES = NO + +# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each +# grouped member an include statement to the documentation, telling the reader +# which file to include in order to use the member. +# The default value is: NO. + +SHOW_GROUPED_MEMB_INC = NO + +# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include +# files with double quotes in the documentation rather than with sharp brackets. +# The default value is: NO. + +FORCE_LOCAL_INCLUDES = NO + +# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the +# documentation for inline members. +# The default value is: YES. + +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the +# (detailed) documentation of file and class members alphabetically by member +# name. If set to NO, the members will appear in declaration order. +# The default value is: YES. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief +# descriptions of file, namespace and class members alphabetically by member +# name. If set to NO, the members will appear in declaration order. Note that +# this will also influence the order of the classes in the class list. +# The default value is: NO. + +SORT_BRIEF_DOCS = NO + +# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the +# (brief and detailed) documentation of class members so that constructors and +# destructors are listed first. If set to NO the constructors will appear in the +# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS. +# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief +# member documentation. +# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting +# detailed member documentation. +# The default value is: NO. + +SORT_MEMBERS_CTORS_1ST = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy +# of group names into alphabetical order. If set to NO the group names will +# appear in their defined order. +# The default value is: NO. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by +# fully-qualified names, including namespaces. If set to NO, the class list will +# be sorted only by class name, not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the alphabetical +# list. +# The default value is: NO. 
+ +SORT_BY_SCOPE_NAME = NO + +# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper +# type resolution of all parameters of a function it will reject a match between +# the prototype and the implementation of a member function even if there is +# only one candidate or it is obvious which candidate to choose by doing a +# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still +# accept a match between prototype and implementation in such cases. +# The default value is: NO. + +STRICT_PROTO_MATCHING = NO + +# The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo +# list. This list is created by putting \todo commands in the documentation. +# The default value is: YES. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test +# list. This list is created by putting \test commands in the documentation. +# The default value is: YES. + +GENERATE_TESTLIST = YES + +# The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug +# list. This list is created by putting \bug commands in the documentation. +# The default value is: YES. + +GENERATE_BUGLIST = YES + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO) +# the deprecated list. This list is created by putting \deprecated commands in +# the documentation. +# The default value is: YES. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional documentation +# sections, marked by \if ... \endif and \cond +# ... \endcond blocks. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the +# initial value of a variable or macro / define can have for it to appear in the +# documentation. If the initializer consists of more lines than specified here +# it will be hidden. Use a value of 0 to hide initializers completely. The +# appearance of the value of individual variables and macros / defines can be +# controlled using \showinitializer or \hideinitializer command in the +# documentation regardless of this setting. +# Minimum value: 0, maximum value: 10000, default value: 30. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at +# the bottom of the documentation of classes and structs. If set to YES, the +# list will mention the files that were used to generate the documentation. +# The default value is: YES. + +SHOW_USED_FILES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This +# will remove the Files entry from the Quick Index and from the Folder Tree View +# (if specified). +# The default value is: YES. + +SHOW_FILES = NO + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces +# page. This will remove the Namespaces entry from the Quick Index and from the +# Folder Tree View (if specified). +# The default value is: YES. + +SHOW_NAMESPACES = NO + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command command input-file, where command is the value of the +# FILE_VERSION_FILTER tag, and input-file is the name of an input file provided +# by doxygen. Whatever the program writes to standard output is used as the file +# version. For an example see the documentation. 
+ +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed +# by doxygen. The layout file controls the global structure of the generated +# output files in an output format independent way. To create the layout file +# that represents doxygen's defaults, run doxygen with the -l option. You can +# optionally specify a file name after the option, if omitted DoxygenLayout.xml +# will be used as the name of the layout file. See also section "Changing the +# layout of pages" for information. +# +# Note that if you run doxygen from a directory containing a file called +# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE +# tag is left empty. + +LAYOUT_FILE = + +# The CITE_BIB_FILES tag can be used to specify one or more bib files containing +# the reference definitions. This must be a list of .bib files. The .bib +# extension is automatically appended if omitted. This requires the bibtex tool +# to be installed. See also https://en.wikipedia.org/wiki/BibTeX for more info. +# For LaTeX the style of the bibliography can be controlled using +# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the +# search path. See also \cite for info how to create references. + +CITE_BIB_FILES = + +#--------------------------------------------------------------------------- +# Configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated to +# standard output by doxygen. If QUIET is set to YES this implies that the +# messages are off. +# The default value is: NO. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES +# this implies that the warnings are on. +# +# Tip: Turn warnings on while writing the documentation. +# The default value is: YES. + +WARNINGS = YES + +# If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate +# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag +# will automatically be disabled. +# The default value is: YES. + +WARN_IF_UNDOCUMENTED = NO + +# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as documenting some parameters in +# a documented function twice, or documenting parameters that don't exist or +# using markup commands wrongly. +# The default value is: YES. + +WARN_IF_DOC_ERROR = YES + +# If WARN_IF_INCOMPLETE_DOC is set to YES, doxygen will warn about incomplete +# function parameter documentation. If set to NO, doxygen will accept that some +# parameters have no documentation without warning. +# The default value is: YES. + +WARN_IF_INCOMPLETE_DOC = YES + +# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that +# are documented, but have no documentation for their parameters or return +# value. If set to NO, doxygen will only warn about wrong parameter +# documentation, but not about the absence of documentation. If EXTRACT_ALL is +# set to YES then this flag will automatically be disabled. See also +# WARN_IF_INCOMPLETE_DOC +# The default value is: NO. + +WARN_NO_PARAMDOC = NO + +# If WARN_IF_UNDOC_ENUM_VAL option is set to YES, doxygen will warn about +# undocumented enumeration values. If set to NO, doxygen will accept +# undocumented enumeration values. 
If EXTRACT_ALL is set to YES then this flag +# will automatically be disabled. +# The default value is: NO. + +WARN_IF_UNDOC_ENUM_VAL = NO + +# If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when +# a warning is encountered. If the WARN_AS_ERROR tag is set to FAIL_ON_WARNINGS +# then doxygen will continue running as if WARN_AS_ERROR tag is set to NO, but +# at the end of the doxygen process doxygen will return with a non-zero status. +# If the WARN_AS_ERROR tag is set to FAIL_ON_WARNINGS_PRINT then doxygen behaves +# like FAIL_ON_WARNINGS but in case no WARN_LOGFILE is defined doxygen will not +# write the warning messages in between other messages but write them at the end +# of a run, in case a WARN_LOGFILE is defined the warning messages will be +# besides being in the defined file also be shown at the end of a run, unless +# the WARN_LOGFILE is defined as - i.e. standard output (stdout) in that case +# the behavior will remain as with the setting FAIL_ON_WARNINGS. +# Possible values are: NO, YES, FAIL_ON_WARNINGS and FAIL_ON_WARNINGS_PRINT. +# The default value is: NO. + +WARN_AS_ERROR = NO + +# The WARN_FORMAT tag determines the format of the warning messages that doxygen +# can produce. The string should contain the $file, $line, and $text tags, which +# will be replaced by the file and line number from which the warning originated +# and the warning text. Optionally the format may contain $version, which will +# be replaced by the version of the file (if it could be obtained via +# FILE_VERSION_FILTER) +# See also: WARN_LINE_FORMAT +# The default value is: $file:$line: $text. + +WARN_FORMAT = "$file:$line: $text" + +# In the $text part of the WARN_FORMAT command it is possible that a reference +# to a more specific place is given. To make it easier to jump to this place +# (outside of doxygen) the user can define a custom "cut" / "paste" string. +# Example: +# WARN_LINE_FORMAT = "'vi $file +$line'" +# See also: WARN_FORMAT +# The default value is: at line $line of file $file. + +WARN_LINE_FORMAT = "at line $line of file $file" + +# The WARN_LOGFILE tag can be used to specify a file to which warning and error +# messages should be written. If left blank the output is written to standard +# error (stderr). In case the file specified cannot be opened for writing the +# warning and error messages are written to standard error. When as file - is +# specified the warning and error messages are written to standard output +# (stdout). + +WARN_LOGFILE = + +#--------------------------------------------------------------------------- +# Configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag is used to specify the files and/or directories that contain +# documented source files. You may enter file names like myfile.cpp or +# directories like /usr/src/myproject. Separate the files or directories with +# spaces. See also FILE_PATTERNS and EXTENSION_MAPPING +# Note: If this tag is empty the current directory is searched. + +INPUT = ../../corese-core/src/main/java/fr/inria/corese/core/Corese.java \ + ../../corese-core/src/main/java/fr/inria/corese/core/Graph.java +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses +# libiconv (or the iconv built into libc) for the transcoding. 
See the libiconv +# documentation (see: +# https://www.gnu.org/software/libiconv/) for the list of possible encodings. +# See also: INPUT_FILE_ENCODING +# The default value is: UTF-8. + +INPUT_ENCODING = UTF-8 + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses The INPUT_FILE_ENCODING tag can be used to specify +# character encoding on a per file pattern basis. Doxygen will compare the file +# name with each pattern and apply the encoding instead of the default +# INPUT_ENCODING) if there is a match. The character encodings are a list of the +# form: pattern=encoding (like *.php=ISO-8859-1). See cfg_input_encoding +# "INPUT_ENCODING" for further information on supported encodings. + +INPUT_FILE_ENCODING = + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and +# *.h) to filter out the source-files in the directories. +# +# Note that for custom extensions or not directly supported extensions you also +# need to set EXTENSION_MAPPING for the extension otherwise the files are not +# read by doxygen. +# +# Note the list of default checked file patterns might differ from the list of +# default file extension mappings. +# +# If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cxxm, +# *.cpp, *.cppm, *.ccm, *.c++, *.c++m, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, +# *.idl, *.ddl, *.odl, *.h, *.hh, *.hxx, *.hpp, *.h++, *.ixx, *.l, *.cs, *.d, +# *.php, *.php4, *.php5, *.phtml, *.inc, *.m, *.markdown, *.md, *.mm, *.dox (to +# be provided as doxygen C comment), *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, +# *.f18, *.f, *.for, *.vhd, *.vhdl, *.ucf, *.qsf and *.ice. + +FILE_PATTERNS = *.java + +# The RECURSIVE tag can be used to specify whether or not subdirectories should +# be searched for input files as well. +# The default value is: NO. + +RECURSIVE = NO + +# The EXCLUDE tag can be used to specify files and/or directories that should be +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. +# +# Note that relative paths are relative to the directory from which doxygen is +# run. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or +# directories that are symbolic links (a Unix file system feature) are excluded +# from the input. +# The default value is: NO. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. +# +# Note that the wildcards are matched against the file with absolute path, so to +# exclude all test directories for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# ANamespace::AClass, ANamespace::*Test + +EXCLUDE_SYMBOLS = java::lang fr::inria::corese::sparql* + +# The EXAMPLE_PATH tag can be used to specify one or more files or directories +# that contain example code fragments that are included (see the \include +# command). 
+
+EXAMPLE_PATH =
+
+# If the value of the EXAMPLE_PATH tag contains directories, you can use the
+# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and
+# *.h) to filter out the source-files in the directories. If left blank all
+# files are included.
+
+EXAMPLE_PATTERNS = *
+
+# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
+# searched for input files to be used with the \include or \dontinclude commands
+# irrespective of the value of the RECURSIVE tag.
+# The default value is: NO.
+
+EXAMPLE_RECURSIVE = NO
+
+# The IMAGE_PATH tag can be used to specify one or more files or directories
+# that contain images that are to be included in the documentation (see the
+# \image command).
+
+IMAGE_PATH =
+
+# The INPUT_FILTER tag can be used to specify a program that doxygen should
+# invoke to filter for each input file. Doxygen will invoke the filter program
+# by executing (via popen()) the command:
+#
+# <filter> <input-file>
+#
+# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the
+# name of an input file. Doxygen will then use the output that the filter
+# program writes to standard output. If FILTER_PATTERNS is specified, this tag
+# will be ignored.
+#
+# Note that the filter must not add or remove lines; it is applied before the
+# code is scanned, but not when the output code is generated. If lines are added
+# or removed, the anchors will not be placed correctly.
+#
+# Note that doxygen will use the data processed and written to standard output
+# for further processing, therefore nothing else, like debug statements or used
+# commands (so in case of a Windows batch file always use @echo OFF), should be
+# written to standard output.
+#
+# Note that for custom extensions or not directly supported extensions you also
+# need to set EXTENSION_MAPPING for the extension otherwise the files are not
+# properly processed by doxygen.
+
+INPUT_FILTER =
+
+# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
+# basis. Doxygen will compare the file name with each pattern and apply the
+# filter if there is a match. The filters are a list of the form: pattern=filter
+# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how
+# filters are used. If the FILTER_PATTERNS tag is empty or if none of the
+# patterns match the file name, INPUT_FILTER is applied.
+#
+# Note that for custom extensions or not directly supported extensions you also
+# need to set EXTENSION_MAPPING for the extension otherwise the files are not
+# properly processed by doxygen.
+
+FILTER_PATTERNS =
+
+# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
+# INPUT_FILTER) will also be used to filter the input files that are used for
+# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES).
+# The default value is: NO.
+
+FILTER_SOURCE_FILES = NO
+
+# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file
+# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and
+# it is also possible to disable source filtering for a specific pattern using
+# *.ext= (so without naming a filter).
+# This tag requires that the tag FILTER_SOURCE_FILES is set to YES.
+
+FILTER_SOURCE_PATTERNS =
+
+# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that
+# is part of the input, its contents will be placed on the main page
+# (index.html).
This can be useful if you have a project on for instance GitHub +# and want to reuse the introduction page also for the doxygen output. + +USE_MDFILE_AS_MAINPAGE = + +# The Fortran standard specifies that for fixed formatted Fortran code all +# characters from position 72 are to be considered as comment. A common +# extension is to allow longer lines before the automatic comment starts. The +# setting FORTRAN_COMMENT_AFTER will also make it possible that longer lines can +# be processed before the automatic comment starts. +# Minimum value: 7, maximum value: 10000, default value: 72. + +FORTRAN_COMMENT_AFTER = 72 + +#--------------------------------------------------------------------------- +# Configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will be +# generated. Documented entities will be cross-referenced with these sources. +# +# Note: To get rid of all source code in the generated output, make sure that +# also VERBATIM_HEADERS is set to NO. +# The default value is: NO. + +SOURCE_BROWSER = NO + +# Setting the INLINE_SOURCES tag to YES will include the body of functions, +# multi-line macros, enums or list initialized variables directly into the +# documentation. +# The default value is: NO. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any +# special comment blocks from generated source code fragments. Normal C, C++ and +# Fortran comments will always remain visible. +# The default value is: YES. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES then for each documented +# entity all documented functions referencing it will be listed. +# The default value is: NO. + +REFERENCED_BY_RELATION = NO + +# If the REFERENCES_RELATION tag is set to YES then for each documented function +# all documented entities called/used by that function will be listed. +# The default value is: NO. + +REFERENCES_RELATION = NO + +# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set +# to YES then the hyperlinks from functions in REFERENCES_RELATION and +# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will +# link to the documentation. +# The default value is: YES. + +REFERENCES_LINK_SOURCE = YES + +# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the +# source code will show a tooltip with additional information such as prototype, +# brief description and links to the definition and documentation. Since this +# will make the HTML file larger and loading of large files a bit slower, you +# can opt to disable this feature. +# The default value is: YES. +# This tag requires that the tag SOURCE_BROWSER is set to YES. + +SOURCE_TOOLTIPS = YES + +# If the USE_HTAGS tag is set to YES then the references to source code will +# point to the HTML generated by the htags(1) tool instead of doxygen built-in +# source browser. The htags tool is part of GNU's global source tagging system +# (see https://www.gnu.org/software/global/global.html). You will need version +# 4.8.6 or higher. 
+# +# To use it do the following: +# - Install the latest version of global +# - Enable SOURCE_BROWSER and USE_HTAGS in the configuration file +# - Make sure the INPUT points to the root of the source tree +# - Run doxygen as normal +# +# Doxygen will invoke htags (and that will in turn invoke gtags), so these +# tools must be available from the command line (i.e. in the search path). +# +# The result: instead of the source browser generated by doxygen, the links to +# source code will now point to the output of htags. +# The default value is: NO. +# This tag requires that the tag SOURCE_BROWSER is set to YES. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set the YES then doxygen will generate a +# verbatim copy of the header file for each class for which an include is +# specified. Set to NO to disable this. +# See also: Section \class. +# The default value is: YES. + +VERBATIM_HEADERS = YES + +# If the CLANG_ASSISTED_PARSING tag is set to YES then doxygen will use the +# clang parser (see: +# http://clang.llvm.org/) for more accurate parsing at the cost of reduced +# performance. This can be particularly helpful with template rich C++ code for +# which doxygen's built-in parser lacks the necessary type information. +# Note: The availability of this option depends on whether or not doxygen was +# generated with the -Duse_libclang=ON option for CMake. +# The default value is: NO. + +CLANG_ASSISTED_PARSING = NO + +# If the CLANG_ASSISTED_PARSING tag is set to YES and the CLANG_ADD_INC_PATHS +# tag is set to YES then doxygen will add the directory of each input to the +# include path. +# The default value is: YES. +# This tag requires that the tag CLANG_ASSISTED_PARSING is set to YES. + +CLANG_ADD_INC_PATHS = NO + +# If clang assisted parsing is enabled you can provide the compiler with command +# line options that you would normally use when invoking the compiler. Note that +# the include paths will already be set by doxygen for the files and directories +# specified with INPUT and INCLUDE_PATH. +# This tag requires that the tag CLANG_ASSISTED_PARSING is set to YES. + +CLANG_OPTIONS = + +# If clang assisted parsing is enabled you can provide the clang parser with the +# path to the directory containing a file called compile_commands.json. This +# file is the compilation database (see: +# http://clang.llvm.org/docs/HowToSetupToolingForLLVM.html) containing the +# options used when the source files were built. This is equivalent to +# specifying the -p option to a clang tool, such as clang-check. These options +# will then be passed to the parser. Any options specified with CLANG_OPTIONS +# will be added as well. +# Note: The availability of this option depends on whether or not doxygen was +# generated with the -Duse_libclang=ON option for CMake. + +CLANG_DATABASE_PATH = + +#--------------------------------------------------------------------------- +# Configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all +# compounds will be generated. Enable this if the project contains a lot of +# classes, structs, unions or interfaces. +# The default value is: YES. + +ALPHABETICAL_INDEX = YES + +# The IGNORE_PREFIX tag can be used to specify a prefix (or a list of prefixes) +# that should be ignored while generating the index headers. The IGNORE_PREFIX +# tag works for classes, function and member names. 
The entity will be placed in +# the alphabetical list under the first letter of the entity name that remains +# after removing the prefix. +# This tag requires that the tag ALPHABETICAL_INDEX is set to YES. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output +# The default value is: YES. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a +# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of +# it. +# The default directory is: html. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_OUTPUT = doxygen_html + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each +# generated HTML page (for example: .htm, .php, .asp). +# The default value is: .html. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a user-defined HTML header file for +# each generated HTML page. If the tag is left blank doxygen will generate a +# standard header. +# +# To get valid HTML the header file that includes any scripts and style sheets +# that doxygen needs, which is dependent on the configuration options used (e.g. +# the setting GENERATE_TREEVIEW). It is highly recommended to start with a +# default header using +# doxygen -w html new_header.html new_footer.html new_stylesheet.css +# YourConfigFile +# and then modify the file new_header.html. See also section "Doxygen usage" +# for information on how to generate the default header that doxygen normally +# uses. +# Note: The header is subject to change so you typically have to regenerate the +# default header when upgrading to a newer version of doxygen. For a description +# of the possible markers and block names see the documentation. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_HEADER = + +# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each +# generated HTML page. If the tag is left blank doxygen will generate a standard +# footer. See HTML_HEADER for more information on how to generate a default +# footer and what special commands can be used inside the footer. See also +# section "Doxygen usage" for information on how to generate the default footer +# that doxygen normally uses. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FOOTER = + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style +# sheet that is used by each HTML page. It can be used to fine-tune the look of +# the HTML output. If left blank doxygen will generate a default style sheet. +# See also section "Doxygen usage" for information on how to generate the style +# sheet that doxygen normally uses. +# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as +# it is more robust and this tag (HTML_STYLESHEET) will in the future become +# obsolete. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_STYLESHEET = + +# The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined +# cascading style sheets that are included after the standard style sheets +# created by doxygen. Using this option one can overrule certain style aspects. 
+# This is preferred over using HTML_STYLESHEET since it does not replace the +# standard style sheet and is therefore more robust against future updates. +# Doxygen will copy the style sheet files to the output directory. +# Note: The order of the extra style sheet files is of importance (e.g. the last +# style sheet in the list overrules the setting of the previous ones in the +# list). +# Note: Since the styling of scrollbars can currently not be overruled in +# Webkit/Chromium, the styling will be left out of the default doxygen.css if +# one or more extra stylesheets have been specified. So if scrollbar +# customization is desired it has to be added explicitly. For an example see the +# documentation. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_EXTRA_STYLESHEET = + +# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or +# other source files which should be copied to the HTML output directory. Note +# that these files will be copied to the base HTML output directory. Use the +# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these +# files. In the HTML_STYLESHEET file, use the file name only. Also note that the +# files will be copied as-is; there are no commands or markers available. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_EXTRA_FILES = + +# The HTML_COLORSTYLE tag can be used to specify if the generated HTML output +# should be rendered with a dark or light theme. +# Possible values are: LIGHT always generate light mode output, DARK always +# generate dark mode output, AUTO_LIGHT automatically set the mode according to +# the user preference, use light mode if no preference is set (the default), +# AUTO_DARK automatically set the mode according to the user preference, use +# dark mode if no preference is set and TOGGLE allow to user to switch between +# light and dark mode via a button. +# The default value is: AUTO_LIGHT. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE = AUTO_LIGHT + +# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen +# will adjust the colors in the style sheet and background images according to +# this color. Hue is specified as an angle on a color-wheel, see +# https://en.wikipedia.org/wiki/Hue for more information. For instance the value +# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300 +# purple, and 360 is red again. +# Minimum value: 0, maximum value: 359, default value: 220. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_HUE = 220 + +# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors +# in the HTML output. For a value of 0 the output will use gray-scales only. A +# value of 255 will produce the most vivid colors. +# Minimum value: 0, maximum value: 255, default value: 100. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_SAT = 100 + +# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the +# luminance component of the colors in the HTML output. Values below 100 +# gradually make the output lighter, whereas values above 100 make the output +# darker. The value divided by 100 is the actual gamma applied, so 80 represents +# a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not +# change the gamma. +# Minimum value: 40, maximum value: 240, default value: 80. +# This tag requires that the tag GENERATE_HTML is set to YES. 
+ +HTML_COLORSTYLE_GAMMA = 80 + +# If the HTML_DYNAMIC_MENUS tag is set to YES then the generated HTML +# documentation will contain a main index with vertical navigation menus that +# are dynamically created via JavaScript. If disabled, the navigation index will +# consists of multiple levels of tabs that are statically embedded in every HTML +# page. Disable this option to support browsers that do not have JavaScript, +# like the Qt help browser. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_DYNAMIC_MENUS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_DYNAMIC_SECTIONS = NO + +# If the HTML_CODE_FOLDING tag is set to YES then classes and functions can be +# dynamically folded and expanded in the generated HTML source code. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_CODE_FOLDING = YES + +# If the HTML_COPY_CLIPBOARD tag is set to YES then doxygen will show an icon in +# the top right corner of code and text fragments that allows the user to copy +# its content to the clipboard. Note this only works if supported by the browser +# and the web page is served via a secure context (see: +# https://www.w3.org/TR/secure-contexts/), i.e. using the https: or file: +# protocol. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COPY_CLIPBOARD = YES + +# Doxygen stores a couple of settings persistently in the browser (via e.g. +# cookies). By default these settings apply to all HTML pages generated by +# doxygen across all projects. The HTML_PROJECT_COOKIE tag can be used to store +# the settings under a project specific key, such that the user preferences will +# be stored separately. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_PROJECT_COOKIE = + +# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries +# shown in the various tree structured indices initially; the user can expand +# and collapse entries dynamically later on. Doxygen will expand the tree to +# such a level that at most the specified number of entries are visible (unless +# a fully collapsed tree already exceeds this amount). So setting the number of +# entries 1 will produce a full collapsed tree by default. 0 is a special value +# representing an infinite number of entries and will result in a full expanded +# tree by default. +# Minimum value: 0, maximum value: 9999, default value: 100. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_INDEX_NUM_ENTRIES = 100 + +# If the GENERATE_DOCSET tag is set to YES, additional index files will be +# generated that can be used as input for Apple's Xcode 3 integrated development +# environment (see: +# https://developer.apple.com/xcode/), introduced with OSX 10.5 (Leopard). To +# create a documentation set, doxygen will generate a Makefile in the HTML +# output directory. Running make will produce the docset in that directory and +# running make install will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at +# startup. See https://developer.apple.com/library/archive/featuredarticles/Doxy +# genXcode/_index.html for more information. +# The default value is: NO. 
+# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_DOCSET = NO + +# This tag determines the name of the docset feed. A documentation feed provides +# an umbrella under which multiple documentation sets from a single provider +# (such as a company or product suite) can be grouped. +# The default value is: Doxygen generated docs. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# This tag determines the URL of the docset feed. A documentation feed provides +# an umbrella under which multiple documentation sets from a single provider +# (such as a company or product suite) can be grouped. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_FEEDURL = + +# This tag specifies a string that should uniquely identify the documentation +# set bundle. This should be a reverse domain-name style string, e.g. +# com.mycompany.MyDocSet. Doxygen will append .docset to the name. +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify +# the documentation publisher. This should be a reverse domain-name style +# string, e.g. com.mycompany.MyDocSet.documentation. +# The default value is: org.doxygen.Publisher. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_PUBLISHER_ID = org.doxygen.Publisher + +# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher. +# The default value is: Publisher. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_PUBLISHER_NAME = Publisher + +# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three +# additional HTML index files: index.hhp, index.hhc, and index.hhk. The +# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop +# on Windows. In the beginning of 2021 Microsoft took the original page, with +# a.o. the download links, offline the HTML help workshop was already many years +# in maintenance mode). You can download the HTML help workshop from the web +# archives at Installation executable (see: +# http://web.archive.org/web/20160201063255/http://download.microsoft.com/downlo +# ad/0/A/9/0A939EF6-E31C-430F-A3DF-DFAE7960D564/htmlhelp.exe). +# +# The HTML Help Workshop contains a compiler that can convert all HTML output +# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML +# files are now used as the Windows 98 help format, and will replace the old +# Windows help format (.hlp) on all Windows platforms in the future. Compressed +# HTML files also contain an index, a table of contents, and you can search for +# words in the documentation. The HTML workshop also contains a viewer for +# compressed HTML files. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_HTMLHELP = NO + +# The CHM_FILE tag can be used to specify the file name of the resulting .chm +# file. You can add a path in front of the file if the result should not be +# written to the html output directory. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +CHM_FILE = + +# The HHC_LOCATION tag can be used to specify the location (absolute path +# including file name) of the HTML help compiler (hhc.exe). If non-empty, +# doxygen will try to run the HTML help compiler on the generated index.hhp. 
+# The file has to be specified with full path. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +HHC_LOCATION = + +# The GENERATE_CHI flag controls if a separate .chi index file is generated +# (YES) or that it should be included in the main .chm file (NO). +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +GENERATE_CHI = NO + +# The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc) +# and project file content. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +CHM_INDEX_ENCODING = + +# The BINARY_TOC flag controls whether a binary table of contents is generated +# (YES) or a normal table of contents (NO) in the .chm file. Furthermore it +# enables the Previous and Next buttons. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +BINARY_TOC = NO + +# The TOC_EXPAND flag can be set to YES to add extra items for group members to +# the table of contents of the HTML help documentation and to the tree view. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +TOC_EXPAND = NO + +# The SITEMAP_URL tag is used to specify the full URL of the place where the +# generated documentation will be placed on the server by the user during the +# deployment of the documentation. The generated sitemap is called sitemap.xml +# and placed on the directory specified by HTML_OUTPUT. In case no SITEMAP_URL +# is specified no sitemap is generated. For information about the sitemap +# protocol see https://www.sitemaps.org +# This tag requires that the tag GENERATE_HTML is set to YES. + +SITEMAP_URL = + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and +# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that +# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help +# (.qch) of the generated HTML documentation. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify +# the file name of the resulting .qch file. The path specified is relative to +# the HTML output folder. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help +# Project output. For more information please see Qt Help Project / Namespace +# (see: +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace). +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_NAMESPACE = org.doxygen.Project + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt +# Help Project output. For more information please see Qt Help Project / Virtual +# Folders (see: +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual-folders). +# The default value is: doc. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_VIRTUAL_FOLDER = doc + +# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom +# filter to add. For more information please see Qt Help Project / Custom +# Filters (see: +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-filters). +# This tag requires that the tag GENERATE_QHP is set to YES. 
+ +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the +# custom filter to add. For more information please see Qt Help Project / Custom +# Filters (see: +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-filters). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this +# project's filter section matches. Qt Help Project / Filter Attributes (see: +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#filter-attributes). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_SECT_FILTER_ATTRS = + +# The QHG_LOCATION tag can be used to specify the location (absolute path +# including file name) of Qt's qhelpgenerator. If non-empty doxygen will try to +# run qhelpgenerator on the generated .qhp file. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHG_LOCATION = + +# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be +# generated, together with the HTML files, they form an Eclipse help plugin. To +# install this plugin and make it available under the help contents menu in +# Eclipse, the contents of the directory containing the HTML and XML files needs +# to be copied into the plugins directory of eclipse. The name of the directory +# within the plugins directory should be the same as the ECLIPSE_DOC_ID value. +# After copying Eclipse needs to be restarted before the help appears. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_ECLIPSEHELP = NO + +# A unique identifier for the Eclipse help plugin. When installing the plugin +# the directory name containing the HTML and XML files should also have this +# name. Each documentation set should have its own identifier. +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES. + +ECLIPSE_DOC_ID = org.doxygen.Project + +# If you want full control over the layout of the generated HTML pages it might +# be necessary to disable the index and replace it with your own. The +# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top +# of each HTML page. A value of NO enables the index and the value YES disables +# it. Since the tabs in the index contain the same information as the navigation +# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +DISABLE_INDEX = NO + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. If the tag +# value is set to YES, a side panel will be generated containing a tree-like +# index structure (just like the one that is generated for HTML Help). For this +# to work a browser that supports JavaScript, DHTML, CSS and frames is required +# (i.e. any modern browser). Windows users are probably better off using the +# HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can +# further fine tune the look of the index (see "Fine-tuning the output"). As an +# example, the default style sheet generated by doxygen has an example that +# shows how to put an image at the root of the tree instead of the PROJECT_NAME. 
+# Since the tree basically has the same information as the tab index, you could +# consider setting DISABLE_INDEX to YES when enabling this option. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_TREEVIEW = NO + +# When both GENERATE_TREEVIEW and DISABLE_INDEX are set to YES, then the +# FULL_SIDEBAR option determines if the side bar is limited to only the treeview +# area (value NO) or if it should extend to the full height of the window (value +# YES). Setting this to YES gives a layout similar to +# https://docs.readthedocs.io with more room for contents, but less room for the +# project logo, title, and description. If either GENERATE_TREEVIEW or +# DISABLE_INDEX is set to NO, this option has no effect. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +FULL_SIDEBAR = NO + +# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that +# doxygen will group on one line in the generated HTML documentation. +# +# Note that a value of 0 will completely suppress the enum values from appearing +# in the overview section. +# Minimum value: 0, maximum value: 20, default value: 4. +# This tag requires that the tag GENERATE_HTML is set to YES. + +ENUM_VALUES_PER_LINE = 4 + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used +# to set the initial width (in pixels) of the frame in which the tree is shown. +# Minimum value: 0, maximum value: 1500, default value: 250. +# This tag requires that the tag GENERATE_HTML is set to YES. + +TREEVIEW_WIDTH = 250 + +# If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to +# external symbols imported via tag files in a separate window. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +EXT_LINKS_IN_WINDOW = NO + +# If the OBFUSCATE_EMAILS tag is set to YES, doxygen will obfuscate email +# addresses. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. + +OBFUSCATE_EMAILS = YES + +# If the HTML_FORMULA_FORMAT option is set to svg, doxygen will use the pdf2svg +# tool (see https://github.com/dawbarton/pdf2svg) or inkscape (see +# https://inkscape.org) to generate formulas as SVG images instead of PNGs for +# the HTML output. These images will generally look nicer at scaled resolutions. +# Possible values are: png (the default) and svg (looks nicer but requires the +# pdf2svg or inkscape tool). +# The default value is: png. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FORMULA_FORMAT = png + +# Use this tag to change the font size of LaTeX formulas included as images in +# the HTML documentation. When you change the font size after a successful +# doxygen run you need to manually remove any form_*.png images from the HTML +# output directory to force them to be regenerated. +# Minimum value: 8, maximum value: 50, default value: 10. +# This tag requires that the tag GENERATE_HTML is set to YES. + +FORMULA_FONTSIZE = 10 + +# The FORMULA_MACROFILE can contain LaTeX \newcommand and \renewcommand commands +# to create new LaTeX commands to be used in formulas as building blocks. See +# the section "Including formulas" for details. + +FORMULA_MACROFILE = + +# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see +# https://www.mathjax.org) which uses client side JavaScript for the rendering +# instead of using pre-rendered bitmaps. 
Use this if you do not have LaTeX +# installed or if you want to formulas look prettier in the HTML output. When +# enabled you may also need to install MathJax separately and configure the path +# to it using the MATHJAX_RELPATH option. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +USE_MATHJAX = NO + +# With MATHJAX_VERSION it is possible to specify the MathJax version to be used. +# Note that the different versions of MathJax have different requirements with +# regards to the different settings, so it is possible that also other MathJax +# settings have to be changed when switching between the different MathJax +# versions. +# Possible values are: MathJax_2 and MathJax_3. +# The default value is: MathJax_2. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_VERSION = MathJax_2 + +# When MathJax is enabled you can set the default output format to be used for +# the MathJax output. For more details about the output format see MathJax +# version 2 (see: +# http://docs.mathjax.org/en/v2.7-latest/output.html) and MathJax version 3 +# (see: +# http://docs.mathjax.org/en/latest/web/components/output.html). +# Possible values are: HTML-CSS (which is slower, but has the best +# compatibility. This is the name for Mathjax version 2, for MathJax version 3 +# this will be translated into chtml), NativeMML (i.e. MathML. Only supported +# for NathJax 2. For MathJax version 3 chtml will be used instead.), chtml (This +# is the name for Mathjax version 3, for MathJax version 2 this will be +# translated into HTML-CSS) and SVG. +# The default value is: HTML-CSS. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_FORMAT = HTML-CSS + +# When MathJax is enabled you need to specify the location relative to the HTML +# output directory using the MATHJAX_RELPATH option. The destination directory +# should contain the MathJax.js script. For instance, if the mathjax directory +# is located at the same level as the HTML output directory, then +# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax +# Content Delivery Network so you can quickly see the result without installing +# MathJax. However, it is strongly recommended to install a local copy of +# MathJax from https://www.mathjax.org before deployment. The default value is: +# - in case of MathJax version 2: https://cdn.jsdelivr.net/npm/mathjax@2 +# - in case of MathJax version 3: https://cdn.jsdelivr.net/npm/mathjax@3 +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_RELPATH = + +# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax +# extension names that should be enabled during MathJax rendering. For example +# for MathJax version 2 (see +# https://docs.mathjax.org/en/v2.7-latest/tex.html#tex-and-latex-extensions): +# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols +# For example for MathJax version 3 (see +# http://docs.mathjax.org/en/latest/input/tex/extensions/index.html): +# MATHJAX_EXTENSIONS = ams +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_EXTENSIONS = + +# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces +# of code that will be used on startup of the MathJax code. See the MathJax site +# (see: +# http://docs.mathjax.org/en/v2.7-latest/output.html) for more details. For an +# example see the documentation. +# This tag requires that the tag USE_MATHJAX is set to YES. 
+
+MATHJAX_CODEFILE =
+
+# When the SEARCHENGINE tag is enabled doxygen will generate a search box for
+# the HTML output. The underlying search engine uses javascript and DHTML and
+# should work on any modern browser. Note that when using HTML help
+# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET)
+# there is already a search function so this one should typically be disabled.
+# For large projects the javascript based search engine can be slow, then
+# enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to
+# search using the keyboard; to jump to the search box use <access key> + S
+# (what the <access key> is depends on the OS and browser, but it is typically
+# <CTRL>, <ALT>/