diff --git a/.github/workflows/nightly-build.yml b/.github/workflows/nightly-build.yml new file mode 100644 index 0000000000..2600bd6ff2 --- /dev/null +++ b/.github/workflows/nightly-build.yml @@ -0,0 +1,88 @@ +name: Nightly Build + +on: + push: + branches: + - development + workflow_dispatch: + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + with: + ref: development + + - name: Set up JDK 11 + uses: actions/setup-java@v4 + with: + distribution: 'temurin' + java-version: '11' + + - name: Build with Maven + run: mvn clean install + + - name: Rename and Copy Artifacts + run: | + short_commit=$(git rev-parse --short HEAD) + cp $(find ./corese-core/target/ -name 'corese-core-*-jar-with-dependencies.jar') ./corese-core-${short_commit}-nightly.jar + cp $(find ./corese-command/target/ -name 'corese-command-*.jar') ./corese-command-${short_commit}-nightly.jar + cp $(find ./corese-gui/target/ -name 'corese-gui-*.jar') ./corese-gui-${short_commit}-nightly.jar + cp $(find ./corese-jena/target/ -name 'corese-jena-*.jar') ./corese-jena-${short_commit}-nightly.jar + cp $(find ./corese-rdf4j/target/ -name 'corese-rdf4j-*.jar') ./corese-rdf4j-${short_commit}-nightly.jar + cp $(find ./corese-server/target/ -name 'corese-server-*.jar') ./corese-server-${short_commit}-nightly.jar + + - name: Upload Build Artifacts + uses: actions/upload-artifact@v4 + with: + name: nightly-build + path: ./corese-*-nightly.jar + + delete_previous_prerelease: + runs-on: ubuntu-latest + needs: build + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + with: + ref: development + + - name: Delete previous nightly tag + run: | + latest_nightly_tag=$(gh release list --limit 1 --exclude-drafts --json tagName,isPrerelease -q '.[] | select(.isPrerelease) | .tagName') + if [ -n "$latest_nightly_tag" ]; then + gh release delete "$latest_nightly_tag" -y + gh api --method DELETE "/repos/${{ github.repository 
}}/git/refs/tags/$latest_nightly_tag" + else + echo "No previous nightly tag found." + fi + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + create_prerelease: + runs-on: ubuntu-latest + needs: delete_previous_prerelease + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + with: + ref: development + + - name: Download Build Artifacts + uses: actions/download-artifact@v4 + with: + name: nightly-build + path: ./artifacts + + - name: Create new prerelease + run: | + short_commit=$(git rev-parse --short HEAD) + date=$(date +'%Y%m%d') + tag_name="nightly-${date}-${short_commit}" + gh release create "$tag_name" -p -t "Nightly Build ${short_commit}" -n "This is an automated nightly build. It is a development version and not stable." --target development + gh release upload "$tag_name" ./artifacts/corese-*-nightly.jar + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/sphinx.yml b/.github/workflows/sphinx.yml new file mode 100644 index 0000000000..fdc791d23c --- /dev/null +++ b/.github/workflows/sphinx.yml @@ -0,0 +1,36 @@ +name: "Sphinx: Render docs" + +on: push + +jobs: + build: + runs-on: ubuntu-latest + permissions: + contents: write + steps: + + - name: Checkout development branch + uses: actions/checkout@v4 + with: + ref: development + + - name: Build Sphinx Docs + uses: ammaraskar/sphinx-action@master + with: + pre-build-command: "apt-get install doxygen -y" + build-command: "make html" + docs-folder: "docs/" + + - name: Upload artifacts + uses: actions/upload-artifact@v4 + with: + name: html-docs + path: docs/build/html/ + + - name: Deploy + uses: peaceiris/actions-gh-pages@v3 + if: github.ref == 'refs/heads/development' + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: docs/build/html + diff --git a/.gitignore b/.gitignore index 229f1f1823..d872bdc3eb 100644 --- a/.gitignore +++ b/.gitignore @@ -99,4 +99,10 @@ earl-report-test.ttl ############################## ## flatpak 
############################## -.flatpak-builder/ \ No newline at end of file +.flatpak-builder/ + +############################## +## test report +############################## +earlReport.ttl +testReport.csv \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 7cea610224..ad99305c8a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,8 @@ # Corese Changelog +## 4.5.1 – + ## 4.5.0 – 2023/12/14 ### Added diff --git a/README.md b/README.md index ca1bd4f1ea..53271bcb75 100644 --- a/README.md +++ b/README.md @@ -41,21 +41,21 @@ There are several interfaces for Corese: fr.inria.corese corese-core - 4.5.0 + 4.5.1 fr.inria.corese corese-jena - 4.5.0 + 4.5.1 fr.inria.corese corese-rdf4j - 4.5.0 + 4.5.1 ``` @@ -74,8 +74,8 @@ docker run --name my-corese \ - Alternatively, download [Corese-server jar file](https://project.inria.fr/corese/jar/). ```sh -wget "https://github.com/Wimmics/corese/releases/download/release-4.5.0/corese-server-4.5.0.jar" -java -jar "-Dfile.encoding=UTF8" "corese-server-4.5.0.jar" +wget "https://github.com/Wimmics/corese/releases/download/release-4.5.1/corese-server-4.5.1.jar" +java -jar "-Dfile.encoding=UTF8" "corese-server-4.5.1.jar" ``` - Documentation: @@ -94,8 +94,8 @@ java -jar "-Dfile.encoding=UTF8" "corese-server-4.5.0.jar" - Or download [Corese-gui jar file](https://project.inria.fr/corese/jar/). ```sh -wget "https://github.com/Wimmics/corese/releases/download/release-4.5.0/corese-gui-4.5.0.jar" -java -jar "-Dfile.encoding=UTF8" "corese-gui-4.5.0.jar" +wget "https://github.com/Wimmics/corese/releases/download/release-4.5.1/corese-gui-4.5.1.jar" +java -jar "-Dfile.encoding=UTF8" "corese-gui-4.5.1.jar" ``` ### Corese-Command @@ -110,8 +110,8 @@ java -jar "-Dfile.encoding=UTF8" "corese-gui-4.5.0.jar" - Or download [Corese-command jar file](https://project.inria.fr/corese/jar/). 
```sh -wget "https://github.com/Wimmics/corese/releases/download/release-4.5.0/corese-command-4.5.0.jar" -java -jar "-Dfile.encoding=UTF8" "corese-command-4.5.0.jar" +wget "https://github.com/Wimmics/corese/releases/download/release-4.5.1/corese-command-4.5.1.jar" +java -jar "-Dfile.encoding=UTF8" "corese-command-4.5.1.jar" ``` - Alternatively, use the installation script for Linux and MacOS systems. @@ -135,8 +135,8 @@ curl -sSL https://files.inria.fr/corese/distrib/script/uninstall-corese-command. - Download [Corese-python jar file](https://project.inria.fr/corese/jar/). ```sh -wget "https://github.com/Wimmics/corese/releases/download/release-4.5.0/corese-library-python-4.5.0.jar" -java -jar "-Dfile.encoding=UTF8" "corese-library-python-4.5.0.jar" +wget "https://github.com/Wimmics/corese/releases/download/release-4.5.1/corese-library-python-4.5.1.jar" +java -jar "-Dfile.encoding=UTF8" "corese-library-python-4.5.1.jar" ``` - Documentation: [Getting Started With Corese-python](/docs/corese-python/Corese-library%20with%20Python.md) @@ -153,7 +153,7 @@ mvn clean install -DskipTests ## How to cite Corese -Use the "Cite this repository" option on the right side of this page. +Use the "Cite this repository" option on the right side of this page or Hal [hal-04170333](https://hal.science/hal-04170333). 
## Contributions and discussions diff --git a/corese-command/metadata/flatpak/fr.inria.corese.CoreseCommand.yml b/corese-command/metadata/flatpak/fr.inria.corese.CoreseCommand.yml index 8b2cd79e2c..b749b8c789 100644 --- a/corese-command/metadata/flatpak/fr.inria.corese.CoreseCommand.yml +++ b/corese-command/metadata/flatpak/fr.inria.corese.CoreseCommand.yml @@ -31,7 +31,7 @@ modules: sources: - type: file - url: https://github.com/Wimmics/corese/releases/download/release-4.5.0/corese-command-4.5.0.jar + url: https://github.com/Wimmics/corese/releases/download/release-4.5.1/corese-command-4.5.1.jar sha256: 2b15c46a9643eafb1119db9579e4f259e576647d9f322f437089960120960393 x-checker-data: type: json @@ -42,6 +42,6 @@ modules: - type: git dest: git_repo url: https://github.com/Wimmics/corese - tag: metadata-4.5.0 + tag: metadata-4.5.1 - type: file path: run.sh \ No newline at end of file diff --git a/corese-command/metadata/fr.inria.corese.CoreseCommand.appdata.xml b/corese-command/metadata/fr.inria.corese.CoreseCommand.appdata.xml index 73b86c1de8..2ac0af6595 100644 --- a/corese-command/metadata/fr.inria.corese.CoreseCommand.appdata.xml +++ b/corese-command/metadata/fr.inria.corese.CoreseCommand.appdata.xml @@ -109,6 +109,10 @@ + + + +
    @@ -142,27 +146,36 @@

    - Enhanced 'convert' and 'sparql' to accept URL and standard input. - - 'sparql' and 'convert' now support standard output and multiple file inputs. - - Expanded 'sparql' to handle directories, recursive directories, and various query types (SELECT, CONSTRUCT, ASK, etc.). + - 'sparql' and 'convert' now support standard output and multiple file + inputs. + - Expanded 'sparql' to handle directories, recursive directories, and various + query types (SELECT, CONSTRUCT, ASK, etc.). - User can choose result format in 'sparql', including markdown. - Added mime type as a format name. – Disabled owl:imports auto-import. - - Docker users can pass custom options and adjust log level for Corese-server. + - Docker users can pass custom options and adjust log level for + Corese-server. - Standardized format names in Corese-command. - Removed 'owlProfile' and 'ldscript'; to return after refactoring. - Fixed warning related to sun.reflect.Reflection.getCallerClass.

    - - Amélioration des commandes 'convert' et 'sparql' pour accepter les URL et l'entrée standard. - - 'sparql' et 'convert' supportent désormais la sortie standard et de multiples fichiers en entrée. - - Extension de 'sparql' pour gérer les répertoires, les sous-répertoires et divers types de requêtes (SELECT, CONSTRUCT, ASK, etc.). - - L'utilisateur peut choisir le format du résultat dans 'sparql', y compris en markdown. + - Amélioration des commandes 'convert' et 'sparql' pour accepter les URL et + l'entrée standard. + - 'sparql' et 'convert' supportent désormais la sortie standard et de multiples + fichiers en entrée. + - Extension de 'sparql' pour gérer les répertoires, les sous-répertoires et + divers types de requêtes (SELECT, CONSTRUCT, ASK, etc.). + - L'utilisateur peut choisir le format du résultat dans 'sparql', y compris en + markdown. - Ajout du type MIME comme nom de format. – Désactivation de l'auto-importation owl:imports. - - Les utilisateurs de Docker peuvent passer des options personnalisées et ajuster le niveau de journalisation pour Corese-server. + - Les utilisateurs de Docker peuvent passer des options personnalisées et + ajuster le niveau de journalisation pour Corese-server. - Standardisation des noms de format dans Corese-command. - - Retrait des commandes 'owlProfile' et 'ldscript'; reviendront après refonte. + - Retrait des commandes 'owlProfile' et 'ldscript'; reviendront après + refonte. - Correction d'un avertissement lié à sun.reflect.Reflection.getCallerClass.

    diff --git a/corese-command/pom.xml b/corese-command/pom.xml index 23706cabaa..f03b76be71 100644 --- a/corese-command/pom.xml +++ b/corese-command/pom.xml @@ -16,29 +16,46 @@ - - junit - junit - 4.13.2 - test - - + info.picocli picocli - 4.7.4 + 4.7.6 + ${project.parent.groupId} corese-core + + + org.junit.jupiter + junit-jupiter-api + 5.11.0-M1 + test + - jakarta.activation - jakarta.activation-api + org.junit.jupiter + junit-jupiter-engine + 5.11.0-M1 + test + + + org.wiremock + wiremock + 3.8.0 + test + + + org.assertj + assertj-core + 3.26.3 + test + diff --git a/corese-command/src/main/java/fr/inria/corese/command/App.java b/corese-command/src/main/java/fr/inria/corese/command/App.java index 2402abaf95..9ece295a00 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/App.java +++ b/corese-command/src/main/java/fr/inria/corese/command/App.java @@ -1,22 +1,44 @@ package fr.inria.corese.command; +import fr.inria.corese.command.programs.Canonicalize; import fr.inria.corese.command.programs.Convert; import fr.inria.corese.command.programs.RemoteSparql; import fr.inria.corese.command.programs.Shacl; import fr.inria.corese.command.programs.Sparql; +import picocli.AutoComplete.GenerateCompletion; import picocli.CommandLine; import picocli.CommandLine.Command; +import picocli.CommandLine.Help.Ansi.Style; +import picocli.CommandLine.Help.ColorScheme; @Command(name = "Corese-command", version = App.version, mixinStandardHelpOptions = true, subcommands = { - Convert.class, Sparql.class, Shacl.class, RemoteSparql.class + Convert.class, Sparql.class, Shacl.class, RemoteSparql.class, Canonicalize.class, GenerateCompletion.class }) public final class App implements Runnable { - public final static String version = "4.5.0"; + public final static String version = "4.5.1"; public static void main(String[] args) { - int exitCode = new CommandLine(new App()).execute(args); + // Define the color scheme + ColorScheme colorScheme = new ColorScheme.Builder() + 
.commands(Style.bold) // Commands in blue + .options(Style.fg_yellow) // Options in yellow + .parameters(Style.fg_cyan, Style.bold) // Parameters in cyan and bold + .optionParams(Style.italic, Style.fg_cyan) // Option parameters in italic + .errors(Style.fg_red, Style.bold) // Errors in red and bold + .stackTraces(Style.italic) // Stack traces in italic + .applySystemProperties() // Apply system properties for colors + .build(); + + CommandLine commandLine = new CommandLine(new App()).setColorScheme(colorScheme); + + // Hide the generate-completion command + CommandLine gen = commandLine.getSubcommands().get("generate-completion"); + gen.getCommandSpec().usageMessage().hidden(true); + + // Execute the command + int exitCode = commandLine.execute(args); System.exit(exitCode); } diff --git a/corese-command/src/main/java/fr/inria/corese/command/programs/AbstractCommand.java b/corese-command/src/main/java/fr/inria/corese/command/programs/AbstractCommand.java new file mode 100644 index 0000000000..d5b8c24c2e --- /dev/null +++ b/corese-command/src/main/java/fr/inria/corese/command/programs/AbstractCommand.java @@ -0,0 +1,85 @@ +package fr.inria.corese.command.programs; + +import java.nio.file.Path; +import java.util.Optional; +import java.util.concurrent.Callable; + +import fr.inria.corese.command.App; +import fr.inria.corese.command.utils.ConfigManager; +import fr.inria.corese.command.utils.exporter.rdf.RdfDataExporter; +import fr.inria.corese.core.util.Property; +import fr.inria.corese.core.util.Property.Value; +import picocli.CommandLine.Command; +import picocli.CommandLine.Model.CommandSpec; +import picocli.CommandLine.Option; +import picocli.CommandLine.Spec; + +/** + * Abstract class for all commands. + * + * This class provides common options and methods for all commands. 
+ */ +@Command(version = App.version) +public abstract class AbstractCommand implements Callable { + + /////////////// + // Constants // + /////////////// + + // Exit codes + protected final int ERROR_EXIT_CODE_SUCCESS = 0; + protected final int ERROR_EXIT_CODE_ERROR = 1; + + ///////////// + // Options // + ///////////// + + @Option(names = { "-o", + "--output-data" }, description = "Specifies the output file path. If not provided, the result will be written to standard output.", arity = "0..1", fallbackValue = RdfDataExporter.DEFAULT_OUTPUT) + protected Path output; + + @Option(names = { "-c", "--config", + "--init" }, description = "Specifies the path to a configuration file. If not provided, the default configuration file will be used.", required = false) + private Path configFilePath; + + @Option(names = { "-v", + "--verbose" }, description = "Enables verbose mode, printing more information about the execution of the command.", negatable = true) + protected boolean verbose = false; + + @Option(names = { "-w", + "--owl-import" }, description = "Disables the automatic importation of ontologies specified in 'owl:imports' statements. When this flag is set, the application will not fetch and include referenced ontologies. 
Default is '${DEFAULT-VALUE}'.", required = false, defaultValue = "false", negatable = true) + private boolean noOwlImport; + + //////////////// + // Properties // + //////////////// + + // Command specification + @Spec + protected CommandSpec spec; + + // Output + protected Boolean outputToFileIsDefault = false; + + ///////////// + // Methods // + ///////////// + + @Override + public Integer call() { + + // Load configuration file + Optional configFilePath = Optional.ofNullable(this.configFilePath); + if (configFilePath.isPresent()) { + ConfigManager.loadFromFile(configFilePath.get(), this.spec, this.verbose); + } else { + ConfigManager.loadDefaultConfig(this.spec, this.verbose); + } + + // Set owl import + Property.set(Value.DISABLE_OWL_AUTO_IMPORT, this.noOwlImport); + + return 0; + } + +} diff --git a/corese-command/src/main/java/fr/inria/corese/command/programs/AbstractInputCommand.java b/corese-command/src/main/java/fr/inria/corese/command/programs/AbstractInputCommand.java new file mode 100644 index 0000000000..2a2f31181f --- /dev/null +++ b/corese-command/src/main/java/fr/inria/corese/command/programs/AbstractInputCommand.java @@ -0,0 +1,42 @@ +package fr.inria.corese.command.programs; + +import java.nio.file.Path; + +import picocli.CommandLine.Option; + +public abstract class AbstractInputCommand extends AbstractCommand { + + @Option(names = { "-i", + "--input-data" }, description = "Specifies the path or URL of the input RDF data. Multiple values are allowed.", arity = "1...") + protected String[] inputsRdfData; + + @Option(names = { "-R", + "--recursive" }, description = "If set to true and an input is a directory, all files in the directory will be loaded recursively. Default value: ${DEFAULT-VALUE}.", defaultValue = "false") + protected boolean recursive = false; + + @Override + public Integer call() { + super.call(); + + // Check input values + this.checkInputValues(); + + return 0; + } + + /** + * Check if the input values are correct. 
+ * + * @throws IllegalArgumentException if input path is same as output path. + */ + private void checkInputValues() throws IllegalArgumentException { + if (this.inputsRdfData != null && this.output != null) { + for (String input : this.inputsRdfData) { + if (Path.of(input).compareTo(this.output) == 0) { + throw new IllegalArgumentException("Input path cannot be same as output path: " + input); + } + } + } + } + +} \ No newline at end of file diff --git a/corese-command/src/main/java/fr/inria/corese/command/programs/Canonicalize.java b/corese-command/src/main/java/fr/inria/corese/command/programs/Canonicalize.java new file mode 100644 index 0000000000..088da5552c --- /dev/null +++ b/corese-command/src/main/java/fr/inria/corese/command/programs/Canonicalize.java @@ -0,0 +1,46 @@ +package fr.inria.corese.command.programs; + +import fr.inria.corese.command.utils.exporter.rdf.EnumCanonicAlgo; +import fr.inria.corese.command.utils.exporter.rdf.RdfDataCanonicalizer; +import fr.inria.corese.command.utils.loader.rdf.EnumRdfInputFormat; +import fr.inria.corese.command.utils.loader.rdf.RdfDataLoader; +import fr.inria.corese.core.Graph; +import picocli.CommandLine.Command; +import picocli.CommandLine.Option; + +@Command(name = "canonicalize", description = "Canonicalize an RDF file to a specific format.", mixinStandardHelpOptions = true) +public class Canonicalize extends AbstractInputCommand { + + @Option(names = { "-f", "-if", + "--input-format" }, description = "Specifies the RDF serialization format of the input file. Available options are: :@|fg(225) ${COMPLETION-CANDIDATES}|@.") + private EnumRdfInputFormat inputFormat; + + @Option(names = { "-a", "-ca", "-r", "-of", + "--canonical-algo" }, required = true, description = "Specifies the canonicalization algorithm to be applied to the input file. Available options are: :@|fg(225) ${COMPLETION-CANDIDATES}|@. 
The default algorithm is ${DEFAULT-VALUE}.", defaultValue = "rdfc-1.0") + private EnumCanonicAlgo canonicalAlgo; + + public Canonicalize() { + } + + @Override + public Integer call() { + + super.call(); + + try { + // Load the input file(s) + RdfDataLoader loader = new RdfDataLoader(this.spec, this.verbose); + Graph graph = loader.load(this.inputsRdfData, this.inputFormat, this.recursive); + + // Canonicalize and export the graph + RdfDataCanonicalizer rdfCanonicalizer = new RdfDataCanonicalizer(this.spec, this.verbose, this.output); + rdfCanonicalizer.export(graph, this.canonicalAlgo); + + return this.ERROR_EXIT_CODE_SUCCESS; + } catch (IllegalArgumentException e) { + this.spec.commandLine().getErr().println("Error: " + e.getMessage()); + return this.ERROR_EXIT_CODE_ERROR; + } + } + +} diff --git a/corese-command/src/main/java/fr/inria/corese/command/programs/Convert.java b/corese-command/src/main/java/fr/inria/corese/command/programs/Convert.java index a88eedee23..c3dbbaec89 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/programs/Convert.java +++ b/corese-command/src/main/java/fr/inria/corese/command/programs/Convert.java @@ -1,67 +1,24 @@ package fr.inria.corese.command.programs; -import java.io.IOException; -import java.net.URL; -import java.nio.file.Path; -import java.util.Optional; -import java.util.concurrent.Callable; - import fr.inria.corese.command.App; -import fr.inria.corese.command.utils.ConfigManager; -import fr.inria.corese.command.utils.ConvertString; -import fr.inria.corese.command.utils.format.EnumInputFormat; -import fr.inria.corese.command.utils.format.EnumOutputFormat; -import fr.inria.corese.command.utils.rdf.RdfDataExporter; -import fr.inria.corese.command.utils.rdf.RdfDataLoader; +import fr.inria.corese.command.utils.exporter.rdf.EnumRdfOutputFormat; +import fr.inria.corese.command.utils.exporter.rdf.RdfDataExporter; +import fr.inria.corese.command.utils.loader.rdf.EnumRdfInputFormat; +import 
fr.inria.corese.command.utils.loader.rdf.RdfDataLoader; import fr.inria.corese.core.Graph; -import fr.inria.corese.core.util.Property; -import fr.inria.corese.core.util.Property.Value; import picocli.CommandLine.Command; -import picocli.CommandLine.Model.CommandSpec; import picocli.CommandLine.Option; -import picocli.CommandLine.Spec; @Command(name = "convert", version = App.version, description = "Convert an RDF file from one serialization format to another.", mixinStandardHelpOptions = true) -public class Convert implements Callable { - - private final String DEFAULT_OUTPUT_FILE_NAME = "output"; - private final int ERROR_EXIT_CODE_SUCCESS = 0; - private final int ERROR_EXIT_CODE_ERROR = 1; - - @Spec - CommandSpec spec; - - @Option(names = { "-i", "--input-data" }, description = "Path or URL of the file that needs to be converted.") - private String input; +public class Convert extends AbstractInputCommand { @Option(names = { "-f", "-if", - "--input-format" }, description = "RDF serialization format of the input file. Possible values: ${COMPLETION-CANDIDATES}.") - private EnumInputFormat inputFormat = null; - - @Option(names = { "-o", - "--output-data" }, description = "Output file path. If not provided, the result will be written to standard output.", arity = "0..1", fallbackValue = DEFAULT_OUTPUT_FILE_NAME) - private Path output; + "--input-format" }, description = "Specifies the RDF serialization format of the input file. Possible values:@|fg(225) ${COMPLETION-CANDIDATES}|@.") + private EnumRdfInputFormat inputFormat = null; @Option(names = { "-r", "-of", - "--output-format" }, required = true, description = "Serialization format to which the input file should be converted. 
Possible values: ${COMPLETION-CANDIDATES}.") - private EnumOutputFormat outputFormat; - - @Option(names = { "-v", - "--verbose" }, description = "Prints more information about the execution of the command.") - private boolean verbose = false; - - @Option(names = { "-c", "--config", - "--init" }, description = "Path to a configuration file. If not provided, the default configuration file will be used.", required = false) - private Path configFilePath; - - @Option(names = { "-w", - "--no-owl-import" }, description = "Disables the automatic importation of ontologies specified in 'owl:imports' statements. When this flag is set, the application will not fetch and include referenced ontologies.", required = false, defaultValue = "false") - private boolean noOwlImport; - - private Graph graph = Graph.create(); - - private boolean outputFormatIsDefined = false; - private boolean isDefaultOutputName = false; + "--output-format" }, required = true, description = "Specifies the RDF serialization format of the output file. 
Possible values::@|fg(225)${COMPLETION-CANDIDATES}|@.") + private EnumRdfOutputFormat outputFormat; public Convert() { } @@ -69,107 +26,22 @@ public Convert() { @Override public Integer call() { - try { - - // Load configuration file - Optional configFilePath = Optional.ofNullable(this.configFilePath); - if (configFilePath.isPresent()) { - ConfigManager.loadFromFile(configFilePath.get(), this.spec, this.verbose); - } else { - ConfigManager.loadDefaultConfig(this.spec, this.verbose); - } + super.call(); - // Set owl import - Property.set(Value.DISABLE_OWL_AUTO_IMPORT, this.noOwlImport); - - // Check if output format is defined - this.outputFormatIsDefined = this.output != null; - - // Check if output file name is default - this.isDefaultOutputName = this.output != null - && DEFAULT_OUTPUT_FILE_NAME.equals(this.output.toString()); + try { + // Load the input file(s) + RdfDataLoader loader = new RdfDataLoader(this.spec, this.verbose); + Graph graph = loader.load(this.inputsRdfData, this.inputFormat, this.recursive); - // Execute command - this.checkInputValues(); - this.loadInputFile(); - this.exportGraph(); + // Export the graph + RdfDataExporter rdfExporter = new RdfDataExporter(this.spec, this.verbose, this.output); + rdfExporter.export(graph, this.outputFormat); return this.ERROR_EXIT_CODE_SUCCESS; - } catch (IllegalArgumentException | IOException e) { - this.spec.commandLine().getErr().println("\u001B[31mError: " + e.getMessage() + "\u001B[0m"); + } catch (IllegalArgumentException e) { + this.spec.commandLine().getErr().println("Error: " + e.getMessage()); return this.ERROR_EXIT_CODE_ERROR; } } - /** - * Check if the input values are correct. - * - * @throws IllegalArgumentException if input path is same as output path. 
- */ - private void checkInputValues() throws IllegalArgumentException { - if (this.input != null - && this.output != null - && this.input.equals(this.output.toString())) { - throw new IllegalArgumentException("Input path cannot be the same as output path."); - } - } - - /** - * Load the input file. - * - * @throws IllegalArgumentException if the input format is not supported. - * @throws IOException if an I/O error occurs while loading the - * input file. - */ - private void loadInputFile() throws IllegalArgumentException, IOException { - Optional url = ConvertString.toUrl(this.input); - Optional path = ConvertString.toPath(this.input); - - if (input == null) { - // if input is not provided, load from standard input - RdfDataLoader.LoadFromStdin(this.inputFormat, this.graph, this.spec, this.verbose); - } else if (url.isPresent()) { - // if input is a URL, load from the given URL - RdfDataLoader.loadFromURL(url.get(), this.inputFormat, this.graph, this.spec, this.verbose); - } else if (path.isPresent()) { - // if input is provided, load from the given file - RdfDataLoader.loadFromFile(path.get(), this.inputFormat, this.graph, this.spec, this.verbose); - } else { - throw new IllegalArgumentException("Input path is not a valid URL or file path: " + this.input); - } - } - - /** - * Export the graph. - * - * @throws IOException if an I/O error occurs while exporting the graph. - */ - private void exportGraph() throws IOException { - - if (this.verbose) { - this.spec.commandLine().getOut().println("Converting file to " + this.outputFormat + " format..."); - } - - Path outputFileName; - - // Set output file name - if (this.outputFormatIsDefined && !this.isDefaultOutputName) { - outputFileName = this.output; - } else { - outputFileName = Path.of(this.DEFAULT_OUTPUT_FILE_NAME + "." 
+ this.outputFormat.getExtention()); - } - - // Export the graph - if (this.output == null) { - RdfDataExporter.exportToStdout(this.outputFormat, this.graph, this.spec, this.verbose); - } else { - RdfDataExporter.exportToFile( - outputFileName, - this.outputFormat, - this.graph, - this.spec, - this.verbose); - } - } - } diff --git a/corese-command/src/main/java/fr/inria/corese/command/programs/RemoteSparql.java b/corese-command/src/main/java/fr/inria/corese/command/programs/RemoteSparql.java index aa291f94f7..ed51289737 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/programs/RemoteSparql.java +++ b/corese-command/src/main/java/fr/inria/corese/command/programs/RemoteSparql.java @@ -1,116 +1,75 @@ package fr.inria.corese.command.programs; import java.io.IOException; -import java.net.URL; import java.nio.charset.StandardCharsets; -import java.nio.file.Path; import java.util.List; -import java.util.Optional; -import java.util.concurrent.Callable; import com.github.jsonldjava.shaded.com.google.common.io.Files; -import fr.inria.corese.command.utils.ConfigManager; -import fr.inria.corese.command.utils.ConvertString; -import fr.inria.corese.command.utils.TestType; +import fr.inria.corese.command.App; import fr.inria.corese.command.utils.http.EnumRequestMethod; import fr.inria.corese.command.utils.http.SparqlHttpClient; -import fr.inria.corese.command.utils.sparql.SparqlQueryLoader; -import fr.inria.corese.core.util.Property; -import fr.inria.corese.core.util.Property.Value; +import fr.inria.corese.command.utils.loader.sparql.SparqlQueryLoader; import picocli.CommandLine.Command; -import picocli.CommandLine.Model.CommandSpec; import picocli.CommandLine.Option; -import picocli.CommandLine.Spec; -@Command(name = "remote-sparql", version = "1.0", description = "Execute a SPARQL query on a remote endpoint.", mixinStandardHelpOptions = true) -public class RemoteSparql implements Callable { +@Command(name = "remote-sparql", version = App.version, description = 
"Execute a SPARQL query on a remote endpoint.", mixinStandardHelpOptions = true) +public class RemoteSparql extends AbstractCommand { - private final int ERROR_EXIT_CODE_SUCCESS = 0; - private final int ERROR_EXIT_CODE_ERROR = 1; - - private final String DEFAULT_OUTPUT_FILE_NAME = "output"; - private boolean outputPathIsDefined; - - @Spec - private CommandSpec spec; - - @Option(names = { "-q", "--query" }, description = "SPARQL query to execute", required = false) + @Option(names = { "-q", + "--query" }, description = "Specifies the SPARQL query to execute. This can be provided as a URL or a file path.", required = false) private String queryUrlOrFile; - @Option(names = { "-e", "--endpoint" }, description = "SPARQL endpoint URL", required = true) + @Option(names = { "-e", "--endpoint" }, description = "Specifies the SPARQL endpoint URL.", required = true) private String endpoint_url; - @Option(names = { "-H", "--header" }, description = "HTTP header to add to the request", arity = "0..") + @Option(names = { "-H", + "--header" }, description = "Adds an HTTP header to the request. Multiple headers can be specified.", arity = "0..") private List headers; - @Option(names = { "-a", "-of", "--accept" }, description = "Accept header value") + @Option(names = { "-a", "-of", + "--accept" }, description = "Specifies the Accept header value for the HTTP request.") private String accept; @Option(names = { "-m", - "--request-method" }, description = "HTTP request method to use (GET, POST-urlencoded, POST-direct).", defaultValue = "GET") + "--request-method" }, description = "Specifies the HTTP request method to use. 
Possible values are: :@|fg(225) ${COMPLETION-CANDIDATES}|@.") private EnumRequestMethod requestMethod; - @Option(names = { "-v", - "--verbose" }, description = "Prints more information about the execution of the command..", required = false, defaultValue = "false") - private boolean verbose; - @Option(names = { "-r", - "--max-redirection" }, description = "Maximum number of redirections to follow", defaultValue = "5") + "--max-redirection" }, description = "Specifies the maximum number of redirections to follow. Default value: ${DEFAULT-VALUE}.", defaultValue = "5") private int maxRedirection; - @Option(names = { "-d", "--default-graph" }, description = "Default graph URI", arity = "0..") + @Option(names = { "-d", + "--default-graph" }, description = "Specifies the default graph URI. Multiple URIs can be specified.", arity = "0..") private List default_graph; - @Option(names = { "-n", "--named-graph" }, description = "Named graph URI", arity = "0..") + @Option(names = { "-n", + "--named-graph" }, description = "Specifies the named graph URI. Multiple URIs can be specified.", arity = "0..") private List named_graph; - @Option(names = { "-o", - "--output-data" }, description = "Output file path. If not provided, the result will be written to standard output.", arity = "0..1", fallbackValue = DEFAULT_OUTPUT_FILE_NAME) - private Path output; - - @Option(names = { "-c", - "--config", - "--init" }, description = "Path to a configuration file. If not provided, the default configuration file will be used.", required = false) - private Path configFilePath; - @Option(names = { "-i", - "--ignore-query-validation" }, description = "Ignore query validation.", required = false, defaultValue = "false") + "--ignore-query-validation" }, description = "Ignores query validation if set to true. 
Default value: ${DEFAULT-VALUE}.", required = false, defaultValue = "false") private boolean ignoreQueryValidation; - @Option(names = { "-w", - "--no-owl-import" }, description = "Disables the automatic importation of ontologies specified in 'owl:imports' statements. When this flag is set, the application will not fetch and include referenced ontologies.", required = false, defaultValue = "false") - private boolean noOwlImport; - private String query; private final String DEFAULT_ACCEPT_HEADER = "text/csv"; @Override - public Integer call() throws Exception { - try { + public Integer call() { + + super.call(); - // Check if output is defined - this.outputPathIsDefined = this.output != null; + try { // if accept is not defined, set it to text/csv if (this.accept == null && !this.containAcceptHeader(this.headers)) { this.accept = DEFAULT_ACCEPT_HEADER; } - // Load configuration file - Optional configFilePath = Optional.ofNullable(this.configFilePath); - if (configFilePath.isPresent()) { - ConfigManager.loadFromFile(configFilePath.get(), this.spec, this.verbose); - } else { - ConfigManager.loadDefaultConfig(this.spec, this.verbose); - } - - // Set owl import - Property.set(Value.DISABLE_OWL_AUTO_IMPORT, this.noOwlImport); - // Load query - this.loadQuery(); + SparqlQueryLoader queryLoader = new SparqlQueryLoader(this.spec, this.verbose); + this.query = queryLoader.load(this.queryUrlOrFile); // Execute query String res = this.sendRequest(); @@ -119,7 +78,7 @@ public Integer call() throws Exception { this.exportResult(res); } catch (Exception e) { - this.spec.commandLine().getErr().println("\u001B[31mError: " + e.getMessage() + "\u001B[0m"); + this.spec.commandLine().getErr().println("Error: " + e.getMessage()); return this.ERROR_EXIT_CODE_ERROR; } @@ -147,40 +106,6 @@ private Boolean containAcceptHeader(List headers) { return false; } - /** - * Load the query from the query string or from the query file. - * - * @throws IOException If the query file cannot be read. 
- */ - private void loadQuery() throws IOException { - - // If query is not defined, read from standard input - if (this.queryUrlOrFile == null) { - this.query = SparqlQueryLoader.loadFromInputStream(System.in, this.spec, this.verbose); - - if (this.query == null || this.query.isEmpty()) { - throw new RuntimeException("The query is not a valid SPARQL query, a URL or a file path."); - } - } else { - Optional path = ConvertString.toPath(this.queryUrlOrFile); - Optional url = ConvertString.toUrl(this.queryUrlOrFile); - Boolean isSparqlQuery = TestType.isSparqlQuery(this.queryUrlOrFile); - - if (isSparqlQuery) { - // if query is a SPARQL query - this.query = this.queryUrlOrFile; - } else if (url.isPresent()) { - // if query is a URL - this.query = SparqlQueryLoader.loadFromUrl(url.get(), this.spec, this.verbose); - } else if (path.isPresent()) { - // if query is a path - this.query = SparqlQueryLoader.loadFromFile(path.get(), this.spec, this.verbose); - } else { - throw new RuntimeException("The query is not a valid SPARQL query, a URL or a file path."); - } - } - } - /** * Send the SPARQL query to the endpoint. 
* @@ -191,7 +116,7 @@ public String sendRequest() throws Exception { SparqlHttpClient client = new SparqlHttpClient(this.endpoint_url); this.parseHeader(client); - client.setQueryMethod(this.requestMethod); + client.setRequestMethod(this.requestMethod); client.setVerbose(this.verbose); client.setMaxRedirection(this.maxRedirection); @@ -230,7 +155,7 @@ private void parseHeader(SparqlHttpClient client) { */ private void exportResult(String response) { - if (this.outputPathIsDefined) { + if (this.output != null) { // Write result to file try { Files.write(response.getBytes(StandardCharsets.UTF_8), this.output.toFile()); diff --git a/corese-command/src/main/java/fr/inria/corese/command/programs/Shacl.java b/corese-command/src/main/java/fr/inria/corese/command/programs/Shacl.java index a0487e0e44..ef045e25de 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/programs/Shacl.java +++ b/corese-command/src/main/java/fr/inria/corese/command/programs/Shacl.java @@ -1,234 +1,85 @@ package fr.inria.corese.command.programs; -import java.io.IOException; -import java.net.URL; -import java.nio.file.Path; -import java.util.Optional; -import java.util.concurrent.Callable; - import fr.inria.corese.command.App; -import fr.inria.corese.command.utils.ConfigManager; -import fr.inria.corese.command.utils.ConvertString; import fr.inria.corese.command.utils.TestType; -import fr.inria.corese.command.utils.format.EnumInputFormat; -import fr.inria.corese.command.utils.format.EnumOutputFormat; -import fr.inria.corese.command.utils.rdf.RdfDataExporter; -import fr.inria.corese.command.utils.rdf.RdfDataLoader; +import fr.inria.corese.command.utils.exporter.rdf.EnumRdfOutputFormat; +import fr.inria.corese.command.utils.exporter.rdf.RdfDataExporter; +import fr.inria.corese.command.utils.loader.rdf.EnumRdfInputFormat; +import fr.inria.corese.command.utils.loader.rdf.RdfDataLoader; import fr.inria.corese.core.Graph; -import fr.inria.corese.core.util.Property; -import 
fr.inria.corese.core.util.Property.Value; import picocli.CommandLine.Command; -import picocli.CommandLine.Model.CommandSpec; import picocli.CommandLine.Option; -import picocli.CommandLine.Spec; @Command(name = "shacl", version = App.version, description = "Run SHACL validation on a RDF dataset.", mixinStandardHelpOptions = true) -public class Shacl implements Callable { - - private final String DEFAULT_OUTPUT_FILE_NAME = "output"; - - @Spec - private CommandSpec spec; +public class Shacl extends AbstractInputCommand { @Option(names = { "-f", "-if", - "--input-format" }, description = "RDF serialization format of the input file. Possible values: ${COMPLETION-CANDIDATES}.") - private EnumInputFormat inputFormat = null; - - @Option(names = { "-i", - "--input-data" }, description = "Path or URL of the file that needs to be converted.", arity = "1...") - private String[] rdfData; + "--input-format" }, description = "Specifies the RDF serialization format of the input file. Possible values are: :@|fg(225) ${COMPLETION-CANDIDATES}|@.") + private EnumRdfInputFormat inputFormat = null; @Option(names = { "-a", "-sf", - "--shapes-format" }, description = "Serialization format of the SHACL shapes. Possible values: ${COMPLETION-CANDIDATES}.", defaultValue = "TURTLE") - private EnumInputFormat reportFormat = null; + "--shapes-format" }, description = "Specifies the serialization format of the SHACL shapes. Possible values are: :@|fg(225) ${COMPLETION-CANDIDATES}|@.)") + private EnumRdfInputFormat reportFormat = null; @Option(names = { "-s", - "--shapes" }, description = "Path or URL of the file containing the SHACL shapes.", arity = "1...", required = true) + "--shapes" }, description = "Specifies the path or URL of the file containing the SHACL shapes.", arity = "1...", required = true) private String[] shaclShapes; @Option(names = { "-r", "-of", - "--output-format" }, description = "Serialization format of the validation report. 
Possible values: ${COMPLETION-CANDIDATES}.", defaultValue = "TURTLE") - private EnumOutputFormat outputFormat = null; - - @Option(names = { "-o", - "--output-data" }, description = "Output file path. If not provided, the result will be written to standard output.", arity = "0..1", fallbackValue = DEFAULT_OUTPUT_FILE_NAME) - private Path output; - - @Option(names = { "-R", - "--recursive" }, description = "If an input is a directory, load all the files in the directory recursively.") - private boolean recursive = false; - - @Option(names = { "-v", "--verbose" }, description = "Prints more information about the execution of the command.") - private boolean verbose = false; - - @Option(names = { "-c", "--config", - "--init" }, description = "Path to a configuration file. If not provided, the default configuration file will be used.", required = false) - private Path configFilePath; - - @Option(names = { "-w", - "--no-owl-import" }, description = "Disables the automatic importation of ontologies specified in 'owl:imports' statements. When this flag is set, the application will not fetch and include referenced ontologies.", required = false, defaultValue = "false") - private boolean noOwlImport; - - private Graph dataGraph = Graph.create(); - private Graph shapesGraph = Graph.create(); - private Graph reportGraph = Graph.create(); - - private boolean outputFormatIsDefined = false; - private boolean isDefaultOutputName = false; + "--output-format" }, description = "Specifies the serialization format of the validation report. Possible values are: :@|fg(225) ${COMPLETION-CANDIDATES}|@. 
Default value: ${DEFAULT-VALUE}.", defaultValue = "TURTLE") + private EnumRdfOutputFormat outputFormat = null; - private final int ERROR_EXIT_CODE_SUCCESS = 0; - private final int ERROR_EXIT_CODE_ERROR = 1; - - @Override public Integer call() { - try { - // Load configuration file - Optional configFilePath = Optional.ofNullable(this.configFilePath); - if (configFilePath.isPresent()) { - ConfigManager.loadFromFile(configFilePath.get(), this.spec, this.verbose); - } else { - ConfigManager.loadDefaultConfig(this.spec, this.verbose); - } - - // Set owl import - Property.set(Value.DISABLE_OWL_AUTO_IMPORT, this.noOwlImport); - - // Check if output format is defined - this.outputFormatIsDefined = this.output != null; - - // Check if output file name is default - this.isDefaultOutputName = this.output != null - && DEFAULT_OUTPUT_FILE_NAME.equals(this.output.toString()); + super.call(); + try { // Load input file(s) - this.loadInputFile(false); + RdfDataLoader loader = new RdfDataLoader(this.spec, this.verbose); + Graph dataGraph = loader.load(this.inputsRdfData, this.inputFormat, this.recursive); // Load shapes file(s) - this.loadInputFile(true); + Graph shapesGraph = loader.load(this.shaclShapes, this.reportFormat, this.recursive); // Check if shapes graph contains SHACL shapes - if (!TestType.isShacl(this.shapesGraph)) { + if (!TestType.isShacl(shapesGraph)) { throw new IllegalArgumentException("No SHACL shapes found in the input file(s)."); } // Evaluation of SHACL shapes - this.execute(); + Graph reportGraph = this.evaluateSHACLShapes(dataGraph, shapesGraph); // Export the report graph - this.exportGraph(); + RdfDataExporter rdfExporter = new RdfDataExporter(this.spec, this.verbose, this.output); + rdfExporter.export(reportGraph, this.outputFormat); return this.ERROR_EXIT_CODE_SUCCESS; } catch (Exception e) { - this.spec.commandLine().getErr().println("\u001B[31mError: " + e.getMessage() + "\u001B[0m"); + this.spec.commandLine().getErr().println("Error: " + 
e.getMessage()); return this.ERROR_EXIT_CODE_ERROR; } } /** - * Load the input file(s) into a graph. - * - * @throws IOException If the file cannot be read. + * Evaluate SHACL shapes. + * + * @param dataGraph The data graph. + * @param shapesGraph The shapes graph. + * @return The report graph. + * @throws Exception If an error occurs while evaluating SHACL shapes. */ - private void loadInputFile(boolean isShapes) throws IOException { - - if (rdfData == null && !isShapes) { - // if input is not provided, load from standard input - RdfDataLoader.LoadFromStdin( - this.inputFormat, - isShapes ? this.shapesGraph : this.dataGraph, - this.spec, - this.verbose); - } else { - for (String input : isShapes ? shaclShapes : rdfData) { - Optional url = ConvertString.toUrl(input); - Optional path = ConvertString.toPath(input); - - if (url.isPresent()) { - RdfDataLoader.loadFromURL( - url.get(), - isShapes ? this.reportFormat : this.inputFormat, - isShapes ? this.shapesGraph : this.dataGraph, - this.spec, - this.verbose); - } else if (path.isPresent()) { - // if input is a path - if (path.get().toFile().isDirectory()) { - // if input is a directory - RdfDataLoader.loadFromDirectory( - path.get(), - this.inputFormat, - isShapes ? this.shapesGraph : this.dataGraph, - this.recursive, - this.spec, - this.verbose); - } else { - // if input is a file - RdfDataLoader.loadFromFile( - path.get(), - isShapes ? this.reportFormat : this.inputFormat, - isShapes ? this.shapesGraph : this.dataGraph, - this.spec, - this.verbose); - } - - } else { - throw new IllegalArgumentException( - "Input path is not a valid URL, file path or directory: " + input); - } - } + private Graph evaluateSHACLShapes(Graph dataGraph, Graph shapesGraph) throws Exception { + + if (this.verbose) { + this.spec.commandLine().getErr().println("Evaluating SHACL shapes..."); } - } - /** - * Execute the SHACL validation. - * - * @throws Exception If an error occurs while evaluating the SHACL shapes. 
- */ - private void execute() throws Exception { fr.inria.corese.core.shacl.Shacl shacl = new fr.inria.corese.core.shacl.Shacl(dataGraph, shapesGraph); try { - - if (this.verbose) { - this.spec.commandLine().getErr().println("Evaluating SHACL shapes..."); - } - - this.reportGraph = shacl.eval(); + return shacl.eval(); } catch (Exception e) { throw new Exception("Error while evaluating SHACL shapes: " + e.getMessage(), e); } } - /** - * Export the report graph. - * - * @throws IOException if an I/O error occurs while exporting the report graph. - */ - private void exportGraph() throws IOException { - - if (this.verbose) { - this.spec.commandLine().getOut().println("Exporting report to " + this.outputFormat + " format..."); - } - - Path outputFileName; - - // Set output file name - if (this.outputFormatIsDefined && !this.isDefaultOutputName) { - outputFileName = this.output; - } else { - outputFileName = Path.of(this.DEFAULT_OUTPUT_FILE_NAME + "." + this.outputFormat.getExtention()); - } - - // Export the graph - if (this.output == null) { - RdfDataExporter.exportToStdout(this.outputFormat, this.reportGraph, this.spec, this.verbose); - } else { - RdfDataExporter.exportToFile( - outputFileName, - this.outputFormat, - this.reportGraph, - this.spec, - this.verbose); - } - } } diff --git a/corese-command/src/main/java/fr/inria/corese/command/programs/Sparql.java b/corese-command/src/main/java/fr/inria/corese/command/programs/Sparql.java index 896a829890..13bafcab33 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/programs/Sparql.java +++ b/corese-command/src/main/java/fr/inria/corese/command/programs/Sparql.java @@ -1,310 +1,79 @@ package fr.inria.corese.command.programs; -import java.io.IOException; -import java.net.URL; -import java.nio.file.Path; -import java.util.Optional; -import java.util.concurrent.Callable; - import fr.inria.corese.command.App; -import fr.inria.corese.command.utils.ConfigManager; -import 
fr.inria.corese.command.utils.ConvertString; -import fr.inria.corese.command.utils.TestType; -import fr.inria.corese.command.utils.format.EnumInputFormat; -import fr.inria.corese.command.utils.format.EnumOutputFormat; -import fr.inria.corese.command.utils.format.EnumResultFormat; -import fr.inria.corese.command.utils.rdf.RdfDataExporter; -import fr.inria.corese.command.utils.rdf.RdfDataLoader; -import fr.inria.corese.command.utils.sparql.SparqlQueryLoader; -import fr.inria.corese.command.utils.sparql.SparqlResultExporter; +import fr.inria.corese.command.utils.exporter.sparql.EnumResultFormat; +import fr.inria.corese.command.utils.exporter.sparql.SparqlResultExporter; +import fr.inria.corese.command.utils.loader.rdf.EnumRdfInputFormat; +import fr.inria.corese.command.utils.loader.rdf.RdfDataLoader; +import fr.inria.corese.command.utils.loader.sparql.SparqlQueryLoader; import fr.inria.corese.core.Graph; import fr.inria.corese.core.query.QueryProcess; -import fr.inria.corese.core.util.Property; -import fr.inria.corese.core.util.Property.Value; import fr.inria.corese.kgram.core.Mappings; -import fr.inria.corese.sparql.triple.parser.ASTQuery; import picocli.CommandLine.Command; -import picocli.CommandLine.Model.CommandSpec; import picocli.CommandLine.Option; -import picocli.CommandLine.Spec; @Command(name = "sparql", version = App.version, description = "Run a SPARQL query.", mixinStandardHelpOptions = true) -public class Sparql implements Callable { - - private final String ERROR_OUTPUT_FORMAT_CONSTRUCT_REQUEST = "Error: %s is not a valid output format for insert, delete, describe or construct requests. Use one of the following RDF formats: \"rdfxml\", \"turtle\", \"jsonld\", \"trig\", \"jsonld\"."; - private final String ERROR_OUTPUT_FORMAT_SELECT_REQUEST = "Error: %s is not a valid output format for select or ask requests. 
Use one of the following result formats: \"xml\", \"json\", \"csv\", \"tsv\", \"md\"."; - private final int ERROR_EXIT_CODE_SUCCESS = 0; - private final int ERROR_EXIT_CODE_ERROR = 1; - private final String DEFAULT_OUTPUT_FILE_NAME = "output"; - - @Spec - private CommandSpec spec; +public class Sparql extends AbstractInputCommand { @Option(names = { "-f", "-if", - "--input-format" }, description = "RDF serialization format of the input file. Possible values: ${COMPLETION-CANDIDATES}.") - private EnumInputFormat inputFormat = null; - - @Option(names = { "-i", - "--input-data" }, description = "Path or URL of the input file. If not provided, the standard input will be used.", arity = "1...") - private String[] inputs; + "--input-format" }, description = "Specifies the RDF serialization format of the input file. Possible values are: :@|fg(225) ${COMPLETION-CANDIDATES}|@.") + private EnumRdfInputFormat inputFormat = null; @Option(names = { "-r", "-of", - "--result-format" }, description = "Result fileformat. Possible values: ${COMPLETION-CANDIDATES}. ") + "--result-format" }, description = "Specifies the format of the result file. Possible values are: :@|fg(225) ${COMPLETION-CANDIDATES}|@.") private EnumResultFormat resultFormat = null; - @Option(names = { "-o", - "--output-data" }, description = "Output file path. 
If not provided, the result will be written to standard output.", arity = "0..1", fallbackValue = DEFAULT_OUTPUT_FILE_NAME) - private Path output; - @Option(names = { "-q", - "--query" }, description = "SPARQL query string or path/URL to a .rq file.", required = true) + "--query" }, description = "Specifies the SPARQL query string or the path/URL to a .rq file containing the query.", required = true) private String queryUrlOrFile; - @Option(names = { "-R", - "--recursive" }, description = "Load all files in the input directory recursively.", required = false, defaultValue = "false") - private boolean recursive; - - @Option(names = { "-v", - "--verbose" }, description = "Prints more information about the execution of the command..", required = false, defaultValue = "false") - private boolean verbose; - - @Option(names = { "-c", - "--config", - "--init" }, description = "Path to a configuration file. If not provided, the default configuration file will be used.", required = false) - private Path configFilePath; - - @Option(names = { "-w", - "--no-owl-import" }, description = "Disables the automatic importation of ontologies specified in 'owl:imports' statements. 
When this flag is set, the application will not fetch and include referenced ontologies.", required = false, defaultValue = "false") - private boolean noOwlImport; - - private String query; - - private Graph graph = Graph.create(); - - private boolean resultFormatIsDefined = false; - private boolean outputPathIsDefined = false; - private boolean isDefaultOutputName = false; - - private EnumResultFormat defaultRdfBidings = EnumResultFormat.TURTLE; - private EnumResultFormat defaultResult = EnumResultFormat.BIDING_MD; - public Sparql() { } @Override public Integer call() { + super.call(); + try { - // Load configuration file - Optional configFilePath = Optional.ofNullable(this.configFilePath); - if (configFilePath.isPresent()) { - ConfigManager.loadFromFile(configFilePath.get(), this.spec, this.verbose); - } else { - ConfigManager.loadDefaultConfig(this.spec, this.verbose); - } - // Set owl import - Property.set(Value.DISABLE_OWL_AUTO_IMPORT, this.noOwlImport); + // Load the input file(s) + RdfDataLoader loader = new RdfDataLoader(this.spec, this.verbose); + Graph graph = loader.load(this.inputsRdfData, this.inputFormat, this.recursive); + + // Load the query + SparqlQueryLoader queryLoader = new SparqlQueryLoader(this.spec, this.verbose); + String query = queryLoader.load(this.queryUrlOrFile); - this.resultFormatIsDefined = this.resultFormat != null; - this.outputPathIsDefined = this.output != null; - this.isDefaultOutputName = this.output == null - || this.DEFAULT_OUTPUT_FILE_NAME.equals(this.output.toString()); + // Execute the query + Mappings mappings = this.execute(graph, query); - this.loadInputFile(); - this.loadQuery(); - this.executeAndPrint(); + // Export the result + SparqlResultExporter exporter = new SparqlResultExporter(this.spec, this.verbose, this.output); + exporter.export(mappings, graph, this.resultFormat); return this.ERROR_EXIT_CODE_SUCCESS; } catch (Exception e) { - this.spec.commandLine().getErr().println("\u001B[31mError: " + e.getMessage() + 
"\u001B[0m"); + this.spec.commandLine().getErr().println("Error: " + e.getMessage()); return this.ERROR_EXIT_CODE_ERROR; } } - /** - * Load the input file(s) into a graph. - * - * @throws IOException If the file cannot be read. - */ - private void loadInputFile() throws IOException { - - if (this.inputs == null) { - // If inputs is not provided, load from stdin - RdfDataLoader.LoadFromStdin( - this.inputFormat, - this.graph, - this.spec, - this.verbose); - } else { - for (String input : this.inputs) { - Optional path = ConvertString.toPath(input); - Optional url = ConvertString.toUrl(input); - - if (url.isPresent()) { - // if input is a URL - RdfDataLoader.loadFromURL( - url.get(), - this.inputFormat, - this.graph, - this.spec, - this.verbose); - } else if (path.isPresent()) { - // if input is a path - if (path.get().toFile().isDirectory()) { - // if input is a directory - RdfDataLoader.loadFromDirectory( - path.get(), - this.inputFormat, - this.graph, - this.recursive, - this.spec, - this.verbose); - } else { - // if input is a file - RdfDataLoader.loadFromFile( - path.get(), - this.inputFormat, - this.graph, - this.spec, - this.verbose); - } - } else { - throw new IllegalArgumentException( - "Input path is not a valid URL, file path or directory: " + input); - } - } - } - } - - /** - * Load the query from the query string or from the query file. - * - * @throws IOException If the query file cannot be read. 
- */ - private void loadQuery() throws IOException { - Optional path = ConvertString.toPath(this.queryUrlOrFile); - Optional url = ConvertString.toUrl(this.queryUrlOrFile); - Boolean isSparqlQuery = TestType.isSparqlQuery(this.queryUrlOrFile); - - if (isSparqlQuery) { - // if query is a SPARQL query - this.query = this.queryUrlOrFile; - } else if (url.isPresent()) { - // if query is a URL - this.query = SparqlQueryLoader.loadFromUrl(url.get(), this.spec, this.verbose); - } else if (path.isPresent()) { - // if query is a path - this.query = SparqlQueryLoader.loadFromFile(path.get(), this.spec, this.verbose); - } - } - - /** - * Execute the query and print or write the results. - * - * @throws Exception If the query cannot be executed. - */ - private void executeAndPrint() throws Exception { + private Mappings execute(Graph graph, String query) throws Exception { QueryProcess exec = QueryProcess.create(graph); // Execute query try { if (this.verbose) { - this.spec.commandLine().getErr().println("Query: " + this.query); + this.spec.commandLine().getErr().println("Query: " + query); this.spec.commandLine().getErr().println("Executing query..."); } - ASTQuery ast = exec.ast(this.query); - Mappings map = exec.query(ast); - - // Print or write results - exportResult(ast, map); + return exec.query(query); } catch (Exception e) { throw new Exception("Error when executing SPARQL query : " + e.getMessage(), e); } } - /** - * Export the results to the output file or to the standard output. - * - * @param ast – AST of the query - * @param map – Mappings of the query - * @throws IOException If the output file cannot be written. 
- */ - private void exportResult(ASTQuery ast, Mappings map) throws IOException { - Path outputFileName; - - boolean isUpdate = ast.isSPARQLUpdate(); - boolean isConstruct = ast.isConstruct(); - boolean isAsk = ast.isAsk(); - boolean isSelect = ast.isSelect(); - - // Set default output and result formats if not set - if (!this.resultFormatIsDefined) { - if (isUpdate || isConstruct) { - this.resultFormat = this.defaultRdfBidings; - } else { - this.resultFormat = this.defaultResult; - } - } - - // Check if the output format is valid for the query type - if ((isUpdate || isConstruct) && !this.resultFormat.isRDFFormat()) { - throw new IllegalArgumentException(String.format(ERROR_OUTPUT_FORMAT_CONSTRUCT_REQUEST, resultFormat)); - } - - if ((isAsk || isSelect) && this.resultFormat.isRDFFormat()) { - throw new IllegalArgumentException(String.format(ERROR_OUTPUT_FORMAT_SELECT_REQUEST, resultFormat)); - } - - // Set output file name - if (this.outputPathIsDefined && !this.isDefaultOutputName) { - outputFileName = this.output; - } else { - outputFileName = Path.of(DEFAULT_OUTPUT_FILE_NAME + "." 
+ this.resultFormat.getExtention()); - } - - // Export results - if (isUpdate) { - EnumOutputFormat outputFormat = this.resultFormat.convertToOutputFormat(); - - if (this.outputPathIsDefined) { - RdfDataExporter.exportToFile( - outputFileName, - outputFormat, - this.graph, - this.spec, - this.verbose); - } else { - // if no output format is defined if print results to stdout - // then print true if the update was successful or false - // otherwise - if (!resultFormatIsDefined) { - this.spec.commandLine().getOut().println(!map.isEmpty()); - this.spec.commandLine().getErr() - .println( - "Precise result format with --resultFormat option to get the result in standard output."); - } else { - RdfDataExporter.exportToStdout(outputFormat, this.graph, this.spec, this.verbose); - } - } - } else { - if (this.outputPathIsDefined) { - SparqlResultExporter.exportToFile( - outputFileName, - this.resultFormat, - map, - this.spec, - this.verbose); - } else { - SparqlResultExporter.exportToStdout( - this.resultFormat, - map, - this.spec, - this.verbose); - } - } - } - } diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/exporter/AbstractExporter.java b/corese-command/src/main/java/fr/inria/corese/command/utils/exporter/AbstractExporter.java new file mode 100644 index 0000000000..4a519ac29e --- /dev/null +++ b/corese-command/src/main/java/fr/inria/corese/command/utils/exporter/AbstractExporter.java @@ -0,0 +1,176 @@ + +package fr.inria.corese.command.utils.exporter; + +import java.nio.file.Path; + +import fr.inria.corese.core.Graph; +import fr.inria.corese.core.print.ResultFormat; +import fr.inria.corese.kgram.core.Mappings; +import fr.inria.corese.sparql.triple.function.extension.ResultFormater; +import picocli.CommandLine.Model.CommandSpec; + +/** + * Utility class to export SPARQL query results and RDF graphs. 
+ */ +public abstract class AbstractExporter { + + public static final String DEFAULT_OUTPUT = "./output"; + private final Path DEFAULT_OUTPUT_PATH = Path.of(DEFAULT_OUTPUT); + + // Command specification + private CommandSpec spec; + private boolean verbose; + + // Output + protected Path output; + protected boolean outputIsDefined; + protected boolean needToAppendExtension; + private boolean outputToFileIsDefault; + + ///////////////// + // Constructor // + ///////////////// + + /** + * Constructor. + * + * @param spec Command specification. + * @param verbose If true, print information about the exported file. + * @param output Output file path. If not provided, the result will be written + * to standard output. + */ + public AbstractExporter(CommandSpec spec, boolean verbose, Path output) throws IllegalArgumentException { + // Command specification + this.spec = spec; + this.verbose = verbose; + + // Output + this.outputIsDefined = output != null; + this.outputToFileIsDefault = outputIsDefined && DEFAULT_OUTPUT_PATH.equals(this.output); + this.output = outputToFileIsDefault ? DEFAULT_OUTPUT_PATH : output; + this.needToAppendExtension = outputIsDefined && !hasExtension(this.output); + } + + /////////////////////// + // Protected methods // + /////////////////////// + + /** + * Export the result to a file. + * + * @param path Path of the file to export to. + * @param FormatCode Corese code of the format. + * @param formatName Name of the format. + * @param graph Graph to export. + */ + protected void exportToFile(Path path, int FormatCode, String formatName, Graph graph) { + ResultFormat resultFormater = ResultFormat.create(graph); + exportToFile(path, FormatCode, formatName, resultFormater); + } + + /** + * Export the result to standard output. + * + * @param FormatCode Corese code of the format. + * @param formatName Name of the format. + * @param graph Graph to export. 
+ */ + protected void exportToStdout(int FormatCode, String formatName, Graph graph) { + ResultFormat resultFormater = ResultFormat.create(graph); + exportToStdout(FormatCode, formatName, resultFormater); + } + + /** + * Export the result to a file. + * + * @param path Path of the file to export to. + * @param FormatCode Corese code of the format. + * @param formatName Name of the format. + * @param mappings Mappings to export. + */ + protected void exportToFile(Path path, int FormatCode, String formatName, Mappings mappings) { + ResultFormat resultFormater = ResultFormat.create(mappings); + exportToFile(path, FormatCode, formatName, resultFormater); + } + + /** + * Export the result to standard output. + * + * @param FormatCode Corese code of the format. + * @param formatName Name of the format. + * @param mappings Mappings to export. + */ + protected void exportToStdout(int FormatCode, String formatName, Mappings mappings) { + ResultFormat resultFormater = ResultFormat.create(mappings); + exportToStdout(FormatCode, formatName, resultFormater); + } + + ///////////////////// + // Private methods // + ///////////////////// + + /** + * Export the result to a file. + * + * @param path Path of the file to export to. + * @param FormatCode Corese code of the format. + * @param formatName Name of the format. + * @param ResultFormat Result formater. + */ + private void exportToFile(Path path, int FormatCode, String formatName, ResultFormat resultFormater) { + + resultFormater.setSelectFormat(FormatCode); + resultFormater.setConstructFormat(FormatCode); + + try { + resultFormater.write(path.toString()); + } catch (Exception e) { + throw new IllegalArgumentException("Failed to open export file: " + path.toString(), e); + } + + if (this.verbose) { + this.spec.commandLine().getErr() + .println("Exported result in file: " + path.toString() + " with format: " + formatName); + + } + } + + /** + * Export the result to standard output. 
+ * + * @param FormatCode Corese code of the format. + * @param formatName Name of the format. + * @param ResultFormat Result formater. + */ + private void exportToStdout(int FormatCode, String formatName, ResultFormat resultFormater) { + + // Configure the result formater + resultFormater.setSelectFormat(FormatCode); + resultFormater.setConstructFormat(FormatCode); + + // Write the result to standard output + try { + String str = resultFormater.toString(); + spec.commandLine().getOut().println(str); + } catch (Exception e) { + throw new IllegalArgumentException("Failed to write to standard output", e); + } + + // Print information about the exported file + if (verbose) { + spec.commandLine().getErr().println("Exported result to standard output with format: " + formatName); + } + } + + /** + * Determine if the given path has an extension. + * + * @param path Path to check. + * @return True if the path has an extension, false otherwise. + */ + private boolean hasExtension(Path path) { + String fileName = path.getFileName().toString(); + int dotIndex = fileName.lastIndexOf('.'); + return dotIndex > 0 && dotIndex < fileName.length() - 1; + } +} \ No newline at end of file diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/exporter/rdf/EnumCanonicAlgo.java b/corese-command/src/main/java/fr/inria/corese/command/utils/exporter/rdf/EnumCanonicAlgo.java new file mode 100644 index 0000000000..56696d34ac --- /dev/null +++ b/corese-command/src/main/java/fr/inria/corese/command/utils/exporter/rdf/EnumCanonicAlgo.java @@ -0,0 +1,59 @@ +package fr.inria.corese.command.utils.exporter.rdf; + +import fr.inria.corese.sparql.api.ResultFormatDef; + +/** + * Enumeration of canonic algorithms. 
+ */ +public enum EnumCanonicAlgo { + + // Rdfc-1.0-sha256 + RDFC_1_0("rdfc-1.0", ResultFormatDef.RDFC10_FORMAT, "nq"), + RDFC_1_0_SHA256("rdfc-1.0-sha256", ResultFormatDef.RDFC10_FORMAT, "nq"), + + // Rdfc-1.0-sha384 + RDFC_1_0_SHA384("rdfc-1.0-sha384", ResultFormatDef.RDFC10_SHA384_FORMAT, "nq"); + + private final String name; + private final int coreseCodeFormat; + private final String extention; + + /** + * Constructor. + * + * @param name The name of the canonic algorithm. + * @param coreseCodeFormat The Corese code of the format. + * @param extention The extension of the file format associated with the + * canonic algorithm. + */ + private EnumCanonicAlgo(String name, int coreseCodeFormat, String extention) { + this.name = name; + this.coreseCodeFormat = coreseCodeFormat; + this.extention = extention; + } + + /** + * Get the Corese code of the format. + * + * @return The Corese code of the format. + */ + public int getCoreseCodeFormat() { + return this.coreseCodeFormat; + } + + /** + * Get the extension of the file format associated with the canonic algorithm. + * + * @return The extension of the file format associated with the canonic + * algorithm. + */ + public String getExtention() { + return this.extention; + } + + @Override + public String toString() { + return this.name; + } + +} diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/exporter/rdf/EnumRdfOutputFormat.java b/corese-command/src/main/java/fr/inria/corese/command/utils/exporter/rdf/EnumRdfOutputFormat.java new file mode 100644 index 0000000000..b2798afb8f --- /dev/null +++ b/corese-command/src/main/java/fr/inria/corese/command/utils/exporter/rdf/EnumRdfOutputFormat.java @@ -0,0 +1,84 @@ +package fr.inria.corese.command.utils.exporter.rdf; + +import fr.inria.corese.sparql.api.ResultFormatDef; + +/** + * Enumeration of output RDF serialization formats. 
+ */ +public enum EnumRdfOutputFormat { + // RdfXml + RDFXML("rdfxml", ResultFormatDef.RDF_XML_FORMAT, "rdf"), + APPLICATION_RDF_XML("application/rdf+xml", ResultFormatDef.RDF_XML_FORMAT, "rdf"), + RDF("rdf", ResultFormatDef.RDF_XML_FORMAT, "rdf"), + + // Turtle + TURTLE("turtle", ResultFormatDef.TURTLE_FORMAT, "ttl"), + TEXT_TURTLE("text/turtle", ResultFormatDef.TURTLE_FORMAT, "ttl"), + TTL("ttl", ResultFormatDef.TURTLE_FORMAT, "ttl"), + + // Trig + TRIG("trig", ResultFormatDef.TRIG_FORMAT, "trig"), + APPLICATION_TRIG("application/trig", ResultFormatDef.TRIG_FORMAT, "trig"), + + // JsonLd + JSONLD("jsonld", ResultFormatDef.JSONLD_FORMAT, "jsonld"), + APPLICATION_LD_JSON("application/ld+json", ResultFormatDef.JSONLD_FORMAT, "jsonld"), + + // Ntriples + NTRIPLES("ntriples", ResultFormatDef.NTRIPLES_FORMAT, "nt"), + APPLICATION_N_TRIPLES("application/n-triples", ResultFormatDef.NTRIPLES_FORMAT, "nt"), + NT("nt", ResultFormatDef.NTRIPLES_FORMAT, "nt"), + + // Nquads + NQUADS("nquads", ResultFormatDef.NQUADS_FORMAT, "nq"), + APPLICATION_N_QUADS("application/n-quads", ResultFormatDef.NQUADS_FORMAT, "nq"), + NQ("nq", ResultFormatDef.NQUADS_FORMAT, "nq"), + + // Rdfc-1.0-sha256 + RDFC10("rdfc-1.0", ResultFormatDef.RDFC10_FORMAT, "nq"), + RDFC10SHA256("rdfc-1.0-sha256", ResultFormatDef.RDFC10_FORMAT, "nq"), + + // Rdfc-1.0-sha384 + RDFC10SHA384("rdfc-1.0-sha384", ResultFormatDef.RDFC10_SHA384_FORMAT, "nq"); + + private final String name; + private final int coreseCodeFormat; + private final String extention; + + /** + * Constructor. + * + * @param name The name of the format. + * @param coreseCodeFormat The Corese code of the format. + * @param extention The extension file for the format. + */ + private EnumRdfOutputFormat(String name, int coreseCodeFormat, String extention) { + this.name = name; + this.coreseCodeFormat = coreseCodeFormat; + this.extention = extention; + } + + /** + * Get the Corese code of the format. + * + * @return The Corese code of the format. 
+ */ + public int getCoreseCodeFormat() { + return this.coreseCodeFormat; + } + + /** + * Get the extension of the file format associated with the format. + * + * @return The extension of the file format associated with the format. + */ + public String getExtention() { + return this.extention; + } + + @Override + public String toString() { + return this.name; + } + +} diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/exporter/rdf/RdfDataCanonicalizer.java b/corese-command/src/main/java/fr/inria/corese/command/utils/exporter/rdf/RdfDataCanonicalizer.java new file mode 100644 index 0000000000..aa23f48434 --- /dev/null +++ b/corese-command/src/main/java/fr/inria/corese/command/utils/exporter/rdf/RdfDataCanonicalizer.java @@ -0,0 +1,51 @@ + +package fr.inria.corese.command.utils.exporter.rdf; + +import java.nio.file.Path; + +import fr.inria.corese.command.utils.exporter.AbstractExporter; +import fr.inria.corese.core.Graph; +import picocli.CommandLine.Model.CommandSpec; + +/** + * Utility class to canonicalize RDF graphs. + */ +public class RdfDataCanonicalizer extends AbstractExporter { + + ///////////////// + // Constructor // + ///////////////// + + /** + * Constructor. + * + * @param spec Command specification. + * @param verbose If true, print information about the exported file. + * @param output Output file path. If not provided, the result will be written + * to standard output. + */ + public RdfDataCanonicalizer(CommandSpec spec, boolean verbose, Path output) { + super(spec, verbose, output); + } + + //////////////////// + // Public methods // + //////////////////// + + /** + * Canonicalize an RDF graph to a file or standard output. + * + * @param format Serialization format. + * @param graph RDF graph to export. + */ + public void export(Graph graph, EnumCanonicAlgo format) { + + if (this.outputIsDefined) { + Path path = this.needToAppendExtension ? 
Path.of(this.output + format.getExtention()) : this.output; + exportToFile(path, format.getCoreseCodeFormat(), format.toString(), graph); + } else { + exportToStdout(format.getCoreseCodeFormat(), format.toString(), graph); + } + } + +} \ No newline at end of file diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/exporter/rdf/RdfDataExporter.java b/corese-command/src/main/java/fr/inria/corese/command/utils/exporter/rdf/RdfDataExporter.java new file mode 100644 index 0000000000..3890600caa --- /dev/null +++ b/corese-command/src/main/java/fr/inria/corese/command/utils/exporter/rdf/RdfDataExporter.java @@ -0,0 +1,51 @@ + +package fr.inria.corese.command.utils.exporter.rdf; + +import java.nio.file.Path; + +import fr.inria.corese.command.utils.exporter.AbstractExporter; +import fr.inria.corese.core.Graph; +import picocli.CommandLine.Model.CommandSpec; + +/** + * Utility class to export RDF graphs. + */ +public class RdfDataExporter extends AbstractExporter { + + ///////////////// + // Constructor // + ///////////////// + + /** + * Constructor. + * + * @param spec Command specification. + * @param verbose If true, print information about the exported file. + * @param output Output file path. If not provided, the result will be written + * to standard output. + */ + public RdfDataExporter(CommandSpec spec, boolean verbose, Path output) { + super(spec, verbose, output); + } + + //////////////////// + // Public methods // + //////////////////// + + /** + * Export an RDF graph to a file or standard output. + * + * @param graph RDF graph to export. + * @param format Serialization format. + */ + public void export(Graph graph, EnumRdfOutputFormat format) { + + if (this.outputIsDefined) { + Path path = this.needToAppendExtension ? 
Path.of(this.output + format.getExtention()) : this.output; + exportToFile(path, format.getCoreseCodeFormat(), format.toString(), graph); + } else { + exportToStdout(format.getCoreseCodeFormat(), format.toString(), graph); + } + } + +} \ No newline at end of file diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/exporter/sparql/EnumResultFormat.java b/corese-command/src/main/java/fr/inria/corese/command/utils/exporter/sparql/EnumResultFormat.java new file mode 100644 index 0000000000..19564f7b00 --- /dev/null +++ b/corese-command/src/main/java/fr/inria/corese/command/utils/exporter/sparql/EnumResultFormat.java @@ -0,0 +1,175 @@ +package fr.inria.corese.command.utils.exporter.sparql; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Predicate; + +import fr.inria.corese.sparql.api.ResultFormatDef; + +/** + * Enumeration of result serialization formats. + */ +public enum EnumResultFormat { + + ///////////////// + // RDF Formats // + ///////////////// + + // RdfXml + RDFXML("rdfxml", ResultFormatDef.RDF_XML_FORMAT, "rdf", false), + APPLICATION_RDF_XML("application/rdf+xml", ResultFormatDef.RDF_XML_FORMAT, "rdf", false), + RDF("rdf", ResultFormatDef.RDF_XML_FORMAT, "rdf", false), + + // Turtle + TURTLE("turtle", ResultFormatDef.TURTLE_FORMAT, "ttl", false), + TEXT_TURTLE("text/turtle", ResultFormatDef.TURTLE_FORMAT, "ttl", false), + TTL("ttl", ResultFormatDef.TURTLE_FORMAT, "ttl", false), + + // Trig + TRIG("trig", ResultFormatDef.TRIG_FORMAT, "trig", false), + APPLICATION_TRIG("application/trig", ResultFormatDef.TRIG_FORMAT, "trig", false), + + // JsonLd + JSONLD("jsonld", ResultFormatDef.JSONLD_FORMAT, "jsonld", false), + APPLICATION_LD_JSON("application/ld+json", ResultFormatDef.JSONLD_FORMAT, "jsonld", false), + + // Ntriples + NTRIPLES("ntriples", ResultFormatDef.NTRIPLES_FORMAT, "nt", false), + APPLICATION_N_TRIPLES("application/n-triples", ResultFormatDef.NTRIPLES_FORMAT, "nt", false), + NT("nt", 
ResultFormatDef.NTRIPLES_FORMAT, "nt", false), + + // Nquads + NQUADS("nquads", ResultFormatDef.NQUADS_FORMAT, "nq", false), + APPLICATION_N_QUADS("application/n-quads", ResultFormatDef.NQUADS_FORMAT, "nq", false), + NQ("nq", ResultFormatDef.NQUADS_FORMAT, "nq", false), + + // Rdfc-1.0-sha256 + RDFC10("rdfc-1.0", ResultFormatDef.RDFC10_FORMAT, "nq", false), + RDFC10SHA256("rdfc-1.0-sha256", ResultFormatDef.RDFC10_FORMAT, "nq", false), + + // Rdfc-1.0-sha384 + RDFC10SHA384("rdfc-1.0-sha384", ResultFormatDef.RDFC10_SHA384_FORMAT, "nq", false), + + ///////////////////// + // Mapping Formats // + ///////////////////// + + // Xml + XML("xml", ResultFormatDef.XML_FORMAT, "srx", true), + APPLICATION_SPARQL_RESULTS_XML("application/sparql-results+xml", ResultFormatDef.XML_FORMAT, "srx", true), + SRX("srx", ResultFormatDef.XML_FORMAT, "srx", true), + + // Json + JSON("json", ResultFormatDef.JSON_FORMAT, "srj", true), + APPLICATION_SPARQL_RESULTS_JSON("application/sparql-results+json", ResultFormatDef.JSON_FORMAT, "srj", true), + SRJ("srj", ResultFormatDef.JSON_FORMAT, "srj", true), + + // Csv + CSV("csv", ResultFormatDef.CSV_FORMAT, "csv", true), + TEXT_CSV("text/csv", ResultFormatDef.CSV_FORMAT, "csv", true), + + // Tsv + TSV("tsv", ResultFormatDef.TSV_FORMAT, "tsv", true), + TEXT_TAB_SEPARATED_VALUES("text/tab-separated-values", ResultFormatDef.TSV_FORMAT, "tsv", true), + + // Markdown + MARKDOWN("markdown", ResultFormatDef.MARKDOWN_FORMAT, "md", true), + TEXT_MARKDOWN("text/markdown", ResultFormatDef.MARKDOWN_FORMAT, "md", true), + MD("md", ResultFormatDef.MARKDOWN_FORMAT, "md", true); + + private final String name; + private final int coreseCodeFormat; + private final String extention; + private final boolean isMappingFormat; + + /** + * Constructor. + * + * @param name The name of the format. + * @param coreseCodeFormat The Corese code of the format. + * @param extention The extension of the format. 
+ * @param isMappingFormat True if the format is a mapping format, false + * otherwise. + */ + private EnumResultFormat(String name, int coreseCodeFormat, String extention, boolean isMappingFormat) { + this.name = name; + this.coreseCodeFormat = coreseCodeFormat; + this.extention = extention; + this.isMappingFormat = isMappingFormat; + } + + /** + * Get the Corese code of the format. + * + * @return The Corese code of the format. + */ + public int getCoreseCodeFormat() { + return this.coreseCodeFormat; + } + + /** + * Get the extension of the file format associated with the format. + * + * @return The extension of the file format associated with the format. + */ + public String getExtention() { + return this.extention; + } + + /** + * Check if the format is a mapping format. + * + * @return True if the format is a mapping format, false otherwise. + */ + public boolean isMappingFormat() { + return this.isMappingFormat; + } + + /** + * Check if the format is an RDF graph format. + * + * @return True if the format is an RDF graph format, false otherwise. + */ + public boolean isRdfGraphFormat() { + return !this.isMappingFormat; + } + + /** + * Filter the formats based on the given predicate. + * + * @param predicate Predicate to filter the formats. + * @return List of formats that satisfy the predicate. + */ + private static List filterFormats(Predicate predicate) { + List formatsList = new ArrayList<>(); + for (EnumResultFormat format : EnumResultFormat.values()) { + if (predicate.test(format)) { + formatsList.add(format); + } + } + return formatsList; + } + + /** + * Get the list of RDF graph formats. + * + * @return List of RDF graph formats. + */ + public static List getRdfFormats() { + return filterFormats(EnumResultFormat::isRdfGraphFormat); + } + + /** + * Get the list of mapping formats. + * + * @return List of mapping formats. 
+ */ + public static List getMappingFormats() { + return filterFormats(EnumResultFormat::isMappingFormat); + } + + @Override + public String toString() { + return this.name; + } +} diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/exporter/sparql/SparqlResultExporter.java b/corese-command/src/main/java/fr/inria/corese/command/utils/exporter/sparql/SparqlResultExporter.java new file mode 100644 index 0000000000..1a4e9f2938 --- /dev/null +++ b/corese-command/src/main/java/fr/inria/corese/command/utils/exporter/sparql/SparqlResultExporter.java @@ -0,0 +1,101 @@ + +package fr.inria.corese.command.utils.exporter.sparql; + +import java.nio.file.Path; + +import fr.inria.corese.command.utils.exporter.AbstractExporter; +import fr.inria.corese.core.Graph; +import fr.inria.corese.kgram.core.Mappings; +import fr.inria.corese.sparql.triple.parser.ASTQuery; +import picocli.CommandLine.Model.CommandSpec; + +/** + * Utility class to export SPARQL query results and SPARQL graphs results. + */ +public class SparqlResultExporter extends AbstractExporter { + + // Default output + private static final EnumResultFormat DEFAULT_GRAPH_OUTPUT = EnumResultFormat.TURTLE; + private static final EnumResultFormat DEFAULT_MAPPING_OUTPUT = EnumResultFormat.MARKDOWN; + + ///////////////// + // Constructor // + ///////////////// + + /** + * Constructor. + * + * @param spec Command specification. + * @param verbose If true, print information about the exported file. + * @param output Output file path. If not provided, the result will be written + * to standard output. + */ + public SparqlResultExporter(CommandSpec spec, boolean verbose, Path output) { + super(spec, verbose, output); + } + + //////////////////// + // Public methods // + //////////////////// + + /** + * Export a SPARQL query result to a file or standard output. + * + * @param format Serialization format. + * @param graph SPARQL graph to export. + * @param map SPARQL query result to export. 
+ */ + public void export(Mappings map, Graph graph, EnumResultFormat format) { + + ASTQuery ast = map.getAST(); + + boolean isUpdate = ast.isUpdate(); + boolean isConstruct = ast.isConstruct(); + boolean isAsk = ast.isAsk(); + boolean isSelect = ast.isSelect(); + boolean isDescribe = ast.isDescribe(); + + // Define the output format if not provided + if (format == null) { + if (isUpdate || isConstruct) { + format = DEFAULT_GRAPH_OUTPUT; + } else { + format = DEFAULT_MAPPING_OUTPUT; + } + } + + // Check if the output format is compatible with the query result + if ((isUpdate || isConstruct) && format.isMappingFormat()) { + throw new IllegalArgumentException(String.format( + "Error: %s is not a valid output format for insert, delete, describe or construct requests. Use one of the following RDF formats: %s", + format, EnumResultFormat.getRdfFormats())); + } else if ((isSelect || isAsk || isDescribe) && format.isRdfGraphFormat()) { + throw new IllegalArgumentException(String.format( + "Error: %s is not a valid output format for select or ask requests. 
Use one of the following mapping formats: %s", + format, EnumResultFormat.getMappingFormats())); + } + + // Define the output file name if not provided + if (!this.outputIsDefined) { + this.output = Path.of(DEFAULT_OUTPUT + format.getExtention()); + } else if (this.needToAppendExtension) { + this.output = Path.of(this.output + format.getExtention()); + } + + // Export the query result + if (isUpdate) { + if (this.outputIsDefined) { + exportToFile(this.output, format.getCoreseCodeFormat(), format.toString(), graph); + } else { + exportToStdout(format.getCoreseCodeFormat(), format.toString(), graph); + } + } else { + if (this.outputIsDefined) { + exportToFile(this.output, format.getCoreseCodeFormat(), format.toString(), map); + } else { + exportToStdout(format.getCoreseCodeFormat(), format.toString(), map); + } + } + } + +} \ No newline at end of file diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/format/EnumInputFormat.java b/corese-command/src/main/java/fr/inria/corese/command/utils/format/EnumInputFormat.java deleted file mode 100644 index 1370e4998e..0000000000 --- a/corese-command/src/main/java/fr/inria/corese/command/utils/format/EnumInputFormat.java +++ /dev/null @@ -1,157 +0,0 @@ -package fr.inria.corese.command.utils.format; - -import java.security.InvalidParameterException; - -import fr.inria.corese.core.api.Loader; - -/** - * Enumeration of parsable RDF serialization formats. 
- */ -public enum EnumInputFormat { - - RDFXML(1, "rdfxml"), - RDF(1, "rdf"), - APPLICATION_RDF_XML(1, "application/rdf+xml"), - - TURTLE(2, "turtle"), - TTL(2, "ttl"), - TEXT_TURTLE(2, "text/turtle"), - - TRIG(3, "trig"), - APPLICATION_TRIG(3, "application/trig"), - - JSONLD(4, "jsonld"), - APPLICATION_LD_JSON(4, "application/ld+json"), - - NTRIPLES(6, "ntriples"), - NT(6, "nt"), - APPLICATION_NTRIPLES(6, "application/n-triples"), - - NQUADS(7, "nquads"), - NQ(7, "nq"), - APPLICATION_NQUADS(7, "application/n-quads"), - - RDFA(8, "rdfa"), - HTML(8, "html"), - APPLICATION_XHTML_XML(8, "application/xhtml+xml"); - - private final int value; - private final String name; - - /** - * Constructor. - * - * @param value The value of the enum. - * @param name The name of the enum. - * @param extention The extension of the format. - */ - private EnumInputFormat(int value, String name) { - this.value = value; - this.name = name; - } - - /** - * Get the value of the enum. - * - * @return The value of the enum. - */ - public int getValue() { - return this.value; - } - - /** - * Get the name of the enum. - * - * @return The name of the enum. - */ - public String getName() { - return this.name; - } - - /** - * Convert {@code Loader} format value into {@code InputFormat} equivalent - * - * @param loaderFormat The Loader format. - * @return The corresponding InputFormat. 
- */ - public static EnumInputFormat fromLoaderValue(int loaderFormat) { - switch (loaderFormat) { - case Loader.RDFXML_FORMAT: - return EnumInputFormat.RDFXML; - - case Loader.TURTLE_FORMAT: - return EnumInputFormat.TURTLE; - - case Loader.TRIG_FORMAT: - return EnumInputFormat.TRIG; - - case Loader.JSONLD_FORMAT: - return EnumInputFormat.JSONLD; - - case Loader.NT_FORMAT: - return EnumInputFormat.NTRIPLES; - - case Loader.NQUADS_FORMAT: - return EnumInputFormat.NQUADS; - - case Loader.RDFA_FORMAT: - return EnumInputFormat.RDFA; - - default: - throw new InvalidParameterException( - "Impossible to determine the input format, please specify it with the -f or -if or --input-format option."); - } - } - - /** - * Convert {@code OutputFormat} value to the corresponding {@code Loader} - * format. - * - * @param format The Loader format. - * @return The corresponding Loader format. - */ - public static int toLoaderValue(EnumInputFormat format) { - switch (format) { - case RDFXML: - case RDF: - case APPLICATION_RDF_XML: - return Loader.RDFXML_FORMAT; - - case TURTLE: - case TTL: - case TEXT_TURTLE: - return Loader.TURTLE_FORMAT; - - case TRIG: - case APPLICATION_TRIG: - return Loader.TRIG_FORMAT; - - case JSONLD: - case APPLICATION_LD_JSON: - return Loader.JSONLD_FORMAT; - - case NTRIPLES: - case NT: - case APPLICATION_NTRIPLES: - return Loader.NT_FORMAT; - - case NQUADS: - case NQ: - case APPLICATION_NQUADS: - return Loader.NQUADS_FORMAT; - - case RDFA: - case HTML: - case APPLICATION_XHTML_XML: - return Loader.RDFA_FORMAT; - - default: - throw new InvalidParameterException("Input format " + format + " is unknown."); - } - } - - @Override - public String toString() { - return name; - } -} diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/format/EnumOutputFormat.java b/corese-command/src/main/java/fr/inria/corese/command/utils/format/EnumOutputFormat.java deleted file mode 100644 index 6a5b29cf8c..0000000000 --- 
a/corese-command/src/main/java/fr/inria/corese/command/utils/format/EnumOutputFormat.java +++ /dev/null @@ -1,78 +0,0 @@ -package fr.inria.corese.command.utils.format; - -/** - * Enumeration of exportable RDF serialization formats. - */ -public enum EnumOutputFormat { - RDFXML(1, "rdfxml", "rdf"), - RDF(1, "rdf", "rdf"), - APPLICATION_RDF_XML(1, "application/rdf+xml", "rdf"), - - TURTLE(2, "turtle", "ttl"), - TTL(2, "ttl", "ttl"), - TEXT_TURTLE(2, "text/turtle", "ttl"), - - TRIG(3, "trig", "trig"), - APPLICATION_TRIG(3, "application/trig", "trig"), - - JSONLD(4, "jsonld", "jsonld"), - APPLICATION_LD_JSON(4, "application/ld+json", "jsonld"), - - NTRIPLES(6, "ntriples", "nt"), - NT(6, "nt", "nt"), - APPLICATION_NTRIPLES(6, "application/n-triples", "nt"), - - NQUADS(7, "nquads", "nq"), - NQ(7, "nq", "nq"), - APPLICATION_NQUADS(7, "application/n-quads", "nq"); - - private final int value; - private final String name; - private final String extention; - - /** - * Constructor. - * - * @param value The value of the enum. - * @param name The name of the enum. - * @param extention The extension of the format. - */ - private EnumOutputFormat(int value, String name, String extention) { - this.value = value; - this.name = name; - this.extention = extention; - } - - /** - * Get the value of the enum. - * - * @return The value of the enum. - */ - public int getValue() { - return this.value; - } - - /** - * Get the name of the enum. - * - * @return The name of the enum. - */ - public String getName() { - return this.name; - } - - /** - * Get the extension of the format. - * - * @return The extension. 
- */ - public String getExtention() { - return this.extention; - } - - @Override - public String toString() { - return this.name; - } - -} diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/format/EnumResultFormat.java b/corese-command/src/main/java/fr/inria/corese/command/utils/format/EnumResultFormat.java deleted file mode 100644 index 4f62dd1592..0000000000 --- a/corese-command/src/main/java/fr/inria/corese/command/utils/format/EnumResultFormat.java +++ /dev/null @@ -1,147 +0,0 @@ -package fr.inria.corese.command.utils.format; - -import java.security.InvalidParameterException; - -import fr.inria.corese.core.print.ResultFormat; - -/** - * Enumeration of exportable RDF serialization formats. - */ -public enum EnumResultFormat { - RDFXML(1, "rdfxml", "rdf", true), - RDF(1, "rdf", "rdf", true), - APPLICATION_RDF_XML(1, "application/rdf+xml", "rdf", true), - - TURTLE(2, "turtle", "ttl", true), - TTL(2, "ttl", "ttl", true), - TEXT_TURTLE(2, "text/turtle", "ttl", true), - - TRIG(3, "trig", "trig", true), - APPLICATION_TRIG(3, "application/trig", "trig", true), - - JSONLD(4, "jsonld", "jsonld", true), - APPLICATION_LD_JSON(4, "application/ld+json", "jsonld", true), - - NTRIPLES(6, "ntriples", "nt", true), - NT(6, "nt", "nt", true), - APPLICATION_NTRIPLES(6, "application/n-triples", "nt", true), - - NQUADS(7, "nquads", "nq", true), - NQ(7, "nq", "nq", true), - APPLICATION_NQUADS(7, "application/n-quads", "nq", true), - - BIDING_XML(11, "xml", "srx", false), - SRX(11, "srx", "srx", false), - APPLICATION_SPARQL_RESULTS_XML(11, ResultFormat.SPARQL_RESULTS_XML, "srx", false), - - BIDING_JSON(13, "json", "srj", false), - SRJ(13, "srj", "srj", false), - APPLICATION_SPARQL_RESULTS_JSON(13, ResultFormat.SPARQL_RESULTS_JSON, "srj", false), - - BIDING_CSV(14, "csv", "csv", false), - TEXT_CSV(14, ResultFormat.SPARQL_RESULTS_CSV, "csv", false), - - BIDING_TSV(15, "tsv", "tsv", false), - TEXT_TSV(15, ResultFormat.SPARQL_RESULTS_TSV, "tsv", false), - - 
BIDING_MARKDOWN(16, "markdown", "md", false), - BIDING_MD(16, "md", "md", false), - TEXT_MARKDOWN(16, ResultFormat.SPARQL_RESULTS_MD, "md", false); - - private final int value; - private final String name; - private final String extention; - private final boolean isRDFFormat; - - /** - * Constructor. - * - * @param value The value of the enum. - * @param name The name of the enum. - * @param extention The extension of the format. - */ - private EnumResultFormat(int value, String name, String extention, boolean isRDFFormat) { - this.value = value; - this.name = name; - this.extention = extention; - this.isRDFFormat = isRDFFormat; - } - - /** - * Get the value of the enum. - * - * @return The value of the enum. - */ - public int getValue() { - return this.value; - } - - /** - * Get the name of the enum. - * - * @return The name of the enum. - */ - public String getName() { - return this.name; - } - - /** - * Get the extension of the format. - * - * @return The extension. - * @throws InvalidParameterException If the format is unknow. - */ - public String getExtention() { - return this.extention; - } - - /** - * Get if the format is a RDF format. - * - * @return True if the format is a RDF format, false otherwise. - */ - public boolean isRDFFormat() { - return this.isRDFFormat; - } - - /** - * Convert to the corresponding OutputFormat. - * - * @return The corresponding OutputFormat. - * @throws InvalidParameterException If the format is unknow. 
- */ - public EnumOutputFormat convertToOutputFormat() { - switch (this) { - case RDFXML: - case RDF: - case APPLICATION_RDF_XML: - return EnumOutputFormat.RDFXML; - case TURTLE: - case TTL: - case TEXT_TURTLE: - return EnumOutputFormat.TURTLE; - case TRIG: - case APPLICATION_TRIG: - return EnumOutputFormat.TRIG; - case JSONLD: - case APPLICATION_LD_JSON: - return EnumOutputFormat.JSONLD; - case NTRIPLES: - case NT: - case APPLICATION_NTRIPLES: - return EnumOutputFormat.NTRIPLES; - case NQUADS: - case NQ: - case APPLICATION_NQUADS: - return EnumOutputFormat.NQUADS; - - default: - throw new InvalidParameterException("Output format " + this + " cannot be converted to OutputFormat."); - } - } - - @Override - public String toString() { - return this.name; - } -} diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/http/EnumRequestMethod.java b/corese-command/src/main/java/fr/inria/corese/command/utils/http/EnumRequestMethod.java index bdb1aeb08e..e0983f59ac 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/utils/http/EnumRequestMethod.java +++ b/corese-command/src/main/java/fr/inria/corese/command/utils/http/EnumRequestMethod.java @@ -4,39 +4,19 @@ * Enumeration of SPARQL request methods. */ public enum EnumRequestMethod { - GET("GET"), - POST_URLENCODED("POST-Encoded"), - POST_DIRECT("POST-Direct"); + GET("get"), + POST_URLENCODED("post-urlencoded"), + POST_DIRECT("post-direct"); private final String name; - private final boolean isPost; /** * Constructor. - * - * @param name The name of the request method. + * + * @param name The name of the SPARQL request method. */ - EnumRequestMethod(String name) { + private EnumRequestMethod(String name) { this.name = name; - this.isPost = name.contains("POST"); - } - - /** - * Get the name of the request method. - * - * @return The name of the request method. - */ - public String getName() { - return this.name; - } - - /** - * Check if the request method is POST or GET. 
- * - * @return True if the request method is POST, false if it is GET. - */ - public boolean isPost() { - return this.isPost; } @Override diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/http/SparqlHttpClient.java b/corese-command/src/main/java/fr/inria/corese/command/utils/http/SparqlHttpClient.java index 978f62f59d..fbb002bd9b 100644 --- a/corese-command/src/main/java/fr/inria/corese/command/utils/http/SparqlHttpClient.java +++ b/corese-command/src/main/java/fr/inria/corese/command/utils/http/SparqlHttpClient.java @@ -8,6 +8,7 @@ import org.apache.commons.lang3.tuple.Pair; +import fr.inria.corese.command.App; import fr.inria.corese.command.utils.TestType; import fr.inria.corese.core.Graph; import fr.inria.corese.core.query.QueryProcess; @@ -31,14 +32,15 @@ public class SparqlHttpClient { private final String endpointUrl; - private EnumRequestMethod queryMethod = EnumRequestMethod.GET; + private EnumRequestMethod requestMethod = EnumRequestMethod.GET; + private Boolean requestMethodIsDefinedByUser = false; private List> headers = new ArrayList<>(); private boolean verbose = false; private int redirectCount = 0; private int maxRedirects = 5; - private final String USERAGENT = "Corese-Command/4.5.0"; + private final String USERAGENT = "Corese-Command/" + App.version; ///////////////// // Constructor // @@ -61,12 +63,15 @@ public SparqlHttpClient(String endpointUrl) { /////////////////////// /** - * Sets the query method. + * Sets the request method. 
* - * @param requestMethod the query method + * @param requestMethod the request method */ - public void setQueryMethod(EnumRequestMethod requestMethod) { - this.queryMethod = requestMethod; + public void setRequestMethod(EnumRequestMethod requestMethod) { + if (requestMethod != null) { + this.requestMethod = requestMethod; + this.requestMethodIsDefinedByUser = true; + } } /** @@ -158,10 +163,10 @@ public String sendRequest(String query, List defaultGraphUris, List header : this.headers) { System.err.println(" " + header.getKey() + ": " + header.getValue()); @@ -254,7 +259,7 @@ private void printRequest(WebTarget webTarget, String bodyContent) { } if (bodyContent != null && !bodyContent.isEmpty()) { - System.err.println(" Request body: " + bodyContent); + System.err.println("\tRequest body: " + bodyContent); } } @@ -280,10 +285,21 @@ private void validateQuery(String queryString, List defaultGraphUris, Li Query query = buildQuery(queryString); + if (!this.requestMethodIsDefinedByUser) { + // Check if the query is an update query. + if (query.getAST().isSPARQLUpdate()) { + // If it is an update query, set the request method to POST_Encoded. + this.requestMethod = EnumRequestMethod.POST_URLENCODED; + } else { + // If the query is not an update query, set the request method to GET. + // No need to set it here as GET is already the default value. + } + } + // Check if the query is an update query and the method is GET // which is not allowed by the SPARQL specification // (see https://www.w3.org/TR/sparql11-protocol/#update-operation) - if (this.queryMethod == EnumRequestMethod.GET && query.getAST().isSPARQLUpdate()) { + if (this.requestMethod == EnumRequestMethod.GET && query.getAST().isSPARQLUpdate()) { throw new IllegalArgumentException( "SPARQL query is an update query, but GET method is used. 
Please use a POST method instead."); } @@ -369,12 +385,12 @@ private WebTarget buildWebTarget( WebTarget webTarget = client.target(endpoint); // Add the query parameter - if (this.queryMethod == EnumRequestMethod.GET) { + if (this.requestMethod == EnumRequestMethod.GET) { webTarget = webTarget.queryParam("query", this.encode(query)); } // Add graph URIs - if (this.queryMethod == EnumRequestMethod.GET || this.queryMethod == EnumRequestMethod.POST_DIRECT) { + if (this.requestMethod == EnumRequestMethod.GET || this.requestMethod == EnumRequestMethod.POST_DIRECT) { for (String defaultGraphUri : defaultGraphUris) { webTarget = webTarget.queryParam("default-graph-uri", this.encode(defaultGraphUri)); } @@ -401,7 +417,7 @@ private String buildRequestBody( StringBuilder bodyContent = new StringBuilder(); - if (this.queryMethod == EnumRequestMethod.POST_URLENCODED) { + if (this.requestMethod == EnumRequestMethod.POST_URLENCODED) { // Add the query parameter bodyContent.append("query=").append(this.encode(query)); @@ -412,7 +428,7 @@ private String buildRequestBody( for (String namedGraphUri : namedGraphUris) { bodyContent.append("&named-graph-uri=").append(this.encode(namedGraphUri)); } - } else if (this.queryMethod == EnumRequestMethod.POST_DIRECT) { + } else if (this.requestMethod == EnumRequestMethod.POST_DIRECT) { // Add the query parameter bodyContent.append(query); } @@ -439,11 +455,11 @@ private Response executeRequest(WebTarget webTarget, String bodyContent) { } // Send the request - if (this.queryMethod == EnumRequestMethod.GET) { + if (this.requestMethod == EnumRequestMethod.GET) { response = builder.get(); - } else if (this.queryMethod == EnumRequestMethod.POST_URLENCODED) { + } else if (this.requestMethod == EnumRequestMethod.POST_URLENCODED) { response = builder.post(Entity.entity(bodyContent, MediaType.APPLICATION_FORM_URLENCODED)); - } else if (this.queryMethod == EnumRequestMethod.POST_DIRECT) { + } else if (this.requestMethod == 
EnumRequestMethod.POST_DIRECT) { response = builder.post(Entity.entity(bodyContent, "application/sparql-query")); } diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/loader/rdf/EnumRdfInputFormat.java b/corese-command/src/main/java/fr/inria/corese/command/utils/loader/rdf/EnumRdfInputFormat.java new file mode 100644 index 0000000000..2e8bfb3034 --- /dev/null +++ b/corese-command/src/main/java/fr/inria/corese/command/utils/loader/rdf/EnumRdfInputFormat.java @@ -0,0 +1,84 @@ +package fr.inria.corese.command.utils.loader.rdf; + +import java.security.InvalidParameterException; + +import fr.inria.corese.core.api.Loader; + +/** + * Enumeration of input RDF serialization formats. + */ +public enum EnumRdfInputFormat { + + // RdfXml + RDFXML("rdfxml", Loader.RDFXML_FORMAT), + APPLICATION_RDF_XML("application/rdf+xml", Loader.RDFXML_FORMAT), + RDF("rdf", Loader.RDFXML_FORMAT), + + // Turtle + TURTLE("turtle", Loader.TURTLE_FORMAT), + TEXT_TURTLE("text/turtle", Loader.TURTLE_FORMAT), + TTL("ttl", Loader.TURTLE_FORMAT), + + // Trig + TRIG("trig", Loader.TRIG_FORMAT), + APPLICATION_TRIG("application/trig", Loader.TRIG_FORMAT), + + // JsonLd + JSONLD("jsonld", Loader.JSONLD_FORMAT), + APPLICATION_LD_JSON("application/ld+json", Loader.JSONLD_FORMAT), + + // Ntriples + NTRIPLES("ntriples", Loader.NT_FORMAT), + APPLICATION_N_TRIPLES("application/n-triples", Loader.NT_FORMAT), + NT("nt", Loader.NT_FORMAT), + + // Nquads + NQUADS("nquads", Loader.NQUADS_FORMAT), + APPLICATION_N_QUADS("application/n-quads", Loader.NQUADS_FORMAT), + NQ("nq", Loader.NQUADS_FORMAT), + + // Rdfa + RDFA("rdfa", Loader.RDFA_FORMAT), + APPLICATION_XHTML_XML("application/xhtml+xml", Loader.RDFA_FORMAT), + XHTML("xhtml", Loader.RDFA_FORMAT), + HTML("html", Loader.RDFA_FORMAT); + + private final String name; + private final int coreseCodeFormat; + + /** + * Constructor. + * + * @param name The name of the format. + * @param coreseCodeFormat The Corese code of the format. 
+ */ + private EnumRdfInputFormat(String name, int coreseCodeFormat) { + this.name = name; + this.coreseCodeFormat = coreseCodeFormat; + } + + /** + * Create an EnumInputFormat from a Corese code. + */ + public static EnumRdfInputFormat create(int loaderFormat) { + for (EnumRdfInputFormat format : EnumRdfInputFormat.values()) { + if (format.coreseCodeFormat == loaderFormat) { + return format; + } + } + throw new InvalidParameterException( + "Impossible to determine the input format, please specify it with the -f or -if or --input-format option."); + } + + /** + * Get the Corese code of the format. + */ + public int getCoreseCode() { + return this.coreseCodeFormat; + } + + @Override + public String toString() { + return name; + } +} diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/loader/rdf/RdfDataLoader.java b/corese-command/src/main/java/fr/inria/corese/command/utils/loader/rdf/RdfDataLoader.java new file mode 100644 index 0000000000..89155f4fee --- /dev/null +++ b/corese-command/src/main/java/fr/inria/corese/command/utils/loader/rdf/RdfDataLoader.java @@ -0,0 +1,266 @@ +package fr.inria.corese.command.utils.loader.rdf; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.net.URL; +import java.nio.file.Path; +import java.util.Optional; + +import fr.inria.corese.command.utils.ConvertString; +import fr.inria.corese.core.Graph; +import fr.inria.corese.core.load.Load; +import fr.inria.corese.core.load.LoadFormat; +import picocli.CommandLine.Model.CommandSpec; + +/** + * Utility class to load RDF data into a Corese Graph. + */ +public class RdfDataLoader { + + // Command specification + private CommandSpec spec; + private boolean verbose; + + ///////////////// + // Constructor // + ///////////////// + + /** + * Constructor. + * + * @param spec Command specification. 
+ * @param verbose If true, print information about the loaded files. + */ + public RdfDataLoader(CommandSpec spec, boolean verbose) { + this.spec = spec; + this.verbose = verbose; + } + + //////////////////// + // Public methods // + //////////////////// + + /** + * Load RDF data into a Corese Graph. + * + * @param inputs Paths or URLs of the files to load. + * @param inputFormat Input file serialization format. + * @param recursive If true, load RDF data from subdirectories. + * @return The Corese Graph containing the RDF data. + */ + public Graph load(String[] inputs, EnumRdfInputFormat inputFormat, boolean recursive) + throws IllegalArgumentException { + + if (inputs == null || inputs.length == 0) { + return this.LoadFromStdin(inputFormat); + } else { + Graph graph = Graph.create(); + for (String input : inputs) { + Optional url = ConvertString.toUrl(input); + Optional path = ConvertString.toPath(input); + + if (url.isPresent()) { + // Load RDF data from URL + Graph resultGraph = this.loadFromURL(url.get(), inputFormat); + graph.merge(resultGraph); + } else if (path.isPresent()) { + // Load RDF data from file or directory + File file = path.get().toFile(); + if (file.isDirectory()) { + // Load RDF data from directory + Graph resultGraph = this.loadFromDirectory(path.get(), inputFormat, recursive); + graph.merge(resultGraph); + } else { + // Load RDF data from file + Graph resultGraph = this.loadFromFile(path.get(), inputFormat); + graph.merge(resultGraph); + } + } else { + throw new IllegalArgumentException("Invalid input: " + input); + } + } + return graph; + } + } + + ///////////////////// + // Private methods // + ///////////////////// + + /** + * Load RDF data from standard input into a Corese Graph. + * + * @param inputFormat Input file serialization format. + * @return The Corese Graph containing the RDF data. 
+ */ + private Graph LoadFromStdin(EnumRdfInputFormat inputFormat) { + + Graph graph = this.loadFromInputStream(System.in, inputFormat); + + if (this.verbose) { + this.spec.commandLine().getErr().println("Loaded file: standard input"); + } + + return graph; + } + + /** + * Load RDF data from a path or URL into a Corese Graph. + * + * @param url URL of the file to load. + * @param inputFormat Input file serialization format. + * @return The Corese Graph containing the RDF data. + */ + private Graph loadFromURL(URL url, EnumRdfInputFormat inputFormat) { + + // If the input format is not provided, try to determine it from the file + if (inputFormat == null) { + Optional inputFormatOptional = this.guessInputFormat(url.toString()); + if (inputFormatOptional.isPresent()) { + inputFormat = inputFormatOptional.get(); + } + } + + // Load RDF data from URL + InputStream inputStream; + try { + inputStream = url.openStream(); + } catch (IOException e) { + throw new IllegalArgumentException("Failed to open URL: " + url.toString(), e); + } + Graph graph = this.loadFromInputStream(inputStream, inputFormat); + + if (this.verbose) { + this.spec.commandLine().getErr().println("Loaded file: " + url.toString()); + } + return graph; + } + + /** + * Load RDF data from a path to a file into a Corese Graph. + * + * @param path Path of the file to load. + * @param inputFormat Input file serialization format. + * @return The Corese Graph containing the RDF data. 
+ */ + private Graph loadFromFile(Path path, EnumRdfInputFormat inputFormat) { + + // If the input format is not provided, try to determine it from the file + if (inputFormat == null) { + Optional inputFormatOptional = this.guessInputFormat(path.toString()); + if (inputFormatOptional.isPresent()) { + inputFormat = inputFormatOptional.get(); + } + } + + // Load RDF data from file + InputStream inputStream; + try { + inputStream = new FileInputStream(path.toFile()); + } catch (FileNotFoundException e) { + throw new IllegalArgumentException("Failed to open RDF data file: " + path.toString(), e); + } + Graph graph = this.loadFromInputStream(inputStream, inputFormat); + + if (this.verbose) { + this.spec.commandLine().getErr().println("Loaded file: " + path); + } + return graph; + } + + /** + * Load RDF data from a directory into a Corese Graph. + * + * @param path Path of the directory to load. + * @param inputFormat Input file serialization format. + * @param recursive If true, load RDF data from subdirectories. + * @return The Corese Graph containing the RDF data. + */ + private void loadFromDirectoryRecursive(Path path, EnumRdfInputFormat inputFormat, boolean recursive, Graph graph) { + + File[] files = path.toFile().listFiles(); + + if (files != null) { + for (File childFile : files) { + + if (childFile.isDirectory() && recursive) { + this.loadFromDirectoryRecursive(childFile.toPath(), inputFormat, recursive, graph); + } else if (childFile.isFile()) { + Graph resultGraph = this.loadFromFile(childFile.toPath(), inputFormat); + graph.merge(resultGraph); + } + } + } + } + + /** + * Load RDF data from a directory into a Corese Graph. + * + * @param path Path of the directory to load. + * @param inputFormat Input file serialization format. + * @param recursive If true, load RDF data from subdirectories. + * @return The Corese Graph containing the RDF data. 
+ */ + private Graph loadFromDirectory(Path path, EnumRdfInputFormat inputFormat, boolean recursive) { + Graph graph = Graph.create(); + this.loadFromDirectoryRecursive(path, inputFormat, recursive, graph); + + if (this.verbose) { + this.spec.commandLine().getErr().println("Loaded directory: " + path); + } + return graph; + } + + /** + * Load RDF data from an input stream into a Corese Graph. + * + * @param inputStream Input stream of the file to load. + * @param inputFormat Input file serialization format. + * @return The Corese Graph containing the RDF data. + */ + private Graph loadFromInputStream(InputStream inputStream, EnumRdfInputFormat inputFormat) { + + Graph graph = Graph.create(); + Load load = Load.create(graph); + + if (inputFormat == null) { + throw new IllegalArgumentException( + "The input format cannot be automatically determined if you use standard input or na URL. " + + "Please specify the input format with the option -f."); + } else { + try { + load.parse(inputStream, inputFormat.getCoreseCode()); + return graph; + } catch (Exception e) { + throw new IllegalArgumentException("Failed to parse RDF file. Check if file is well-formed and that " + + "the input format is correct. " + e.getMessage(), e); + } + } + } + + /** + * Guess the input format from the file extension. + * + * @param input Input file path or URL. + * @return The guessed input format. 
+ */ + private Optional guessInputFormat(String input) { + + EnumRdfInputFormat inputFormat = EnumRdfInputFormat.create(LoadFormat.getFormat(input)); + + if (inputFormat == null) { + if (this.verbose) { + this.spec.commandLine().getErr().println("Failed to detect input format, defaulting to Turtle"); + } + inputFormat = EnumRdfInputFormat.TURTLE; + } + + if (this.verbose) { + this.spec.commandLine().getErr().println("Format not specified, detected input format: " + inputFormat); + } + return Optional.of(inputFormat); + } +} diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/loader/sparql/SparqlQueryLoader.java b/corese-command/src/main/java/fr/inria/corese/command/utils/loader/sparql/SparqlQueryLoader.java new file mode 100644 index 0000000000..5e0a583209 --- /dev/null +++ b/corese-command/src/main/java/fr/inria/corese/command/utils/loader/sparql/SparqlQueryLoader.java @@ -0,0 +1,127 @@ +package fr.inria.corese.command.utils.loader.sparql; + +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.InputStream; +import java.net.URL; +import java.nio.file.Path; +import java.util.Optional; + +import fr.inria.corese.command.utils.ConvertString; +import fr.inria.corese.command.utils.TestType; +import picocli.CommandLine.Model.CommandSpec; + +/** + * Utility class to load SPARQL queries. + */ +public class SparqlQueryLoader { + + // Command specification + private CommandSpec spec; + private boolean verbose; + + ///////////////// + // Constructor // + ///////////////// + + /** + * Constructor. + * + * @param spec Command specification. + * @param verbose If true, print information about the loaded files. + */ + public SparqlQueryLoader(CommandSpec spec, boolean verbose) { + this.spec = spec; + this.verbose = verbose; + } + + //////////////////// + // Public methods // + //////////////////// + + /** + * Load a SPARQL query from a path, URL, or standard input. + * + * @param input Path, URL, or SPARQL query to load. 
+ * @return The loaded query. + */ + public String load(String input) { + Optional path = ConvertString.toPath(input); + Optional url = ConvertString.toUrl(input); + Boolean isSparqlQuery = TestType.isSparqlQuery(input); + + if (isSparqlQuery) { + return input; + } else if (url.isPresent()) { + return this.loadFromUrl(url.get()); + } else if (path.isPresent()) { + return this.loadFromFile(path.get()); + } else { + throw new IllegalArgumentException("Invalid input: " + input); + } + } + + ///////////////////// + // Private methods // + ///////////////////// + + /** + * Load a SPARQL query from a path. + * + * @param path Path of the file to load. + * @return The loaded query. + */ + private String loadFromFile(Path path) { + InputStream inputStream; + try { + inputStream = new FileInputStream(path.toString()); + } catch (FileNotFoundException e) { + throw new IllegalArgumentException("Failed to open SPARQL query file: " + path.toString(), e); + } + + String query = this.loadFromInputStream(inputStream); + + if (this.verbose) { + this.spec.commandLine().getErr().println("Loaded SPAQRL query file: " + path.toString()); + } + + return query; + } + + /** + * Load a SPARQL query from a URL. + * + * @param url URL of the file to load. + * @return The loaded query. + */ + private String loadFromUrl(URL url) { + InputStream inputStream; + try { + inputStream = url.openStream(); + } catch (Exception e) { + throw new IllegalArgumentException("Failed to open SPARQL query file: " + url.toString(), e); + } + + String query = this.loadFromInputStream(inputStream); + + if (this.verbose) { + this.spec.commandLine().getErr().println("Loaded SPARQL query file: " + url.toString()); + } + + return query; + } + + /** + * Load a SPARQL query from standard input. + * + * @param inputStream Input stream of the file to load. + * @return The loaded query. 
+ */ + private String loadFromInputStream(InputStream inputStream) { + try { + return new String(inputStream.readAllBytes()); + } catch (Exception e) { + throw new IllegalArgumentException("Failed to read SPARQL query from input stream", e); + } + } +} diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/rdf/RdfDataExporter.java b/corese-command/src/main/java/fr/inria/corese/command/utils/rdf/RdfDataExporter.java deleted file mode 100644 index 5fdc22a16d..0000000000 --- a/corese-command/src/main/java/fr/inria/corese/command/utils/rdf/RdfDataExporter.java +++ /dev/null @@ -1,128 +0,0 @@ -package fr.inria.corese.command.utils.rdf; - -import java.io.FileOutputStream; -import java.io.OutputStream; -import java.nio.file.Path; - -import fr.inria.corese.command.utils.format.EnumOutputFormat; -import fr.inria.corese.core.Graph; -import fr.inria.corese.core.print.JSONLDFormat; -import fr.inria.corese.core.print.NQuadsFormat; -import fr.inria.corese.core.print.NTriplesFormat; -import fr.inria.corese.core.print.RDFFormat; -import fr.inria.corese.core.print.TripleFormat; -import picocli.CommandLine.Model.CommandSpec; - -/** - * Utility class to export RDF data from a Corese Graph. - */ -public class RdfDataExporter { - - /** - * Export RDF data from a Corese Graph to a file. - * - * @param path Path of the file to export to. - * @param outputFormat Output file serialization format. - * @param graph Corese Graph to export RDF data from. - * @param spec Command specification. - * @param verbose If true, print information about the exported file. 
- */ - public static void exportToFile( - Path path, - EnumOutputFormat outputFormat, - Graph graph, - CommandSpec spec, - boolean verbose) { - - OutputStream outputStream; - - try { - outputStream = new FileOutputStream(path.toString()); - } catch (Exception e) { - throw new IllegalArgumentException("Failed to open export file: " + path.toString(), e); - } - - exportToOutputStream(outputStream, outputFormat, graph); - - if (verbose) { - spec.commandLine().getErr().println("Exported RDF data with format: " + outputFormat); - spec.commandLine().getErr().println("Exported RDF data to file: " + path.toString()); - } - } - - /** - * Export RDF data from a Corese Graph to standard output. - * - * @param outputFormat Output file serialization format. - * @param graph Corese Graph to export RDF data from. - * @param spec Command specification. - * @param verbose If true, print information about the exported file. - */ - public static void exportToStdout( - EnumOutputFormat outputFormat, - Graph graph, - CommandSpec spec, - boolean verbose) { - - exportToOutputStream(System.out, outputFormat, graph); - - if (verbose) { - spec.commandLine().getErr().println("Exported RDF data with format: " + outputFormat); - spec.commandLine().getErr().println("Exported RDF data to: standard output"); - } - } - - /** - * Export RDF data from a Corese Graph to a output stream. - * - * @param outputStream Output stream to export to. - * @param outputFormat Output file serialization format. - * @param graph Corese Graph to export RDF data from. 
- */ - private static void exportToOutputStream( - OutputStream outputStream, - EnumOutputFormat outputFormat, - Graph graph) { - - try { - switch (outputFormat) { - case RDFXML: - case RDF: - case APPLICATION_RDF_XML: - RDFFormat.create(graph).write(outputStream); - break; - case TURTLE: - case TTL: - case TEXT_TURTLE: - TripleFormat.create(graph).write(outputStream); - break; - case TRIG: - case APPLICATION_TRIG: - TripleFormat.create(graph, true).write(outputStream); - break; - case JSONLD: - case APPLICATION_LD_JSON: - JSONLDFormat.create(graph).write(outputStream); - break; - case NTRIPLES: - case NT: - case APPLICATION_NTRIPLES: - NTriplesFormat.create(graph).write(outputStream); - break; - case NQUADS: - case NQ: - case APPLICATION_NQUADS: - NQuadsFormat.create(graph).write(outputStream); - break; - default: - throw new IllegalArgumentException("Unsupported output format: " + outputFormat); - } - - outputStream.flush(); - - } catch (Exception e) { - throw new IllegalArgumentException("Failed to write to RDF data to output stream", e); - } - } - -} diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/rdf/RdfDataLoader.java b/corese-command/src/main/java/fr/inria/corese/command/utils/rdf/RdfDataLoader.java deleted file mode 100644 index 13ec5b5628..0000000000 --- a/corese-command/src/main/java/fr/inria/corese/command/utils/rdf/RdfDataLoader.java +++ /dev/null @@ -1,208 +0,0 @@ -package fr.inria.corese.command.utils.rdf; - -import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.io.InputStream; -import java.net.URL; -import java.nio.file.Path; -import java.util.Optional; - -import fr.inria.corese.command.utils.format.EnumInputFormat; -import fr.inria.corese.core.Graph; -import fr.inria.corese.core.load.Load; -import fr.inria.corese.core.load.LoadFormat; -import picocli.CommandLine.Model.CommandSpec; - -/** - * Utility class to load RDF data into a Corese Graph. 
- */ -public class RdfDataLoader { - - /** - * Load RDF data from standard input into a Corese Graph. - * - * @param inputFormat Input file serialization format. - * @param graph Corese Graph to load RDF data into. - * @param spec Command specification. - * @param verbose If true, print information about the loaded files. - */ - public static void LoadFromStdin( - EnumInputFormat inputFormat, - Graph graph, - CommandSpec spec, - boolean verbose) { - - RdfDataLoader.loadFromInputStream(System.in, inputFormat, graph); - - if (verbose) { - spec.commandLine().getErr().println("Loaded file: standard input"); - } - } - - /** - * Load RDF data from a path or URL into a Corese Graph. - * - * @param url URL of the file to load. - * @param inputFormat Input file serialization format. - * @param graph Corese Graph to load RDF data into. - * @param spec Command specification. - * @param verbose If true, print information about the loaded files. - */ - public static void loadFromURL( - URL url, - EnumInputFormat inputFormat, - Graph graph, - CommandSpec spec, - boolean verbose) { - - // If the input format is not provided, try to determine it from the - // file - if (inputFormat == null) { - Optional inputFormatOptional = RdfDataLoader.guessInputFormat(url.toString(), spec, - verbose); - if (inputFormatOptional.isPresent()) { - inputFormat = inputFormatOptional.get(); - } - } - - // Load RDF data from URL - InputStream inputStream; - try { - inputStream = url.openStream(); - } catch (IOException e) { - throw new IllegalArgumentException("Failed to open URL: " + url.toString(), e); - } - RdfDataLoader.loadFromInputStream(inputStream, inputFormat, graph); - - if (verbose) { - spec.commandLine().getErr().println("Loaded file: " + url.toString()); - } - } - - /** - * Load RDF data from a path to a file into a Corese Graph. - * - * @param path Path of the file to load. - * @param inputFormat Input file serialization format. - * @param graph Corese Graph to load RDF data into. 
- * @param spec Command specification. - * @param verbose If true, print information about the loaded files. - */ - public static void loadFromFile( - Path path, - EnumInputFormat inputFormat, - Graph graph, - CommandSpec spec, - Boolean verbose) { - - // If the input format is not provided, try to determine it from the - // file - if (inputFormat == null) { - Optional inputFormatOptional = RdfDataLoader.guessInputFormat(path.toString(), spec, - verbose); - if (inputFormatOptional.isPresent()) { - inputFormat = inputFormatOptional.get(); - } - } - - // Load RDF data from file - InputStream inputStream; - try { - inputStream = new FileInputStream(path.toFile()); - } catch (FileNotFoundException e) { - throw new IllegalArgumentException("Failed to open RDF data file: " + path.toString(), e); - } - RdfDataLoader.loadFromInputStream(inputStream, inputFormat, graph); - - if (verbose) { - spec.commandLine().getErr().println("Loaded file: " + path); - } - } - - /** - * Load RDF data from a directory into a Corese Graph. - * - * @param path Path of the directory to load. - * @param inputFormat Input file serialization format. - * @param graph Corese Graph to load RDF data into. - * @param recursive If true, load RDF data from subdirectories. - * @param spec Command specification. - * @param verbose If true, print information about the loaded files. - */ - public static void loadFromDirectory( - Path path, - EnumInputFormat inputFormat, - Graph graph, - boolean recursive, - CommandSpec spec, - boolean verbose) { - - File[] files = path.toFile().listFiles(); - - if (files != null) { - for (File childFile : files) { - - if (childFile.isDirectory() && recursive) { - RdfDataLoader.loadFromDirectory(childFile.toPath(), inputFormat, graph, recursive, spec, verbose); - } else if (childFile.isFile()) { - RdfDataLoader.loadFromFile(childFile.toPath(), inputFormat, graph, spec, verbose); - } - } - } - } - - /** - * Load RDF data from an input stream into a Corese Graph. 
- * - * @param inputStream Input stream of the file to load. - * @param inputFormat Input file serialization format. - * @param graph Corese Graph to load RDF data into. - */ - private static void loadFromInputStream(InputStream inputStream, EnumInputFormat inputFormat, Graph graph) { - - Load load = Load.create(graph); - - if (inputFormat == null) { - throw new IllegalArgumentException( - "The input format cannot be automatically determined if you use standard input or na URL. " - + "Please specify the input format with the option -f."); - } else { - try { - load.parse(inputStream, EnumInputFormat.toLoaderValue(inputFormat)); - } catch (Exception e) { - throw new IllegalArgumentException("Failed to parse RDF file. Check if file is well-formed and that " - + "the input format is correct. " + e.getMessage(), e); - } - } - } - - /** - * Guess the input format from the file extension. - * - * @param input Input file path or URL. - * @param spec Command specification. - * @param verbose If true, print information about the loaded files. - * @return The guessed input format. 
- */ - private static Optional guessInputFormat( - String input, - CommandSpec spec, - boolean verbose) { - - EnumInputFormat inputFormat = EnumInputFormat.fromLoaderValue(LoadFormat.getFormat(input)); - - if (inputFormat == null) { - if (verbose) { - spec.commandLine().getErr().println("Failed to detect input format, defaulting to Turtle"); - } - inputFormat = EnumInputFormat.TURTLE; - } - - if (verbose) { - spec.commandLine().getErr().println("Format not specified, detected input format: " + inputFormat); - } - return Optional.of(inputFormat); - } -} diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/sparql/SparqlQueryLoader.java b/corese-command/src/main/java/fr/inria/corese/command/utils/sparql/SparqlQueryLoader.java deleted file mode 100644 index eaf6ce509c..0000000000 --- a/corese-command/src/main/java/fr/inria/corese/command/utils/sparql/SparqlQueryLoader.java +++ /dev/null @@ -1,97 +0,0 @@ -package fr.inria.corese.command.utils.sparql; - -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.InputStream; -import java.net.URL; -import java.nio.file.Path; - -import picocli.CommandLine.Model.CommandSpec; - -/** - * Utility class to load SPARQL queries. - */ -public class SparqlQueryLoader { - - /** - * Load a SPARQL query from a path. - * - * @param path Path of the file to load. - * @param spec Command specification. - * @param verbose If true, print information about the loaded files. - * @return The loaded query. 
- */ - public static String loadFromFile(Path path, CommandSpec spec, boolean verbose) { - InputStream inputStream; - try { - inputStream = new FileInputStream(path.toString()); - } catch (FileNotFoundException e) { - throw new IllegalArgumentException("Failed to open SPARQL query file: " + path.toString(), e); - } - - String query = SparqlQueryLoader.loadFromInputStreamPrivate(inputStream); - - if (verbose) { - spec.commandLine().getErr().println("Loaded SPAQRL query file: " + path.toString()); - } - - return query; - } - - /** - * Load a SPARQL query from a URL. - * - * @param url URL of the file to load. - * @param spec Command specification. - * @param verbose If true, print information about the loaded files. - * @return The loaded query. - */ - public static String loadFromUrl(URL url, CommandSpec spec, boolean verbose) { - InputStream inputStream; - try { - inputStream = url.openStream(); - } catch (Exception e) { - throw new IllegalArgumentException("Failed to open SPARQL query file: " + url.toString(), e); - } - - String query = SparqlQueryLoader.loadFromInputStreamPrivate(inputStream); - - if (verbose) { - spec.commandLine().getErr().println("Loaded SPARQL query file: " + url.toString()); - } - - return query; - } - - /** - * Load a SPARQL query from stream input. - * - * @param inputStream Input stream to load. - * @param spec Command specification. - * @param verbose If true, print information about the loaded files. - * @return The loaded query. - */ - public static String loadFromInputStream(InputStream inputStream, CommandSpec spec, boolean verbose) { - String query = SparqlQueryLoader.loadFromInputStreamPrivate(inputStream); - - if (verbose) { - spec.commandLine().getErr().println("Loaded SPARQL query from input stream"); - } - - return query; - } - - /** - * Load a SPARQL query from standard input. - * - * @param inputStream Input stream of the file to load. - * @return The loaded query. 
- */ - private static String loadFromInputStreamPrivate(InputStream inputStream) { - try { - return new String(inputStream.readAllBytes()); - } catch (Exception e) { - throw new IllegalArgumentException("Failed to read SPARQL query from input stream", e); - } - } -} diff --git a/corese-command/src/main/java/fr/inria/corese/command/utils/sparql/SparqlResultExporter.java b/corese-command/src/main/java/fr/inria/corese/command/utils/sparql/SparqlResultExporter.java deleted file mode 100644 index f82dc4f392..0000000000 --- a/corese-command/src/main/java/fr/inria/corese/command/utils/sparql/SparqlResultExporter.java +++ /dev/null @@ -1,78 +0,0 @@ - -package fr.inria.corese.command.utils.sparql; - -import java.nio.file.Path; - -import fr.inria.corese.command.utils.format.EnumResultFormat; -import fr.inria.corese.core.print.ResultFormat; -import fr.inria.corese.kgram.core.Mappings; -import picocli.CommandLine.Model.CommandSpec; - -/** - * Utility class to export SPARQL results. - */ -public class SparqlResultExporter { - - /** - * Export SPARQL results to a file. - * - * @param path Path of the file to export to. - * @param resultFormat Output file serialization format. - * @param map SPARQL results to export. - * @param spec Command specification. - * @param verbose If true, print information about the exported file. 
- */ - public static void exportToFile( - Path path, - EnumResultFormat resultFormat, - Mappings map, - CommandSpec spec, - boolean verbose) { - - ResultFormat resultFormater = ResultFormat.create(map); - resultFormater.setSelectFormat(resultFormat.getValue()); - resultFormater.setConstructFormat(resultFormat.getValue()); - - try { - resultFormater.write(path.toString()); - } catch (Exception e) { - throw new IllegalArgumentException("Failed to open export file: " + path.toString(), e); - } - - if (verbose) { - spec.commandLine().getErr().println("Exported SPARQL result with format: " + resultFormat); - spec.commandLine().getErr().println("Exported SPARQL result to file: " + path.toString()); - } - } - - /** - * Export SPARQL results to standard output. - * - * @param resultFormat Output file serialization format. - * @param map SPARQL results to export. - * @param spec Command specification. - * @param verbose If true, print information about the exported file. - */ - public static void exportToStdout( - EnumResultFormat resultFormat, - Mappings map, - CommandSpec spec, - boolean verbose) { - - ResultFormat resultFormater = ResultFormat.create(map); - resultFormater.setSelectFormat(resultFormat.getValue()); - resultFormater.setConstructFormat(resultFormat.getValue()); - - try { - String str = resultFormater.toString(); - spec.commandLine().getOut().println(str); - } catch (Exception e) { - throw new IllegalArgumentException("Failed to write to standard output", e); - } - - if (verbose) { - spec.commandLine().getErr().println("Exported SPARQL result with format: " + resultFormat); - spec.commandLine().getErr().println("Exported SPARQL result to: standard output"); - } - } -} \ No newline at end of file diff --git a/corese-command/src/test/java/fr/inria/corese/command/programs/CanonicalizeTest.java b/corese-command/src/test/java/fr/inria/corese/command/programs/CanonicalizeTest.java new file mode 100644 index 0000000000..2d60ddb739 --- /dev/null +++ 
b/corese-command/src/test/java/fr/inria/corese/command/programs/CanonicalizeTest.java @@ -0,0 +1,185 @@ +package fr.inria.corese.command.programs; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.io.IOException; +import java.io.PrintWriter; +import java.io.StringWriter; +import java.nio.file.Paths; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import picocli.CommandLine; + +public class CanonicalizeTest { + + private Canonicalize canonicalize = new Canonicalize(); + private CommandLine cmd = new CommandLine(canonicalize); + + private StringWriter out = new StringWriter(); + private StringWriter err = new StringWriter(); + + private String inputPath = CanonicalizeTest.class + .getResource("/fr/inria/corese/command/programs/canonicalize/input/") + .getPath(); + private String referencesPath = CanonicalizeTest.class + .getResource("/fr/inria/corese/command/programs/canonicalize/references/") + .getPath(); + private String resultPath = CanonicalizeTest.class + .getResource("/fr/inria/corese/command/programs/canonicalize/results/") + .getPath(); + + @BeforeEach + public void setUp() { + PrintWriter out = new PrintWriter(this.out); + PrintWriter err = new PrintWriter(this.err); + cmd.setOut(out); + cmd.setErr(err); + } + + private String getStringContent(String path) { + try { + return new String(java.nio.file.Files.readAllBytes(Paths.get(path))); + } catch (IOException e) { + e.printStackTrace(); + throw new RuntimeException(e); + } + } + + @Test + public void test1InputFile() { + String input = inputPath + "beatles.ttl"; + String expected = referencesPath + "beatles.nq"; + String output = resultPath + "beatles.nq"; + + String[] args = { "-i", input, "-a", "rdfc-1.0-sha256", "-o", output }; + int exitCode = cmd.execute(args); + + assertEquals(0, exitCode); + assertEquals("", err.toString()); + assertEquals("", out.toString()); + 
assertEquals(getStringContent(expected), getStringContent(output)); + } + + @Test + public void test1Url() { + String input = "https://files.inria.fr/corese/data/unit-test/beatles.ttl"; + String expected = referencesPath + "beatles.nq"; + String output = resultPath + "beatles.nq"; + + String[] args = { "-i", input, "-a", "rdfc-1.0-sha256", "-o", output }; + int exitCode = cmd.execute(args); + + assertEquals(0, exitCode); + assertEquals("", err.toString()); + assertEquals("", out.toString()); + assertEquals(getStringContent(expected), getStringContent(output)); + } + + @Test + public void test1Directory() { + String input = inputPath; + String expected = referencesPath + "beatles.nq"; + String output = resultPath + "beatles.nq"; + + String[] args = { "-i", input, "-a", "rdfc-1.0-sha256", "-o", output }; + int exitCode = cmd.execute(args); + + assertEquals(0, exitCode); + assertEquals("", err.toString()); + assertEquals("", out.toString()); + assertEquals(getStringContent(expected), getStringContent(output)); + } + + @Test + public void test1DirectoryRecursive() { + String input = inputPath; + String expected = referencesPath + "recursive.nq"; + String output = resultPath + "recursive.nq"; + + String[] args = { "-i", input, "-a", "rdfc-1.0-sha256", "-o", output, "-R" }; + int exitCode = cmd.execute(args); + + assertEquals(0, exitCode); + assertEquals("", err.toString()); + assertEquals("", out.toString()); + assertEquals(getStringContent(expected), getStringContent(output)); + } + + @Test + public void testMultipleSources() { + String input1 = inputPath + "beatles.ttl"; + String input2 = Paths.get(inputPath, "recursive-level1", "person.ttl").toString(); + String expected = referencesPath + "multiple.nq"; + String output = resultPath + "multiple.nq"; + + String[] args = { "-i", input1, input2, "-a", "rdfc-1.0-sha256", "-o", output }; + int exitCode = cmd.execute(args); + + assertEquals(0, exitCode); + assertEquals("", err.toString()); + assertEquals("", 
out.toString()); + assertEquals(getStringContent(expected), getStringContent(output)); + } + + @Test + public void testInputFormat() { + String input = inputPath + "beatles.ttl"; + String expected = referencesPath + "beatles.nq"; + String output = resultPath + "beatles.nq"; + + String[] args = { "-i", input, "-f", "text/turtle", "-a", "rdfc-1.0-sha256", "-o", output }; + int exitCode = cmd.execute(args); + + assertEquals(0, exitCode); + assertEquals("", err.toString()); + assertEquals("", out.toString()); + assertEquals(getStringContent(expected), getStringContent(output)); + } + + @Test + public void testInputBadFormat() { + String input = inputPath + "beatles.ttl"; + String output = resultPath + "beatles.nq"; + + String[] args = { "-i", input, "-f", "rdfxml", "-a", "rdfc-1.0-sha256", "-o", output }; + int exitCode = cmd.execute(args); + + assertEquals(1, exitCode); + assertEquals("", out.toString()); + assertTrue(err.toString().contains("Failed to parse RDF file.")); + } + + @Test + public void testSha384() { + String input = inputPath + "beatles.ttl"; + String expected = referencesPath + "beatles-sha384.nq"; + String output = resultPath + "beatles-sha384.nq"; + + String[] args = { "-i", input, "-a", "rdfc-1.0-sha384", "-o", output }; + int exitCode = cmd.execute(args); + + assertEquals(0, exitCode); + assertEquals("", err.toString()); + assertEquals("", out.toString()); + assertEquals(getStringContent(expected), getStringContent(output)); + } + + @Test + public void testDefaultAlgorithm() { + String input = inputPath + "beatles.ttl"; + String expected = referencesPath + "beatles.nq"; + String output = resultPath + "beatles.nq"; + + String[] args = { "-i", input, "-o", output }; + int exitCode = cmd.execute(args); + + assertEquals(0, exitCode); + assertEquals("", err.toString()); + assertEquals("", out.toString()); + assertEquals(getStringContent(expected), getStringContent(output)); + } + +} diff --git 
a/corese-command/src/test/java/fr/inria/corese/command/programs/ConvertTest.java b/corese-command/src/test/java/fr/inria/corese/command/programs/ConvertTest.java index eefdcb91b5..c306ca78ba 100644 --- a/corese-command/src/test/java/fr/inria/corese/command/programs/ConvertTest.java +++ b/corese-command/src/test/java/fr/inria/corese/command/programs/ConvertTest.java @@ -1,25 +1,23 @@ package fr.inria.corese.command.programs; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; -import java.io.BufferedReader; -import java.io.FileReader; -import java.io.IOException; import java.io.PrintWriter; import java.io.StringWriter; import java.nio.file.Path; import java.nio.file.Paths; -import java.util.HashSet; -import java.util.Set; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; -import fr.inria.corese.command.utils.format.EnumInputFormat; -import fr.inria.corese.command.utils.rdf.RdfDataLoader; +import fr.inria.corese.command.utils.loader.rdf.EnumRdfInputFormat; +import fr.inria.corese.command.utils.loader.rdf.RdfDataLoader; import fr.inria.corese.core.Graph; +import fr.inria.corese.core.load.Load; +import fr.inria.corese.core.print.CanonicalRdf10Format; import picocli.CommandLine; public class ConvertTest { @@ -30,7 +28,7 @@ public class ConvertTest { private StringWriter out = new StringWriter(); private StringWriter err = new StringWriter(); - private String inputFile = ConvertTest.class + private String inputPath = ConvertTest.class .getResource("/fr/inria/corese/command/programs/convert/input/") .getPath(); private String referencesPath = ConvertTest.class @@ -40,39 +38,21 @@ 
public class ConvertTest { .getResource("/fr/inria/corese/command/programs/convert/results/") .getPath(); - private boolean compareFiles(String filePath1, String filePath2) { - // Create two sets to store the lines of each file - Set file1Lines = new HashSet<>(); - Set file2Lines = new HashSet<>(); + private String canonicalize(String path) { + Graph graph = Graph.create(); + Load ld = Load.create(graph); try { - // Read the first file and store each line in the set - try (BufferedReader reader = new BufferedReader(new FileReader(filePath1))) { - String line; - while ((line = reader.readLine()) != null) { - file1Lines.add(line); - } - } - - // Read the second file and store each line in the set - try (BufferedReader reader = new BufferedReader(new FileReader(filePath2))) { - String line; - while ((line = reader.readLine()) != null) { - file2Lines.add(line); - } - } - - // Check if both sets are equal - return file1Lines.equals(file2Lines); - - } catch (IOException e) { + ld.parse(path, ""); + } catch (Exception e) { e.printStackTrace(); - return false; // Return false if an error occurs } + + return CanonicalRdf10Format.create(graph).toString(); } - @Before - public void setUp() throws Exception { + @BeforeEach + public void setUp() { PrintWriter out = new PrintWriter(this.out); PrintWriter err = new PrintWriter(this.err); cmd.setOut(out); @@ -81,7 +61,7 @@ public void setUp() throws Exception { @Test public void testConvertTurtleToxml() { - String pathinputBeatlesTTL = Paths.get(inputFile, "beatles.ttl").toString(); + String pathinputBeatlesTTL = Paths.get(inputPath, "beatles.ttl").toString(); String pathRefBeatlesXML = Paths.get(referencesPath, "ttl.beatles.rdf").toString(); String pathOutBeatlesXML = Paths.get(resultPath, "ttl.beatles.rdf").toString(); @@ -90,12 +70,14 @@ public void testConvertTurtleToxml() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesXML, 
pathOutBeatlesXML)); + assertEquals(canonicalize(pathRefBeatlesXML), canonicalize(pathOutBeatlesXML)); + assertNotEquals("", pathOutBeatlesXML); + } @Test public void testConvertTurtleToJsonld() { - String pathInputBeatlesTTL = Paths.get(inputFile, "beatles.ttl").toString(); + String pathInputBeatlesTTL = Paths.get(inputPath, "beatles.ttl").toString(); String pathRefBeatlesJSON = Paths.get(referencesPath, "ttl.beatles.jsonld").toString(); String pathOutBeatlesJSON = Paths.get(resultPath, "ttl.beatles.jsonld").toString(); @@ -104,12 +86,14 @@ public void testConvertTurtleToJsonld() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesJSON, pathOutBeatlesJSON)); + assertEquals(canonicalize(pathRefBeatlesJSON), canonicalize(pathOutBeatlesJSON)); + assertNotEquals("", pathOutBeatlesJSON); + } @Test public void testConvertTurtleToTrig() { - String pathInputBeatlesTTL = Paths.get(inputFile, "beatles.ttl").toString(); + String pathInputBeatlesTTL = Paths.get(inputPath, "beatles.ttl").toString(); String pathRefBeatlesTRIG = Paths.get(referencesPath, "ttl.beatles.trig").toString(); String pathOutBeatlesTRIG = Paths.get(resultPath, "ttl.beatles.trig").toString(); @@ -118,12 +102,14 @@ public void testConvertTurtleToTrig() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesTRIG, pathOutBeatlesTRIG)); + assertEquals(canonicalize(pathRefBeatlesTRIG), canonicalize(pathOutBeatlesTRIG)); + assertNotEquals("", pathOutBeatlesTRIG); + } @Test public void testConvertTurtleToTurtle() { - String pathInputBeatlesTTL = Paths.get(inputFile, "beatles.ttl").toString(); + String pathInputBeatlesTTL = Paths.get(inputPath, "beatles.ttl").toString(); String pathRefBeatlesTTL = Paths.get(referencesPath, "ttl.beatles.ttl").toString(); String pathOutBeatlesTTL = Paths.get(resultPath, "ttl.beatles.ttl").toString(); @@ -132,12 
+118,14 @@ public void testConvertTurtleToTurtle() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesTTL, pathOutBeatlesTTL)); + assertEquals(canonicalize(pathRefBeatlesTTL), canonicalize(pathOutBeatlesTTL)); + assertNotEquals("", pathOutBeatlesTTL); + } @Test public void testConvertTurtleToNt() { - String pathInputBeatlesTTL = Paths.get(inputFile, "beatles.ttl").toString(); + String pathInputBeatlesTTL = Paths.get(inputPath, "beatles.ttl").toString(); String pathRefBeatlesNT = Paths.get(referencesPath, "ttl.beatles.nt").toString(); String pathOutBeatlesNT = Paths.get(resultPath, "ttl.beatles.nt").toString(); @@ -146,12 +134,14 @@ public void testConvertTurtleToNt() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesNT, pathOutBeatlesNT)); + assertEquals(canonicalize(pathRefBeatlesNT), canonicalize(pathOutBeatlesNT)); + assertNotEquals("", pathOutBeatlesNT); + } @Test public void testConvertTurtleToNq() { - String pathInputBeatlesTTL = Paths.get(inputFile, "beatles.ttl").toString(); + String pathInputBeatlesTTL = Paths.get(inputPath, "beatles.ttl").toString(); String pathRefBeatlesNQ = Paths.get(referencesPath, "ttl.beatles.nq").toString(); String pathOutBeatlesNQ = Paths.get(resultPath, "ttl.beatles.nq").toString(); @@ -160,12 +150,14 @@ public void testConvertTurtleToNq() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesNQ, pathOutBeatlesNQ)); + assertEquals(canonicalize(pathRefBeatlesNQ), canonicalize(pathOutBeatlesNQ)); + assertNotEquals("", pathOutBeatlesNQ); + } @Test public void testConvertXmltoXml() { - String pathInputBeatlesXML = Paths.get(inputFile, "beatles.rdf").toString(); + String pathInputBeatlesXML = Paths.get(inputPath, "beatles.rdf").toString(); String pathRefBeatlesXML = 
Paths.get(referencesPath, "rdf.beatles.rdf").toString(); String pathOutBeatlesXML = Paths.get(resultPath, "rdf.beatles.rdf").toString(); @@ -174,12 +166,14 @@ public void testConvertXmltoXml() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesXML, pathOutBeatlesXML)); + assertEquals(canonicalize(pathRefBeatlesXML), canonicalize(pathOutBeatlesXML)); + assertNotEquals("", pathOutBeatlesXML); + } @Test public void testConvertXmlToJsonld() { - String pathInputBeatlesXML = Paths.get(inputFile, "beatles.rdf").toString(); + String pathInputBeatlesXML = Paths.get(inputPath, "beatles.rdf").toString(); String pathRefBeatlesJSON = Paths.get(referencesPath, "rdf.beatles.jsonld").toString(); String pathOutBeatlesJSON = Paths.get(resultPath, "rdf.beatles.jsonld").toString(); @@ -188,12 +182,14 @@ public void testConvertXmlToJsonld() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesJSON, pathOutBeatlesJSON)); + assertEquals(canonicalize(pathRefBeatlesJSON), canonicalize(pathOutBeatlesJSON)); + assertNotEquals("", pathOutBeatlesJSON); + } @Test public void testConvertXmlToTrig() { - String pathInputBeatlesXML = Paths.get(inputFile, "beatles.rdf").toString(); + String pathInputBeatlesXML = Paths.get(inputPath, "beatles.rdf").toString(); String pathRefBeatlesTRIG = Paths.get(referencesPath, "rdf.beatles.trig").toString(); String pathOutBeatlesTRIG = Paths.get(resultPath, "rdf.beatles.trig").toString(); @@ -202,12 +198,14 @@ public void testConvertXmlToTrig() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesTRIG, pathOutBeatlesTRIG)); + assertEquals(canonicalize(pathRefBeatlesTRIG), canonicalize(pathOutBeatlesTRIG)); + assertNotEquals("", pathOutBeatlesTRIG); + } @Test public void testConvertXmlTiTurtle() { - String 
pathInputBeatlesXML = Paths.get(inputFile, "beatles.rdf").toString(); + String pathInputBeatlesXML = Paths.get(inputPath, "beatles.rdf").toString(); String pathRefBeatlesTTL = Paths.get(referencesPath, "rdf.beatles.ttl").toString(); String pathOutBeatlesTTL = Paths.get(resultPath, "rdf.beatles.ttl").toString(); @@ -216,12 +214,14 @@ public void testConvertXmlTiTurtle() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesTTL, pathOutBeatlesTTL)); + assertEquals(canonicalize(pathRefBeatlesTTL), canonicalize(pathOutBeatlesTTL)); + assertNotEquals("", pathOutBeatlesTTL); + } @Test public void testConvertXmlToNt() { - String pathInputBeatlesXML = Paths.get(inputFile, "beatles.rdf").toString(); + String pathInputBeatlesXML = Paths.get(inputPath, "beatles.rdf").toString(); String pathRefBeatlesNT = Paths.get(referencesPath, "rdf.beatles.nt").toString(); String pathOutBeatlesNT = Paths.get(resultPath, "rdf.beatles.nt").toString(); @@ -230,12 +230,14 @@ public void testConvertXmlToNt() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesNT, pathOutBeatlesNT)); + assertEquals(canonicalize(pathRefBeatlesNT), canonicalize(pathOutBeatlesNT)); + assertNotEquals("", pathOutBeatlesNT); + } @Test public void testConvertXmlToNq() { - String pathInputBeatlesXML = Paths.get(inputFile, "beatles.rdf").toString(); + String pathInputBeatlesXML = Paths.get(inputPath, "beatles.rdf").toString(); String pathRefBeatlesNQ = Paths.get(referencesPath, "rdf.beatles.nq").toString(); String pathOutBeatlesNQ = Paths.get(resultPath, "rdf.beatles.nq").toString(); @@ -244,12 +246,14 @@ public void testConvertXmlToNq() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesNQ, pathOutBeatlesNQ)); + assertEquals(canonicalize(pathRefBeatlesNQ), 
canonicalize(pathOutBeatlesNQ)); + assertNotEquals("", pathOutBeatlesNQ); + } @Test public void testConvertTrigToXml() { - String pathInputBeatlesTRIG = Paths.get(inputFile, "beatles.trig").toString(); + String pathInputBeatlesTRIG = Paths.get(inputPath, "beatles.trig").toString(); String pathRefBeatlesXML = Paths.get(referencesPath, "trig.beatles.rdf").toString(); String pathOutBeatlesXML = Paths.get(resultPath, "trig.beatles.rdf").toString(); @@ -258,12 +262,14 @@ public void testConvertTrigToXml() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesXML, pathOutBeatlesXML)); + assertEquals(canonicalize(pathRefBeatlesXML), canonicalize(pathOutBeatlesXML)); + assertNotEquals("", pathOutBeatlesXML); + } @Test public void testConvertTrigToJsonld() { - String pathInputBeatlesTRIG = Paths.get(inputFile, "beatles.trig").toString(); + String pathInputBeatlesTRIG = Paths.get(inputPath, "beatles.trig").toString(); String pathRefBeatlesJSON = Paths.get(referencesPath, "trig.beatles.jsonld").toString(); String pathOutBeatlesJSON = Paths.get(resultPath, "trig.beatles.jsonld").toString(); @@ -272,12 +278,14 @@ public void testConvertTrigToJsonld() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesJSON, pathOutBeatlesJSON)); + assertEquals(canonicalize(pathRefBeatlesJSON), canonicalize(pathOutBeatlesJSON)); + assertNotEquals("", pathOutBeatlesJSON); + } @Test public void testConvertTrigToTrig() { - String pathInputBeatlesTRIG = Paths.get(inputFile, "beatles.trig").toString(); + String pathInputBeatlesTRIG = Paths.get(inputPath, "beatles.trig").toString(); String pathExpectBeatlesTRIG = Paths.get(referencesPath, "trig.beatles.trig").toString(); String pathOutBeatlesTRIG = Paths.get(resultPath, "trig.beatles.trig").toString(); @@ -286,12 +294,14 @@ public void testConvertTrigToTrig() { assertEquals(0, 
exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathExpectBeatlesTRIG, pathOutBeatlesTRIG)); + assertEquals(canonicalize(pathExpectBeatlesTRIG), canonicalize(pathOutBeatlesTRIG)); + assertNotEquals("", pathOutBeatlesTRIG); + } @Test public void testConvertTrigToTurtle() { - String pathInputBeatlesTRIG = Paths.get(inputFile, "beatles.trig").toString(); + String pathInputBeatlesTRIG = Paths.get(inputPath, "beatles.trig").toString(); String pathRefBeatlesTTL = Paths.get(referencesPath, "trig.beatles.ttl").toString(); String pathOutBeatlesTTL = Paths.get(resultPath, "trig.beatles.ttl").toString(); @@ -300,12 +310,14 @@ public void testConvertTrigToTurtle() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesTTL, pathOutBeatlesTTL)); + assertEquals(canonicalize(pathRefBeatlesTTL), canonicalize(pathOutBeatlesTTL)); + assertNotEquals("", pathOutBeatlesTTL); + } @Test public void testConvertTrigToNt() { - String pathInputBeatlesTRIG = Paths.get(inputFile, "beatles.trig").toString(); + String pathInputBeatlesTRIG = Paths.get(inputPath, "beatles.trig").toString(); String pathRefBeatlesNT = Paths.get(referencesPath, "trig.beatles.nt").toString(); String pathOutBeatlesNT = Paths.get(resultPath, "trig.beatles.nt").toString(); @@ -314,12 +326,14 @@ public void testConvertTrigToNt() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesNT, pathOutBeatlesNT)); + assertEquals(canonicalize(pathRefBeatlesNT), canonicalize(pathOutBeatlesNT)); + assertNotEquals("", pathOutBeatlesNT); + } @Test public void testConvertTrigToNq() { - String pathInputBeatlesTRIG = Paths.get(inputFile, "beatles.trig").toString(); + String pathInputBeatlesTRIG = Paths.get(inputPath, "beatles.trig").toString(); String pathRefBeatlesNQ = Paths.get(referencesPath, 
"trig.beatles.nq").toString(); String pathOutBeatlesNQ = Paths.get(resultPath, "trig.beatles.nq").toString(); @@ -328,12 +342,14 @@ public void testConvertTrigToNq() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesNQ, pathOutBeatlesNQ)); + assertEquals(canonicalize(pathRefBeatlesNQ), canonicalize(pathOutBeatlesNQ)); + assertNotEquals("", pathOutBeatlesNQ); + } @Test public void testConvertJsonldToXml() { - String pathInputBeatlesJSONLD = Paths.get(inputFile, "beatles.jsonld").toString(); + String pathInputBeatlesJSONLD = Paths.get(inputPath, "beatles.jsonld").toString(); String pathRefBeatlesXML = Paths.get(referencesPath, "jsonld.beatles.rdf").toString(); String pathOutBeatlesXML = Paths.get(resultPath, "jsonld.beatles.rdf").toString(); @@ -342,12 +358,14 @@ public void testConvertJsonldToXml() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesXML, pathOutBeatlesXML)); + assertEquals(canonicalize(pathRefBeatlesXML), canonicalize(pathOutBeatlesXML)); + assertNotEquals("", pathOutBeatlesXML); + } @Test public void testConvertJsonldToJsonld() { - String pathInputBeatlesJSONLD = Paths.get(inputFile, "beatles.jsonld").toString(); + String pathInputBeatlesJSONLD = Paths.get(inputPath, "beatles.jsonld").toString(); String pathRefBeatlesJSON = Paths.get(referencesPath, "jsonld.beatles.jsonld").toString(); String pathOutBeatlesJSON = Paths.get(resultPath, "jsonld.beatles.jsonld").toString(); @@ -356,12 +374,14 @@ public void testConvertJsonldToJsonld() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesJSON, pathOutBeatlesJSON)); + assertEquals(canonicalize(pathRefBeatlesJSON), canonicalize(pathOutBeatlesJSON)); + assertNotEquals("", pathOutBeatlesJSON); + } @Test public void testConvertJsonldToTrig() { - String 
pathInputBeatlesJSONLD = Paths.get(inputFile, "beatles.jsonld").toString(); + String pathInputBeatlesJSONLD = Paths.get(inputPath, "beatles.jsonld").toString(); String pathRefBeatlesTRIG = Paths.get(referencesPath, "jsonld.beatles.trig").toString(); String pathOutBeatlesTRIG = Paths.get(resultPath, "jsonld.beatles.trig").toString(); @@ -370,12 +390,14 @@ public void testConvertJsonldToTrig() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesTRIG, pathOutBeatlesTRIG)); + assertEquals(canonicalize(pathRefBeatlesTRIG), canonicalize(pathOutBeatlesTRIG)); + assertNotEquals("", pathOutBeatlesTRIG); + } @Test public void testConvertJsonldToTurtle() { - String pathInputBeatlesJSONLD = Paths.get(inputFile, "beatles.jsonld").toString(); + String pathInputBeatlesJSONLD = Paths.get(inputPath, "beatles.jsonld").toString(); String pathRefBeatlesTTL = Paths.get(referencesPath, "jsonld.beatles.ttl").toString(); String pathOutBeatlesTTL = Paths.get(resultPath, "jsonld.beatles.ttl").toString(); @@ -384,12 +406,14 @@ public void testConvertJsonldToTurtle() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesTTL, pathOutBeatlesTTL)); + assertEquals(canonicalize(pathRefBeatlesTTL), canonicalize(pathOutBeatlesTTL)); + assertNotEquals("", pathOutBeatlesTTL); + } @Test public void testConvertJsonldToNt() { - String pathInputBeatlesJSONLD = Paths.get(inputFile, "beatles.jsonld").toString(); + String pathInputBeatlesJSONLD = Paths.get(inputPath, "beatles.jsonld").toString(); String pathRefBeatlesNT = Paths.get(referencesPath, "jsonld.beatles.nt").toString(); String pathOutBeatlesNT = Paths.get(resultPath, "jsonld.beatles.nt").toString(); @@ -398,12 +422,14 @@ public void testConvertJsonldToNt() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - 
assertTrue(compareFiles(pathRefBeatlesNT, pathOutBeatlesNT)); + assertEquals(canonicalize(pathRefBeatlesNT), canonicalize(pathOutBeatlesNT)); + assertNotEquals("", pathOutBeatlesNT); + } @Test public void testConvertJsonldToNq() { - String pathInputBeatlesJSONLD = Paths.get(inputFile, "beatles.jsonld").toString(); + String pathInputBeatlesJSONLD = Paths.get(inputPath, "beatles.jsonld").toString(); String pathRefBeatlesNQ = Paths.get(referencesPath, "jsonld.beatles.nq").toString(); String pathOutBeatlesNQ = Paths.get(resultPath, "jsonld.beatles.nq").toString(); @@ -412,12 +438,14 @@ public void testConvertJsonldToNq() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesNQ, pathOutBeatlesNQ)); + assertEquals(canonicalize(pathRefBeatlesNQ), canonicalize(pathOutBeatlesNQ)); + assertNotEquals("", pathOutBeatlesNQ); + } @Test public void testConvertNtToXml() { - String pathInputBeatlesNT = Paths.get(inputFile, "beatles.nt").toString(); + String pathInputBeatlesNT = Paths.get(inputPath, "beatles.nt").toString(); String pathRefBeatlesXML = Paths.get(referencesPath, "nt.beatles.rdf").toString(); String pathOutBeatlesXML = Paths.get(resultPath, "nt.beatles.rdf").toString(); @@ -426,12 +454,14 @@ public void testConvertNtToXml() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesXML, pathOutBeatlesXML)); + assertEquals(canonicalize(pathRefBeatlesXML), canonicalize(pathOutBeatlesXML)); + assertNotEquals("", pathOutBeatlesXML); + } @Test public void testConvertNtToJsonld() { - String pathInputBeatlesNT = Paths.get(inputFile, "beatles.nt").toString(); + String pathInputBeatlesNT = Paths.get(inputPath, "beatles.nt").toString(); String pathRefBeatlesJSON = Paths.get(referencesPath, "nt.beatles.jsonld").toString(); String pathOutBeatlesJSON = Paths.get(resultPath, "nt.beatles.jsonld").toString(); @@ -440,12 
+470,14 @@ public void testConvertNtToJsonld() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesJSON, pathOutBeatlesJSON)); + assertEquals(canonicalize(pathRefBeatlesJSON), canonicalize(pathOutBeatlesJSON)); + assertNotEquals("", pathOutBeatlesJSON); + } @Test public void testConvertNtToTrig() { - String pathInputBeatlesNT = Paths.get(inputFile, "beatles.nt").toString(); + String pathInputBeatlesNT = Paths.get(inputPath, "beatles.nt").toString(); String pathRefBeatlesTRIG = Paths.get(referencesPath, "nt.beatles.trig").toString(); String pathOutBeatlesTRIG = Paths.get(resultPath, "nt.beatles.trig").toString(); @@ -454,12 +486,14 @@ public void testConvertNtToTrig() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesTRIG, pathOutBeatlesTRIG)); + assertEquals(canonicalize(pathRefBeatlesTRIG), canonicalize(pathOutBeatlesTRIG)); + assertNotEquals("", pathOutBeatlesTRIG); + } @Test public void testConvertNtToTurtle() { - String pathInputBeatlesNT = Paths.get(inputFile, "beatles.nt").toString(); + String pathInputBeatlesNT = Paths.get(inputPath, "beatles.nt").toString(); String pathRefBeatlesTTL = Paths.get(referencesPath, "nt.beatles.ttl").toString(); String pathOutBeatlesTTL = Paths.get(resultPath, "nt.beatles.ttl").toString(); @@ -468,12 +502,14 @@ public void testConvertNtToTurtle() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesTTL, pathOutBeatlesTTL)); + assertEquals(canonicalize(pathRefBeatlesTTL), canonicalize(pathOutBeatlesTTL)); + assertNotEquals("", pathOutBeatlesTTL); + } @Test public void testConvertNtToNt() { - String pathInputBeatlesNT = Paths.get(inputFile, "beatles.nt").toString(); + String pathInputBeatlesNT = Paths.get(inputPath, "beatles.nt").toString(); String pathRefBeatlesNT = 
Paths.get(referencesPath, "nt.beatles.nt").toString(); String pathOutBeatlesNT = Paths.get(resultPath, "nt.beatles.nt").toString(); @@ -482,12 +518,14 @@ public void testConvertNtToNt() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesNT, pathOutBeatlesNT)); + assertEquals(canonicalize(pathRefBeatlesNT), canonicalize(pathOutBeatlesNT)); + assertNotEquals("", pathOutBeatlesNT); + } @Test public void testConvertNtToNq() { - String pathInputBeatlesNT = Paths.get(inputFile, "beatles.nt").toString(); + String pathInputBeatlesNT = Paths.get(inputPath, "beatles.nt").toString(); String pathRefBeatlesNQ = Paths.get(referencesPath, "nt.beatles.nq").toString(); String pathOutBeatlesNQ = Paths.get(resultPath, "nt.beatles.nq").toString(); @@ -496,12 +534,14 @@ public void testConvertNtToNq() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesNQ, pathOutBeatlesNQ)); + assertEquals(canonicalize(pathRefBeatlesNQ), canonicalize(pathOutBeatlesNQ)); + assertNotEquals("", pathOutBeatlesNQ); + } @Test public void testConvertNqToXml() { - String pathInputBeatlesNQ = Paths.get(inputFile, "beatles.nq").toString(); + String pathInputBeatlesNQ = Paths.get(inputPath, "beatles.nq").toString(); String pathRefBeatlesXML = Paths.get(referencesPath, "nq.beatles.rdf").toString(); String pathOutBeatlesXML = Paths.get(resultPath, "nq.beatles.rdf").toString(); @@ -510,12 +550,14 @@ public void testConvertNqToXml() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesXML, pathOutBeatlesXML)); + assertEquals(canonicalize(pathRefBeatlesXML), canonicalize(pathOutBeatlesXML)); + assertNotEquals("", pathOutBeatlesXML); + } @Test public void testConvertNqToJsonld() { - String pathInputBeatlesNQ = Paths.get(inputFile, "beatles.nq").toString(); + String 
pathInputBeatlesNQ = Paths.get(inputPath, "beatles.nq").toString(); String pathRefBeatlesJSON = Paths.get(referencesPath, "nq.beatles.jsonld").toString(); String pathOutBeatlesJSON = Paths.get(resultPath, "nq.beatles.jsonld").toString(); @@ -524,12 +566,14 @@ public void testConvertNqToJsonld() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesJSON, pathOutBeatlesJSON)); + assertEquals(canonicalize(pathRefBeatlesJSON), canonicalize(pathOutBeatlesJSON)); + assertNotEquals("", pathOutBeatlesJSON); + } @Test public void testConvertNqToTrig() { - String pathInputBeatlesNQ = Paths.get(inputFile, "beatles.nq").toString(); + String pathInputBeatlesNQ = Paths.get(inputPath, "beatles.nq").toString(); String pathRefBeatlesTRIG = Paths.get(referencesPath, "nq.beatles.trig").toString(); String pathOutBeatlesTRIG = Paths.get(resultPath, "nq.beatles.trig").toString(); @@ -538,12 +582,14 @@ public void testConvertNqToTrig() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesTRIG, pathOutBeatlesTRIG)); + assertEquals(canonicalize(pathRefBeatlesTRIG), canonicalize(pathOutBeatlesTRIG)); + assertNotEquals("", pathOutBeatlesTRIG); + } @Test public void testConvertNqToTurtle() { - String pathInputBeatlesNQ = Paths.get(inputFile, "beatles.nq").toString(); + String pathInputBeatlesNQ = Paths.get(inputPath, "beatles.nq").toString(); String pathRefBeatlesTTL = Paths.get(referencesPath, "nq.beatles.ttl").toString(); String pathOutBeatlesTTL = Paths.get(resultPath, "nq.beatles.ttl").toString(); @@ -552,12 +598,14 @@ public void testConvertNqToTurtle() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesTTL, pathOutBeatlesTTL)); + assertEquals(canonicalize(pathRefBeatlesTTL), canonicalize(pathOutBeatlesTTL)); + assertNotEquals("", 
pathOutBeatlesTTL); + } @Test public void testConvertNqToNt() { - String pathInputBeatlesNQ = Paths.get(inputFile, "beatles.nq").toString(); + String pathInputBeatlesNQ = Paths.get(inputPath, "beatles.nq").toString(); String pathRefBeatlesNT = Paths.get(referencesPath, "nq.beatles.nt").toString(); String pathOutBeatlesNT = Paths.get(resultPath, "nq.beatles.nt").toString(); @@ -566,12 +614,14 @@ public void testConvertNqToNt() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesNT, pathOutBeatlesNT)); + assertEquals(canonicalize(pathRefBeatlesNT), canonicalize(pathOutBeatlesNT)); + assertNotEquals("", pathOutBeatlesNT); + } @Test public void testConvertNqToNq() { - String pathInputBeatlesNQ = Paths.get(inputFile, "beatles.nq").toString(); + String pathInputBeatlesNQ = Paths.get(inputPath, "beatles.nq").toString(); String pathRefBeatlesNQ = Paths.get(referencesPath, "nq.beatles.nq").toString(); String pathOutBeatlesNQ = Paths.get(resultPath, "nq.beatles.nq").toString(); @@ -580,12 +630,14 @@ public void testConvertNqToNq() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesNQ, pathOutBeatlesNQ)); + assertEquals(canonicalize(pathRefBeatlesNQ), canonicalize(pathOutBeatlesNQ)); + assertNotEquals("", pathOutBeatlesNQ); + } @Test public void testConvertRdfaToXml() { - String pathInputStringHtml = Paths.get(inputFile, "beatles.html").toString(); + String pathInputStringHtml = Paths.get(inputPath, "beatles.html").toString(); String pathRefBeatlesXML = Paths.get(referencesPath, "html.beatles.rdf").toString(); String pathOutBeatlesXML = Paths.get(resultPath, "html.beatles.rdf").toString(); @@ -594,12 +646,14 @@ public void testConvertRdfaToXml() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesXML, pathOutBeatlesXML)); + 
assertEquals(canonicalize(pathRefBeatlesXML), canonicalize(pathOutBeatlesXML)); + assertNotEquals("", pathOutBeatlesXML); + } @Test public void testConvertRdfaToJsonld() { - String pathInputStringHtml = Paths.get(inputFile, "beatles.html").toString(); + String pathInputStringHtml = Paths.get(inputPath, "beatles.html").toString(); String pathRefBeatlesJSON = Paths.get(referencesPath, "html.beatles.jsonld").toString(); String pathOutBeatlesJSON = Paths.get(resultPath, "html.beatles.jsonld").toString(); @@ -608,12 +662,14 @@ public void testConvertRdfaToJsonld() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesJSON, pathOutBeatlesJSON)); + assertEquals(canonicalize(pathRefBeatlesJSON), canonicalize(pathOutBeatlesJSON)); + assertNotEquals("", pathOutBeatlesJSON); + } @Test public void testConvertRdfaToTrig() { - String pathInputStringHtml = Paths.get(inputFile, "beatles.html").toString(); + String pathInputStringHtml = Paths.get(inputPath, "beatles.html").toString(); String pathRefBeatlesTRIG = Paths.get(referencesPath, "html.beatles.trig").toString(); String pathOutBeatlesTRIG = Paths.get(resultPath, "html.beatles.trig").toString(); @@ -622,12 +678,14 @@ public void testConvertRdfaToTrig() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesTRIG, pathOutBeatlesTRIG)); + assertEquals(canonicalize(pathRefBeatlesTRIG), canonicalize(pathOutBeatlesTRIG)); + assertNotEquals("", pathOutBeatlesTRIG); + } @Test public void testConvertRdfaToTurtle() { - String pathInputStringHtml = Paths.get(inputFile, "beatles.html").toString(); + String pathInputStringHtml = Paths.get(inputPath, "beatles.html").toString(); String pathRefBeatlesTTL = Paths.get(referencesPath, "html.beatles.ttl").toString(); String pathOutBeatlesTTL = Paths.get(resultPath, "html.beatles.ttl").toString(); @@ -636,12 +694,14 @@ public void 
testConvertRdfaToTurtle() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesTTL, pathOutBeatlesTTL)); + assertEquals(canonicalize(pathRefBeatlesTTL), canonicalize(pathOutBeatlesTTL)); + assertNotEquals("", pathOutBeatlesTTL); + } @Test public void testConvertRdfaToNt() { - String pathInputStringHtml = Paths.get(inputFile, "beatles.html").toString(); + String pathInputStringHtml = Paths.get(inputPath, "beatles.html").toString(); String pathRefBeatlesNT = Paths.get(referencesPath, "html.beatles.nt").toString(); String pathOutBeatlesNT = Paths.get(resultPath, "html.beatles.nt").toString(); @@ -650,12 +710,14 @@ public void testConvertRdfaToNt() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesNT, pathOutBeatlesNT)); + assertEquals(canonicalize(pathRefBeatlesNT), canonicalize(pathOutBeatlesNT)); + assertNotEquals("", pathOutBeatlesNT); + } @Test public void testConvertRdfaToNq() { - String pathInputStringHtml = Paths.get(inputFile, "beatles.html").toString(); + String pathInputStringHtml = Paths.get(inputPath, "beatles.html").toString(); String pathRefBeatlesNQ = Paths.get(referencesPath, "html.beatles.nq").toString(); String pathOutBeatlesNQ = Paths.get(resultPath, "html.beatles.nq").toString(); @@ -664,7 +726,9 @@ public void testConvertRdfaToNq() { assertEquals(0, exitCode); assertEquals(out.toString(), ""); assertEquals(err.toString(), ""); - assertTrue(compareFiles(pathRefBeatlesNQ, pathOutBeatlesNQ)); + assertEquals(canonicalize(pathRefBeatlesNQ), canonicalize(pathOutBeatlesNQ)); + assertNotEquals("", pathOutBeatlesNQ); + } @Test @@ -673,7 +737,7 @@ public void testConvertWithSameInputAndOutputPath() { int exitCode = cmd.execute("-i", inputPath, "-of", "TURTLE", "-o", inputPath); assertEquals(1, exitCode); assertEquals(out.toString(), ""); - assertTrue(err.toString().trim().contains("Input 
path cannot be the same as output path.")); + assertTrue(err.toString().trim().contains("Input path cannot be same as output path")); } @Test @@ -703,7 +767,8 @@ public void testGraphUtilsLoadWithInvalidFormat() { Path inputPath = Paths.get(referencesPath, "beatles.ttl"); try { - RdfDataLoader.loadFromFile(inputPath, EnumInputFormat.JSONLD, Graph.create(), null, false); + RdfDataLoader loader = new RdfDataLoader(null, false); + loader.load(new String[] { inputPath.toString() }, EnumRdfInputFormat.JSONLD, false); fail("Expected an IllegalArgumentException to be thrown"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("Failed to open RDF data file:")); diff --git a/corese-command/src/test/java/fr/inria/corese/command/programs/RemoteSparqlTest.java b/corese-command/src/test/java/fr/inria/corese/command/programs/RemoteSparqlTest.java new file mode 100644 index 0000000000..a64c40325f --- /dev/null +++ b/corese-command/src/test/java/fr/inria/corese/command/programs/RemoteSparqlTest.java @@ -0,0 +1,353 @@ +package fr.inria.corese.command.programs; + +import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; +import static com.github.tomakehurst.wiremock.client.WireMock.equalTo; +import static com.github.tomakehurst.wiremock.client.WireMock.exactly; +import static com.github.tomakehurst.wiremock.client.WireMock.get; +import static com.github.tomakehurst.wiremock.client.WireMock.getRequestedFor; +import static com.github.tomakehurst.wiremock.client.WireMock.post; +import static com.github.tomakehurst.wiremock.client.WireMock.postRequestedFor; +import static com.github.tomakehurst.wiremock.client.WireMock.urlPathEqualTo; +import static com.github.tomakehurst.wiremock.client.WireMock.verify; +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.io.PrintWriter; +import java.io.StringWriter; +import java.io.UnsupportedEncodingException; +import java.net.URLEncoder; + +import org.junit.jupiter.api.AfterAll; +import 
org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import com.github.tomakehurst.wiremock.WireMockServer; +import com.github.tomakehurst.wiremock.core.WireMockConfiguration; + +import picocli.CommandLine; + +public class RemoteSparqlTest { + + // Picocli objects + private RemoteSparql convert = new RemoteSparql(); + private CommandLine cmd = new CommandLine(convert); + + private StringWriter out = new StringWriter(); + private StringWriter err = new StringWriter(); + + // WireMock objects + private static WireMockServer wireMockServer; + + // Server informations + private final String serverUrl = "http://localhost:8080/sparql"; + private final String graphUri = "http://example.orgraphUrig/graph"; + // Query + private static final String querySelect = "SELECT * WHERE { ?s ?p ?o }"; + + //////////////// + // Before All // + //////////////// + + @BeforeEach + private void initializePicoCli() { + PrintWriter out = new PrintWriter(this.out); + PrintWriter err = new PrintWriter(this.err); + cmd.setOut(out); + cmd.setErr(err); + } + + @BeforeAll + private static void initializeWireMockServer() { + wireMockServer = new WireMockServer(WireMockConfiguration.options().port(8080)); + + wireMockServer.start(); + + // Get + wireMockServer.stubFor(get(urlPathEqualTo("/sparql")) + .withQueryParam("query", equalTo(querySelect)) + .willReturn(aResponse() + .withStatus(200) + .withBody("this is a fake response"))); + + // Post-UrlEncoded + wireMockServer.stubFor(post(urlPathEqualTo("/sparql")) + .withHeader("Content-Type", equalTo("application/x-www-form-urlencoded")) + .willReturn(aResponse() + .withStatus(200) + .withBody("this is a fake response"))); + + // Post-Direct + wireMockServer.stubFor(post(urlPathEqualTo("/sparql")) + .withHeader("Content-Type", equalTo("application/sparql-query")) + .willReturn(aResponse() + .withStatus(200) + .withBody("this is a fake response"))); + 
+ } + + /////////////// + // After All // + /////////////// + + @AfterEach + private void resetStreams() { + wireMockServer.resetRequests(); + out.getBuffer().setLength(0); + err.getBuffer().setLength(0); + } + + @AfterAll + private static void tearDown() { + wireMockServer.stop(); + wireMockServer.shutdown(); + } + + /////////// + // Utils // + /////////// + + private static String encode(String value) { + try { + // Encode the value using URLEncoder + String encodedValue = URLEncoder.encode(value, "UTF-8"); + // Replace '+' with '%20' + return encodedValue.replace("+", "%20"); + } catch (UnsupportedEncodingException e) { + throw new RuntimeException("UTF-8 encoding is not supported", e); + } + } + + //////////////// + // Test Cases // + //////////////// + + // Query via get + + @Test + public void getQueryTest() { + String[] args = { "-e", serverUrl, "-q", querySelect, "-m", "get" }; + int exitCode = cmd.execute(args); + + // Asserts + assertEquals(0, exitCode); + verify(exactly(1), getRequestedFor( + urlPathEqualTo("/sparql")) + .withQueryParam("query", equalTo(querySelect)) + .withHeader("Accept", equalTo("text/csv"))); + } + + @Test + public void getQueryTestDefaultGraphUri() { + String[] args = { "-e", serverUrl, "-q", querySelect, "-m", "get", "-d", graphUri }; + int exitCode = cmd.execute(args); + + // Asserts + assertEquals(0, exitCode); + verify(exactly(1), getRequestedFor( + urlPathEqualTo("/sparql")) + .withQueryParam("query", equalTo(querySelect)) + .withQueryParam("default-graph-uri", equalTo(graphUri)) + .withHeader("Accept", equalTo("text/csv"))); + } + + @Test + public void getQueryTestNamedGraphUri() { + String[] args = { "-e", serverUrl, "-q", querySelect, "-m", "get", "-n", graphUri }; + int exitCode = cmd.execute(args); + + // Asserts + assertEquals(0, exitCode); + verify(exactly(1), getRequestedFor( + urlPathEqualTo("/sparql")) + .withQueryParam("query", equalTo(querySelect)) + .withQueryParam("named-graph-uri", equalTo(graphUri)) + 
.withHeader("Accept", equalTo("text/csv"))); + } + + @Test + public void getQueryTestAcceptHeader() { + String[] args = { "-e", serverUrl, "-q", querySelect, "-m", "get", "-a", "application/json" }; + int exitCode = cmd.execute(args); + + // Asserts + assertEquals(0, exitCode); + verify(exactly(1), getRequestedFor( + urlPathEqualTo("/sparql")) + .withQueryParam("query", equalTo(querySelect)) + .withHeader("Accept", equalTo("application/json"))); + } + + @Test + public void getQueryTestMultipleHeaders() { + String[] args = { "-e", serverUrl, "-q", querySelect, "-m", "get", "-H", "Accept: application/json", + "-H", "Authorization: Bearer 1234" }; + int exitCode = cmd.execute(args); + + // Asserts + assertEquals(0, exitCode); + verify(exactly(1), getRequestedFor( + urlPathEqualTo("/sparql")) + .withQueryParam("query", equalTo(querySelect)) + .withHeader("Accept", equalTo("application/json")) + .withHeader("Authorization", equalTo("Bearer 1234"))); + } + + // Query via POST URL-Encoded + + @Test + public void postQueryUrlEncodedQueryTest() { + String[] args = { "-e", serverUrl, "-q", querySelect, "-m", "post-urlencoded" }; + int exitCode = cmd.execute(args); + + // Asserts + assertEquals(0, exitCode); + verify(exactly(1), postRequestedFor( + urlPathEqualTo("/sparql")) + .withHeader("Content-Type", equalTo("application/x-www-form-urlencoded")) + .withRequestBody(equalTo("query=" + encode(querySelect))) + .withHeader("Accept", equalTo("text/csv"))); + } + + @Test + public void postQueryUrlEncodedQueryTestDefaultGraphUri() { + String[] args = { "-e", serverUrl, "-q", querySelect, "-m", "post-urlencoded", "-d", + graphUri }; + int exitCode = cmd.execute(args); + + // Asserts + assertEquals(0, exitCode); + verify(exactly(1), postRequestedFor( + urlPathEqualTo("/sparql")) + .withHeader("Content-Type", equalTo("application/x-www-form-urlencoded")) + .withRequestBody(equalTo("query=" + encode(querySelect) + + "&default-graph-uri=" + encode(graphUri))) + .withHeader("Accept", 
equalTo("text/csv"))); + } + + @Test + public void postQueryUrlEncodedQueryTestNamedGraphUri() { + String[] args = { "-e", serverUrl, "-q", querySelect, "-m", "post-urlencoded", "-n", + graphUri }; + int exitCode = cmd.execute(args); + + // Asserts + assertEquals(0, exitCode); + verify(exactly(1), postRequestedFor( + urlPathEqualTo("/sparql")) + .withHeader("Content-Type", equalTo("application/x-www-form-urlencoded")) + .withRequestBody(equalTo("query=" + encode(querySelect) + + "&named-graph-uri=" + encode(graphUri))) + .withHeader("Accept", equalTo("text/csv"))); + } + + @Test + public void postQueryUrlEncodedQueryTestAcceptHeader() { + String[] args = { "-e", serverUrl, "-q", querySelect, "-m", "post-urlencoded", "-a", + "application/json" }; + int exitCode = cmd.execute(args); + + // Asserts + assertEquals(0, exitCode); + verify(exactly(1), postRequestedFor( + urlPathEqualTo("/sparql")) + .withHeader("Content-Type", equalTo("application/x-www-form-urlencoded")) + .withRequestBody(equalTo("query=" + encode(querySelect))) + .withHeader("Accept", equalTo("application/json"))); + } + + @Test + public void postQueryUrlEncodedQueryTestMultipleHeaders() { + String[] args = { "-e", serverUrl, "-q", querySelect, "-m", "post-urlencoded", "-H", + "Accept: application/json", "-H", "Authorization: Bearer 1234" }; + int exitCode = cmd.execute(args); + + // Asserts + assertEquals(0, exitCode); + verify(exactly(1), postRequestedFor( + urlPathEqualTo("/sparql")) + .withHeader("Content-Type", equalTo("application/x-www-form-urlencoded")) + .withRequestBody(equalTo("query=" + encode(querySelect))) + .withHeader("Accept", equalTo("application/json")) + .withHeader("Authorization", equalTo("Bearer 1234"))); + } + + // Query via POST Directly + + @Test + public void postQueryDirectQueryTest() { + String[] args = { "-e", serverUrl, "-q", querySelect, "-m", "post-direct" }; + int exitCode = cmd.execute(args); + + // Asserts + assertEquals(0, exitCode); + verify(exactly(1), 
postRequestedFor( + urlPathEqualTo("/sparql")) + .withHeader("Content-Type", equalTo("application/sparql-query")) + .withRequestBody(equalTo(querySelect)) + .withHeader("Accept", equalTo("text/csv"))); + } + + @Test + public void postQueryDirectQueryTestDefaultGraphUri() { + String[] args = { "-e", serverUrl, "-q", querySelect, "-m", "post-direct", "-d", + graphUri }; + int exitCode = cmd.execute(args); + + // Asserts + assertEquals(0, exitCode); + verify(exactly(1), postRequestedFor( + urlPathEqualTo("/sparql")) + .withHeader("Content-Type", equalTo("application/sparql-query")) + .withRequestBody(equalTo(querySelect)) + .withQueryParam("default-graph-uri", equalTo(graphUri)) + .withHeader("Accept", equalTo("text/csv"))); + } + + @Test + public void postQueryDirectQueryTestNamedGraphUri() { + String[] args = { "-e", serverUrl, "-q", querySelect, "-m", "post-direct", "-n", + graphUri }; + int exitCode = cmd.execute(args); + + // Asserts + assertEquals(0, exitCode); + verify(exactly(1), postRequestedFor( + urlPathEqualTo("/sparql")) + .withHeader("Content-Type", equalTo("application/sparql-query")) + .withRequestBody(equalTo(querySelect)) + .withQueryParam("named-graph-uri", equalTo(graphUri)) + .withHeader("Accept", equalTo("text/csv"))); + } + + @Test + public void postQueryDirectQueryTestAcceptHeader() { + String[] args = { "-e", serverUrl, "-q", querySelect, "-m", "post-direct", "-a", + "application/json" }; + int exitCode = cmd.execute(args); + + // Asserts + assertEquals(0, exitCode); + verify(exactly(1), postRequestedFor( + urlPathEqualTo("/sparql")) + .withHeader("Content-Type", equalTo("application/sparql-query")) + .withRequestBody(equalTo(querySelect)) + .withHeader("Accept", equalTo("application/json"))); + } + + @Test + public void postQueryDirectQueryTestMultipleHeaders() { + String[] args = { "-e", serverUrl, "-q", querySelect, "-m", "post-direct", "-H", + "Accept: application/json", "-H", "Authorization: Bearer 1234" }; + int exitCode = 
cmd.execute(args); + + // Asserts + assertEquals(0, exitCode); + verify(exactly(1), postRequestedFor( + urlPathEqualTo("/sparql")) + .withHeader("Content-Type", equalTo("application/sparql-query")) + .withRequestBody(equalTo(querySelect)) + .withHeader("Accept", equalTo("application/json")) + .withHeader("Authorization", equalTo("Bearer 1234"))); + } +} diff --git a/corese-command/src/test/java/fr/inria/corese/command/programs/ShaclTest.java b/corese-command/src/test/java/fr/inria/corese/command/programs/ShaclTest.java index 7be8fbc694..cde1893eaa 100644 --- a/corese-command/src/test/java/fr/inria/corese/command/programs/ShaclTest.java +++ b/corese-command/src/test/java/fr/inria/corese/command/programs/ShaclTest.java @@ -1,20 +1,23 @@ package fr.inria.corese.command.programs; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import java.io.ByteArrayInputStream; import java.io.IOException; +import java.io.InputStream; import java.io.PrintWriter; import java.io.StringWriter; -import java.nio.file.Files; import java.nio.file.Paths; -import java.util.Arrays; import java.util.regex.Pattern; -import java.util.stream.Collectors; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import fr.inria.corese.core.Graph; +import fr.inria.corese.core.load.Load; +import fr.inria.corese.core.print.CanonicalRdf10Format; import picocli.CommandLine; public class ShaclTest { @@ -38,40 +41,58 @@ public class ShaclTest { .getPath(); private static final String UUID_REGEX = ""; - private static final String BLANK_NODE_REGEX = "_:(b|bb)\\d+"; + private static final String NEUTRAL_UUID = ""; - @Before - public void setUp() throws Exception { + @BeforeEach + public void setUp() { PrintWriter 
out = new PrintWriter(this.out); PrintWriter err = new PrintWriter(this.err); cmd.setOut(out); cmd.setErr(err); } - public boolean compareFiles(String filePath1, String filePath2) throws IOException { - String content1 = new String(Files.readAllBytes(Paths.get(filePath1))); - String content2 = new String(Files.readAllBytes(Paths.get(filePath2))); + public boolean compareFiles(String filePath1, String filePath2, int format) throws IOException { + // Get content of files + String content1 = getStringContent(filePath1); + String content2 = getStringContent(filePath2); - String normalizedContent1 = sort(trimLines(removeUUIDsAndBlankNodes(content1))); - String normalizedContent2 = sort(trimLines(removeUUIDsAndBlankNodes(content2))); + // Remove UUIDs and Blank Nodes + String clearContent1 = maskUUIDs(content1); + String clearContent2 = maskUUIDs(content2); - return normalizedContent1.equals(normalizedContent2); - } + // Canonicalize RDF content + String canonicallFile1 = canonicalize(clearContent1, format); + String canonicallFile2 = canonicalize(clearContent2, format); - private String sort(String content) { - String[] lines = content.split("\n"); - Arrays.sort(lines); - return Arrays.stream(lines).collect(Collectors.joining("\n")); + return canonicallFile1.equals(canonicallFile2); } - private String removeUUIDsAndBlankNodes(String content) { - content = Pattern.compile(UUID_REGEX).matcher(content).replaceAll(""); - content = Pattern.compile(BLANK_NODE_REGEX).matcher(content).replaceAll(""); + private String maskUUIDs(String content) { + content = Pattern.compile(UUID_REGEX).matcher(content).replaceAll(NEUTRAL_UUID); return content; } - private String trimLines(String content) { - return Arrays.stream(content.split("\n")).map(String::trim).collect(Collectors.joining("\n")); + private String getStringContent(String path) throws IOException { + return new String(java.nio.file.Files.readAllBytes(Paths.get(path))); + } + + private String canonicalize(String content, int 
format) { + + // Content String to Input Stream + InputStream is = new ByteArrayInputStream(content.getBytes()); + + // Load RDF content into a Graph + Graph graph = Graph.create(); + Load ld = Load.create(graph); + + try { + ld.parse(is, format); + } catch (Exception e) { + e.printStackTrace(); + } + + // Return Canonical RDF content + return CanonicalRdf10Format.create(graph).toString(); } @Test @@ -91,7 +112,8 @@ public void test1RDF1SHACLBeatlesOk() throws IOException { assertEquals(0, exitCode); assertEquals("", this.out.toString()); assertEquals("", this.err.toString()); - assertTrue(this.compareFiles(expected, result)); + assertTrue(this.compareFiles(expected, result, Load.TURTLE_FORMAT)); + assertNotEquals("", result); } @Test @@ -111,7 +133,8 @@ public void test1RDF1SHACLBeatlesErr() throws IOException { assertEquals(0, exitCode); assertEquals("", this.out.toString()); assertEquals("", this.err.toString()); - assertTrue(this.compareFiles(expected, result)); + assertTrue(this.compareFiles(expected, result, Load.TURTLE_FORMAT)); + assertNotEquals("", result); } @Test @@ -136,7 +159,8 @@ public void test2RDF2SHACLBeatlesOk() throws IOException { assertEquals(0, exitCode); assertEquals("", this.out.toString()); assertEquals("", this.err.toString()); - assertTrue(this.compareFiles(expected, result)); + assertTrue(this.compareFiles(expected, result, Load.TURTLE_FORMAT)); + assertNotEquals("", result); } @Test @@ -161,7 +185,8 @@ public void test2RDF2SHACLBeatlesErr() throws IOException { assertEquals(0, exitCode); assertEquals("", this.out.toString()); assertEquals("", this.err.toString()); - assertTrue(this.compareFiles(expected, result)); + assertTrue(this.compareFiles(expected, result, Load.TURTLE_FORMAT)); + assertNotEquals("", result); } @Test @@ -181,7 +206,8 @@ public void test1RDFUrl1SHACLBeatlesOk() throws IOException { assertEquals(0, exitCode); assertEquals("", this.out.toString()); assertEquals("", this.err.toString()); - 
assertTrue(this.compareFiles(expected, result)); + assertTrue(this.compareFiles(expected, result, Load.TURTLE_FORMAT)); + assertNotEquals("", result); } @Test @@ -201,7 +227,8 @@ public void test1RDF1SHACLUrlBeatlesOk() throws IOException { assertEquals(0, exitCode); assertEquals("", this.out.toString()); assertEquals("", this.err.toString()); - assertTrue(this.compareFiles(expected, result)); + assertTrue(this.compareFiles(expected, result, Load.TURTLE_FORMAT)); + assertNotEquals("", result); } @Test @@ -221,7 +248,8 @@ public void testRDFSHACLDirectoryBeatlesErr() throws IOException { assertEquals(0, exitCode); assertEquals("", this.out.toString()); assertEquals("", this.err.toString()); - assertTrue(this.compareFiles(expected, result)); + assertTrue(this.compareFiles(expected, result, Load.TURTLE_FORMAT)); + assertNotEquals("", result); } @Test @@ -244,7 +272,8 @@ public void test1RDF1SHACLBeatlesOkrdf() throws IOException { assertEquals(0, exitCode); assertEquals("", this.out.toString()); assertEquals("", this.err.toString()); - assertTrue(this.compareFiles(expected, result)); + assertTrue(this.compareFiles(expected, result, Load.RDFXML_FORMAT)); + assertNotEquals("", result); } @Test @@ -267,7 +296,8 @@ public void test1RDF1SHACLBeatlesOkjsonld() throws IOException { assertEquals(0, exitCode); assertEquals("", this.out.toString()); assertEquals("", this.err.toString()); - assertTrue(this.compareFiles(expected, result)); + assertTrue(this.compareFiles(expected, result, Load.JSONLD_FORMAT)); + assertNotEquals("", result); } @Test @@ -288,7 +318,8 @@ public void testRDFSHACLDirectoryRecursiveBeatlesErr() throws IOException { assertEquals(0, exitCode); assertEquals("", this.out.toString()); assertEquals("", this.err.toString()); - assertTrue(this.compareFiles(expected, result)); + assertTrue(this.compareFiles(expected, result, Load.TURTLE_FORMAT)); + assertNotEquals("", result); } } diff --git 
a/corese-command/src/test/java/fr/inria/corese/command/programs/SparqlTest.java b/corese-command/src/test/java/fr/inria/corese/command/programs/SparqlTest.java index 5a4f4ce6d3..d6cb9e628d 100644 --- a/corese-command/src/test/java/fr/inria/corese/command/programs/SparqlTest.java +++ b/corese-command/src/test/java/fr/inria/corese/command/programs/SparqlTest.java @@ -1,20 +1,20 @@ package fr.inria.corese.command.programs; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; -import java.io.BufferedReader; -import java.io.FileReader; import java.io.IOException; import java.io.PrintWriter; import java.io.StringWriter; import java.nio.file.Paths; -import java.util.HashSet; -import java.util.Set; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import fr.inria.corese.core.Graph; +import fr.inria.corese.core.load.Load; +import fr.inria.corese.core.print.CanonicalRdf10Format; import picocli.CommandLine; public class SparqlTest { @@ -35,37 +35,36 @@ public class SparqlTest { private String queriesPath = SparqlTest.class.getResource("/fr/inria/corese/command/programs/sparql/queries/") .getPath(); - @Before - public void setUp() throws Exception { + @BeforeEach + public void setUp() { PrintWriter out = new PrintWriter(this.out); PrintWriter err = new PrintWriter(this.err); cmd.setOut(out); cmd.setErr(err); } - private boolean compareFiles(String filePath1, String filePath2) throws IOException { - // Créer deux sets pour stocker les lignes de chaque fichier - Set file1Lines = new HashSet<>(); - Set file2Lines = new HashSet<>(); + public boolean compareFiles(String filePath1, String filePath2) throws IOException { + // Canonicalize RDF content + String canonicallFile1 = 
canonicalize(filePath1); + String canonicallFile2 = canonicalize(filePath2); - // Lire le premier fichier et stocker chaque ligne dans le set - try (BufferedReader reader = new BufferedReader(new FileReader(filePath1))) { - String line; - while ((line = reader.readLine()) != null) { - file1Lines.add(line); - } - } + return canonicallFile1.equals(canonicallFile2); + } + + private String canonicalize(String filePath) { + + // Load RDF content into a Graph + Graph graph = Graph.create(); + Load ld = Load.create(graph); - // Lire le deuxième fichier et stocker chaque ligne dans le set - try (BufferedReader reader = new BufferedReader(new FileReader(filePath2))) { - String line; - while ((line = reader.readLine()) != null) { - file2Lines.add(line); - } + try { + ld.parse(filePath, ""); + } catch (Exception e) { + e.printStackTrace(); } - // Vérifier que les deux sets sont identiques - return file1Lines.equals(file2Lines); + // Return Canonical RDF content + return CanonicalRdf10Format.create(graph).toString(); } @Test @@ -84,6 +83,7 @@ public void testSelectRdfxmlInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -102,6 +102,7 @@ public void testSelectTurtleInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -120,6 +121,7 @@ public void testSelectTriginvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -138,6 +140,7 @@ public void testSelectJsonldInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -253,6 +256,7 @@ public 
void testAskTrueRdfxmlInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @@ -272,6 +276,7 @@ public void testAskFalseRdfxmlInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -290,6 +295,7 @@ public void testAskTrueTurtleInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -308,6 +314,7 @@ public void testAskFalseTurtleInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -325,6 +332,7 @@ public void testAskTrigTrueInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -343,6 +351,7 @@ public void testAskTrigFalseInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -361,6 +370,7 @@ public void testAskTrueJsonldInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -379,6 +389,7 @@ public void testAskFalseJsonldInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -650,6 +661,7 @@ public void testInsertBidingXmlInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", 
out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -669,6 +681,7 @@ public void testInsertBidingJsonInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -688,6 +701,7 @@ public void testInsertBidingCsvInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -707,6 +721,7 @@ public void testInsertBidingTsvInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -725,6 +740,7 @@ public void testInsertBidingMarkdownInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -820,6 +836,7 @@ public void testInsertWhereBidingxmlInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -839,6 +856,7 @@ public void testInsertWhereBidingjsonInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -856,6 +874,7 @@ public void testInsertWhereBidingCsvInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -873,6 +892,7 @@ public void testInsertWhereBidingTsvInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); 
assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -890,6 +910,7 @@ public void testInsertWhereBidingMarkdownInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -976,6 +997,7 @@ public void testDeleteBidingXmlInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -993,6 +1015,7 @@ public void testDeleteBidingJsonInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -1010,6 +1033,7 @@ public void testDeleteBidingCsvInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -1027,6 +1051,7 @@ public void testDeleteBidingTsvInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -1044,6 +1069,7 @@ public void testDeleteBidingMarkdownInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -1138,6 +1164,7 @@ public void testDeleteWhereBidingXmlInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -1155,6 +1182,7 @@ public void testDeleteWhereBidingJsonInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); 
assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -1172,6 +1200,7 @@ public void testDeleteWhereBidingCsvInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -1189,6 +1218,7 @@ public void testDeleteWhereBidingTsvInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -1206,6 +1236,7 @@ public void testDeleteWhereBidingMarkdownInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -1295,6 +1326,7 @@ public void testBidingConstructXmlInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -1312,6 +1344,7 @@ public void testBidingConstructJsonInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -1328,6 +1361,7 @@ public void testBidingConstructCsvInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -1344,6 +1378,7 @@ public void testBidingConstructTsvInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -1360,6 +1395,7 @@ public void testBidingConstructMarkdownInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); 
assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -1448,6 +1484,7 @@ public void testBidingDescribeXmlInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -1464,6 +1501,7 @@ public void testBidingDescribeJsonInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -1480,6 +1518,7 @@ public void testBidingDescribeCsvInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -1496,6 +1535,7 @@ public void testBidingDescribeTsvInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test @@ -1512,6 +1552,7 @@ public void testBidingDescribeMarkdownInvalid() throws IOException { assertEquals(1, exitCode); assertEquals("", out.toString()); assertTrue(actualOutput.contains(expectedOutput)); + assertNotEquals("", actualOutput); } @Test diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/canonicalize/input/beatles.ttl b/corese-command/src/test/resources/fr/inria/corese/command/programs/canonicalize/input/beatles.ttl new file mode 100644 index 0000000000..e25763992a --- /dev/null +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/canonicalize/input/beatles.ttl @@ -0,0 +1,42 @@ +@prefix xsd: . +@prefix ns1: . +@prefix rdf: . + + + ns1:length 125 ; +ns1:name "Love Me Do" ; +ns1:writer ns1:John_Lennon ; +ns1:writer ns1:Paul_McCartney ; +rdf:type ns1:Song . 
+ +ns1:The_Beatles ns1:member ns1:John_Lennon ; +ns1:member ns1:Paul_McCartney ; +ns1:member ns1:Ringo_Starr ; +ns1:member ns1:George_Harrison ; +ns1:name "The Beatles" ; +rdf:type ns1:Band . + +ns1:Please_Please_Me ns1:artist ns1:The_Beatles ; +ns1:date "1963-03-22"^^xsd:date ; +ns1:name "Please Please Me" ; +ns1:track ns1:Love_Me_Do ; +rdf:type ns1:Album . + +ns1:George_Harrison rdf:type ns1:SoloArtist . + +ns1:Ringo_Starr rdf:type ns1:SoloArtist . + +ns1:John_Lennon rdf:type ns1:SoloArtist . + +ns1:Paul_McCartney rdf:type ns1:SoloArtist . + +ns1:McCartney ns1:artist ns1:Paul_McCartney ; +ns1:date "1970-04-17"^^xsd:date ; +ns1:name "McCartney" ; +rdf:type ns1:Album . + +ns1:Imagine ns1:artist ns1:John_Lennon ; +ns1:date "1971-10-11"^^xsd:date ; +ns1:name "Imagine" ; +rdf:type ns1:Album . + diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/canonicalize/input/recursive-level1/person.ttl b/corese-command/src/test/resources/fr/inria/corese/command/programs/canonicalize/input/recursive-level1/person.ttl new file mode 100644 index 0000000000..5eff228bbf --- /dev/null +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/canonicalize/input/recursive-level1/person.ttl @@ -0,0 +1,23 @@ +@prefix ex: . +@prefix rdf: . + +ex:Alice ex:ssn "987-65-4323" ; + ex:worksFor ex:Haribo, ex:KitKat ; + rdf:type ex:Person . + +ex:Bob ex:ssn "124-35-6789" ; + ex:worksFor ex:Twitch ; + rdf:type ex:Person . + +ex:Calvin ex:ssn "648-67-6545" ; + ex:worksFor ex:UntypedCompany ; + rdf:type ex:Person . + +ex:Haribo rdf:type ex:Company . + +ex:KitKat rdf:type ex:Company . + +ex:Twitch rdf:type ex:Company . + +ex:UntypedCompany rdf:type ex:Company . 
+ diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/canonicalize/references/beatles-sha384.nq b/corese-command/src/test/resources/fr/inria/corese/command/programs/canonicalize/references/beatles-sha384.nq new file mode 100644 index 0000000000..978a006b31 --- /dev/null +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/canonicalize/references/beatles-sha384.nq @@ -0,0 +1,28 @@ + . + . + "1971-10-11"^^ . + "Imagine" . + . + . + "125"^^ . + "Love Me Do" . + . + . + . + . + "1970-04-17"^^ . + "McCartney" . + . + . + . + "1963-03-22"^^ . + "Please Please Me" . + . + . + . + . + . + . + . + "The Beatles" . + . diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/canonicalize/references/beatles.nq b/corese-command/src/test/resources/fr/inria/corese/command/programs/canonicalize/references/beatles.nq new file mode 100644 index 0000000000..978a006b31 --- /dev/null +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/canonicalize/references/beatles.nq @@ -0,0 +1,28 @@ + . + . + "1971-10-11"^^ . + "Imagine" . + . + . + "125"^^ . + "Love Me Do" . + . + . + . + . + "1970-04-17"^^ . + "McCartney" . + . + . + . + "1963-03-22"^^ . + "Please Please Me" . + . + . + . + . + . + . + . + "The Beatles" . + . diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/canonicalize/references/multiple.nq b/corese-command/src/test/resources/fr/inria/corese/command/programs/canonicalize/references/multiple.nq new file mode 100644 index 0000000000..eedd1da8fe --- /dev/null +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/canonicalize/references/multiple.nq @@ -0,0 +1,42 @@ + . + . + "1971-10-11"^^ . + "Imagine" . + . + . + "125"^^ . + "Love Me Do" . + . + . + . + . + "1970-04-17"^^ . + "McCartney" . + . + . + . + "1963-03-22"^^ . + "Please Please Me" . + . + . + . + . + . + . + . + "The Beatles" . + . + "987-65-4323" . + . + . + . + "124-35-6789" . 
+ . + . + "648-67-6545" . + . + . + . + . + . + . diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/canonicalize/references/recursive.nq b/corese-command/src/test/resources/fr/inria/corese/command/programs/canonicalize/references/recursive.nq new file mode 100644 index 0000000000..eedd1da8fe --- /dev/null +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/canonicalize/references/recursive.nq @@ -0,0 +1,42 @@ + . + . + "1971-10-11"^^ . + "Imagine" . + . + . + "125"^^ . + "Love Me Do" . + . + . + . + . + "1970-04-17"^^ . + "McCartney" . + . + . + . + "1963-03-22"^^ . + "Please Please Me" . + . + . + . + . + . + . + . + "The Beatles" . + . + "987-65-4323" . + . + . + . + "124-35-6789" . + . + . + "648-67-6545" . + . + . + . + . + . + . diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/canonicalize/results/output.ttl b/corese-command/src/test/resources/fr/inria/corese/command/programs/canonicalize/results/output.ttl new file mode 100644 index 0000000000..e69de29bb2 diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf-Recursive1/Recursive2/Recursive3/beatles-err.ttl b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf-Recursive1/Recursive2/Recursive3/beatles-err.ttl index 6bd1313a88..7d91221bae 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf-Recursive1/Recursive2/Recursive3/beatles-err.ttl +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf-Recursive1/Recursive2/Recursive3/beatles-err.ttl @@ -1,5 +1,5 @@ -PREFIX : +PREFIX : PREFIX rdf: PREFIX xsd: diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf-Recursive1/Recursive2/Recursive3/beatles-ok.jsonld b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf-Recursive1/Recursive2/Recursive3/beatles-ok.jsonld index 
4a161655da..cade87c96b 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf-Recursive1/Recursive2/Recursive3/beatles-ok.jsonld +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf-Recursive1/Recursive2/Recursive3/beatles-ok.jsonld @@ -1,68 +1,68 @@ { "@context": { "xsd": "http://www.w3.org/2001/XMLSchema#", - "ns1": "http://stardog.com/tutorial/" + "ns1": "http://example.com/" }, "@graph": [ { - "@id": "http://stardog.com/tutorial/George_Harrison", - "@type": "http://stardog.com/tutorial/SoloArtist" + "@id": "http://example.com/George_Harrison", + "@type": "http://example.com/SoloArtist" }, { - "@id": "http://stardog.com/tutorial/Imagine", - "@type": "http://stardog.com/tutorial/Album", - "ns1:artist": {"@id": "http://stardog.com/tutorial/John_Lennon"}, + "@id": "http://example.com/Imagine", + "@type": "http://example.com/Album", + "ns1:artist": {"@id": "http://example.com/John_Lennon"}, "ns1:date": {"@value": "1971-10-11", "@type": "xsd:date"}, "ns1:name": "Imagine" }, { - "@id": "http://stardog.com/tutorial/John_Lennon", - "@type": "http://stardog.com/tutorial/SoloArtist" + "@id": "http://example.com/John_Lennon", + "@type": "http://example.com/SoloArtist" }, { - "@id": "http://stardog.com/tutorial/Love_Me_Do", - "@type": "http://stardog.com/tutorial/Song", + "@id": "http://example.com/Love_Me_Do", + "@type": "http://example.com/Song", "ns1:length": 125, "ns1:name": "Love Me Do", - "ns1:writer": [{"@id": "http://stardog.com/tutorial/John_Lennon"}, {"@id": "http://stardog.com/tutorial/Paul_McCartney"}] + "ns1:writer": [{"@id": "http://example.com/John_Lennon"}, {"@id": "http://example.com/Paul_McCartney"}] }, { - "@id": "http://stardog.com/tutorial/McCartney", - "@type": "http://stardog.com/tutorial/Album", - "ns1:artist": {"@id": "http://stardog.com/tutorial/Paul_McCartney"}, + "@id": "http://example.com/McCartney", + "@type": "http://example.com/Album", + "ns1:artist": {"@id": 
"http://example.com/Paul_McCartney"}, "ns1:date": {"@value": "1970-04-17", "@type": "xsd:date"}, "ns1:name": "McCartney" }, { - "@id": "http://stardog.com/tutorial/Paul_McCartney", - "@type": "http://stardog.com/tutorial/SoloArtist" + "@id": "http://example.com/Paul_McCartney", + "@type": "http://example.com/SoloArtist" }, { - "@id": "http://stardog.com/tutorial/Please_Please_Me", - "@type": "http://stardog.com/tutorial/Album", - "ns1:artist": {"@id": "http://stardog.com/tutorial/The_Beatles"}, + "@id": "http://example.com/Please_Please_Me", + "@type": "http://example.com/Album", + "ns1:artist": {"@id": "http://example.com/The_Beatles"}, "ns1:date": {"@value": "1963-03-22", "@type": "xsd:date"}, "ns1:name": "Please Please Me", - "ns1:track": {"@id": "http://stardog.com/tutorial/Love_Me_Do"} + "ns1:track": {"@id": "http://example.com/Love_Me_Do"} }, { - "@id": "http://stardog.com/tutorial/Ringo_Starr", - "@type": "http://stardog.com/tutorial/SoloArtist" + "@id": "http://example.com/Ringo_Starr", + "@type": "http://example.com/SoloArtist" }, { - "@id": "http://stardog.com/tutorial/The_Beatles", - "@type": "http://stardog.com/tutorial/Band", - "ns1:member": [{"@id": "http://stardog.com/tutorial/John_Lennon"}, {"@id": "http://stardog.com/tutorial/Paul_McCartney"}, {"@id": "http://stardog.com/tutorial/Ringo_Starr"}, {"@id": "http://stardog.com/tutorial/George_Harrison"}], + "@id": "http://example.com/The_Beatles", + "@type": "http://example.com/Band", + "ns1:member": [{"@id": "http://example.com/John_Lennon"}, {"@id": "http://example.com/Paul_McCartney"}, {"@id": "http://example.com/Ringo_Starr"}, {"@id": "http://example.com/George_Harrison"}], "ns1:name": "The Beatles" } ] diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf-Recursive1/Recursive2/beatles-ok.rdf b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf-Recursive1/Recursive2/beatles-ok.rdf index 352c9ee02e..3228b5aae9 100644 --- 
a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf-Recursive1/Recursive2/beatles-ok.rdf +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf-Recursive1/Recursive2/beatles-ok.rdf @@ -3,62 +3,62 @@ ]> - - + + - - + + 1971-10-11 Imagine - + - - + + - + 125 Love Me Do - - - + + + - - + + 1970-04-17 McCartney - + - - + + - - + + 1963-03-22 Please Please Me - + - - + + - - - - - + + + + + The Beatles - + \ No newline at end of file diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf-Recursive1/beatles-ok.ttl b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf-Recursive1/beatles-ok.ttl index fd4b4d9040..db53dffdf5 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf-Recursive1/beatles-ok.ttl +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf-Recursive1/beatles-ok.ttl @@ -1,4 +1,4 @@ -PREFIX : +PREFIX : PREFIX rdf: PREFIX xsd: diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf/beatles-err.ttl b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf/beatles-err.ttl index 6bd1313a88..7d91221bae 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf/beatles-err.ttl +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf/beatles-err.ttl @@ -1,5 +1,5 @@ -PREFIX : +PREFIX : PREFIX rdf: PREFIX xsd: diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf/beatles-ok.jsonld b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf/beatles-ok.jsonld index 4a161655da..cade87c96b 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf/beatles-ok.jsonld +++ 
b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf/beatles-ok.jsonld @@ -1,68 +1,68 @@ { "@context": { "xsd": "http://www.w3.org/2001/XMLSchema#", - "ns1": "http://stardog.com/tutorial/" + "ns1": "http://example.com/" }, "@graph": [ { - "@id": "http://stardog.com/tutorial/George_Harrison", - "@type": "http://stardog.com/tutorial/SoloArtist" + "@id": "http://example.com/George_Harrison", + "@type": "http://example.com/SoloArtist" }, { - "@id": "http://stardog.com/tutorial/Imagine", - "@type": "http://stardog.com/tutorial/Album", - "ns1:artist": {"@id": "http://stardog.com/tutorial/John_Lennon"}, + "@id": "http://example.com/Imagine", + "@type": "http://example.com/Album", + "ns1:artist": {"@id": "http://example.com/John_Lennon"}, "ns1:date": {"@value": "1971-10-11", "@type": "xsd:date"}, "ns1:name": "Imagine" }, { - "@id": "http://stardog.com/tutorial/John_Lennon", - "@type": "http://stardog.com/tutorial/SoloArtist" + "@id": "http://example.com/John_Lennon", + "@type": "http://example.com/SoloArtist" }, { - "@id": "http://stardog.com/tutorial/Love_Me_Do", - "@type": "http://stardog.com/tutorial/Song", + "@id": "http://example.com/Love_Me_Do", + "@type": "http://example.com/Song", "ns1:length": 125, "ns1:name": "Love Me Do", - "ns1:writer": [{"@id": "http://stardog.com/tutorial/John_Lennon"}, {"@id": "http://stardog.com/tutorial/Paul_McCartney"}] + "ns1:writer": [{"@id": "http://example.com/John_Lennon"}, {"@id": "http://example.com/Paul_McCartney"}] }, { - "@id": "http://stardog.com/tutorial/McCartney", - "@type": "http://stardog.com/tutorial/Album", - "ns1:artist": {"@id": "http://stardog.com/tutorial/Paul_McCartney"}, + "@id": "http://example.com/McCartney", + "@type": "http://example.com/Album", + "ns1:artist": {"@id": "http://example.com/Paul_McCartney"}, "ns1:date": {"@value": "1970-04-17", "@type": "xsd:date"}, "ns1:name": "McCartney" }, { - "@id": "http://stardog.com/tutorial/Paul_McCartney", - "@type": 
"http://stardog.com/tutorial/SoloArtist" + "@id": "http://example.com/Paul_McCartney", + "@type": "http://example.com/SoloArtist" }, { - "@id": "http://stardog.com/tutorial/Please_Please_Me", - "@type": "http://stardog.com/tutorial/Album", - "ns1:artist": {"@id": "http://stardog.com/tutorial/The_Beatles"}, + "@id": "http://example.com/Please_Please_Me", + "@type": "http://example.com/Album", + "ns1:artist": {"@id": "http://example.com/The_Beatles"}, "ns1:date": {"@value": "1963-03-22", "@type": "xsd:date"}, "ns1:name": "Please Please Me", - "ns1:track": {"@id": "http://stardog.com/tutorial/Love_Me_Do"} + "ns1:track": {"@id": "http://example.com/Love_Me_Do"} }, { - "@id": "http://stardog.com/tutorial/Ringo_Starr", - "@type": "http://stardog.com/tutorial/SoloArtist" + "@id": "http://example.com/Ringo_Starr", + "@type": "http://example.com/SoloArtist" }, { - "@id": "http://stardog.com/tutorial/The_Beatles", - "@type": "http://stardog.com/tutorial/Band", - "ns1:member": [{"@id": "http://stardog.com/tutorial/John_Lennon"}, {"@id": "http://stardog.com/tutorial/Paul_McCartney"}, {"@id": "http://stardog.com/tutorial/Ringo_Starr"}, {"@id": "http://stardog.com/tutorial/George_Harrison"}], + "@id": "http://example.com/The_Beatles", + "@type": "http://example.com/Band", + "ns1:member": [{"@id": "http://example.com/John_Lennon"}, {"@id": "http://example.com/Paul_McCartney"}, {"@id": "http://example.com/Ringo_Starr"}, {"@id": "http://example.com/George_Harrison"}], "ns1:name": "The Beatles" } ] diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf/beatles-ok.rdf b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf/beatles-ok.rdf index 352c9ee02e..3228b5aae9 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf/beatles-ok.rdf +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf/beatles-ok.rdf @@ -3,62 +3,62 @@ ]> - - + + - - + + 
1971-10-11 Imagine - + - - + + - + 125 Love Me Do - - - + + + - - + + 1970-04-17 McCartney - + - - + + - - + + 1963-03-22 Please Please Me - + - - + + - - - - - + + + + + The Beatles - + \ No newline at end of file diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf/beatles-ok.ttl b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf/beatles-ok.ttl index fd4b4d9040..f0fe64d2ac 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf/beatles-ok.ttl +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputRdf/beatles-ok.ttl @@ -1,34 +1,40 @@ -PREFIX : -PREFIX rdf: -PREFIX xsd: - -:The_Beatles rdf:type :Band . -:The_Beatles :name "The Beatles" . -:The_Beatles :member :John_Lennon . -:The_Beatles :member :Paul_McCartney . -:The_Beatles :member :Ringo_Starr . -:The_Beatles :member :George_Harrison . -:John_Lennon rdf:type :SoloArtist . -:Paul_McCartney rdf:type :SoloArtist . -:Ringo_Starr rdf:type :SoloArtist . -:George_Harrison rdf:type :SoloArtist . -:Please_Please_Me rdf:type :Album . -:Please_Please_Me :name "Please Please Me" . -:Please_Please_Me :date "1963-03-22"^^xsd:date . -:Please_Please_Me :artist :The_Beatles . -:Please_Please_Me :track :Love_Me_Do . -:Love_Me_Do rdf:type :Song . -:Love_Me_Do :name "Love Me Do" . -:Love_Me_Do :length 125 . -:Love_Me_Do :writer :John_Lennon . -:Love_Me_Do :writer :Paul_McCartney . - -:McCartney rdf:type :Album . -:McCartney :name "McCartney" . -:McCartney :date "1970-04-17"^^xsd:date . -:McCartney :artist :Paul_McCartney . - -:Imagine rdf:type :Album . -:Imagine :name "Imagine" . -:Imagine :date "1971-10-11"^^xsd:date . -:Imagine :artist :John_Lennon . +@prefix xsd: . +@prefix ex: . +@prefix rdf: . + +ex:Love_Me_Do ex:length 125 ; + ex:name "Love Me Do" ; + ex:writer ex:John_Lennon ; + ex:writer ex:Paul_McCartney ; + rdf:type ex:Song . 
+ +ex:The_Beatles ex:member ex:John_Lennon ; + ex:member ex:Paul_McCartney ; + ex:member ex:Ringo_Starr ; + ex:member ex:George_Harrison ; + ex:name "The Beatles" ; + rdf:type ex:Band . + +ex:Please_Please_Me ex:artist ex:The_Beatles ; + ex:date "1963-03-22"^^xsd:date ; + ex:name "Please Please Me" ; + ex:track ex:Love_Me_Do ; + rdf:type ex:Album . + +ex:George_Harrison rdf:type ex:SoloArtist . + +ex:Ringo_Starr rdf:type ex:SoloArtist . + +ex:John_Lennon rdf:type ex:SoloArtist . + +ex:Paul_McCartney rdf:type ex:SoloArtist . + +ex:McCartney ex:artist ex:Paul_McCartney ; + ex:date "1970-04-17"^^xsd:date ; + ex:name "McCartney" ; + rdf:type ex:Album . + +ex:Imagine ex:artist ex:John_Lennon ; + ex:date "1971-10-11"^^xsd:date ; + ex:name "Imagine" ; + rdf:type ex:Album . diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputShacl/beatles-validator.jsonld b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputShacl/beatles-validator.jsonld index 3bf76eb5da..1d71d4f369 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputShacl/beatles-validator.jsonld +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputShacl/beatles-validator.jsonld @@ -11,14 +11,14 @@ "sh:datatype": {"@id": "http://www.w3.org/2001/XMLSchema#string"}, "sh:maxCount": 1, "sh:minCount": 1, - "sh:path": {"@id": "http://stardog.com/tutorial/name"} + "sh:path": {"@id": "http://example.com/name"} }, { "@id": "_:b1", - "sh:class": {"@id": "http://stardog.com/tutorial/SoloArtist"}, + "sh:class": {"@id": "http://example.com/SoloArtist"}, "sh:minCount": 1, - "sh:path": {"@id": "http://stardog.com/tutorial/member"} + "sh:path": {"@id": "http://example.com/member"} }, { @@ -26,7 +26,7 @@ "sh:datatype": {"@id": "http://www.w3.org/2001/XMLSchema#string"}, "sh:maxCount": 1, "sh:minCount": 1, - "sh:path": {"@id": "http://stardog.com/tutorial/name"} + "sh:path": {"@id": 
"http://example.com/name"} }, { @@ -34,7 +34,7 @@ "sh:datatype": {"@id": "http://www.w3.org/2001/XMLSchema#date"}, "sh:maxCount": 1, "sh:minCount": 1, - "sh:path": {"@id": "http://stardog.com/tutorial/date"} + "sh:path": {"@id": "http://example.com/date"} }, { @@ -42,7 +42,7 @@ "sh:maxCount": 1, "sh:minCount": 1, "sh:nodeKind": {"@id": "http://www.w3.org/ns/shacl#IRI"}, - "sh:path": {"@id": "http://stardog.com/tutorial/artist"} + "sh:path": {"@id": "http://example.com/artist"} }, { @@ -50,7 +50,7 @@ "sh:datatype": {"@id": "http://www.w3.org/2001/XMLSchema#string"}, "sh:maxCount": 1, "sh:minCount": 1, - "sh:path": {"@id": "http://stardog.com/tutorial/name"} + "sh:path": {"@id": "http://example.com/name"} }, { @@ -58,41 +58,41 @@ "sh:datatype": {"@id": "http://www.w3.org/2001/XMLSchema#integer"}, "sh:maxCount": 1, "sh:minCount": 1, - "sh:path": {"@id": "http://stardog.com/tutorial/length"} + "sh:path": {"@id": "http://example.com/length"} }, { "@id": "_:b7", "sh:minCount": 1, "sh:nodeKind": {"@id": "http://www.w3.org/ns/shacl#IRI"}, - "sh:path": {"@id": "http://stardog.com/tutorial/writer"} + "sh:path": {"@id": "http://example.com/writer"} }, { - "@id": "http://stardog.com/tutorial/AlbumShape", + "@id": "http://example.com/AlbumShape", "@type": "http://www.w3.org/ns/shacl#NodeShape", "sh:property": [{"@id": "_:b2"}, {"@id": "_:b3"}, {"@id": "_:b4"}], - "sh:targetClass": {"@id": "http://stardog.com/tutorial/Album"} + "sh:targetClass": {"@id": "http://example.com/Album"} }, { - "@id": "http://stardog.com/tutorial/BandShape", + "@id": "http://example.com/BandShape", "@type": "http://www.w3.org/ns/shacl#NodeShape", "sh:property": [{"@id": "_:b0"}, {"@id": "_:b1"}], - "sh:targetClass": {"@id": "http://stardog.com/tutorial/Band"} + "sh:targetClass": {"@id": "http://example.com/Band"} }, { - "@id": "http://stardog.com/tutorial/SoloArtistShape", + "@id": "http://example.com/SoloArtistShape", "@type": "http://www.w3.org/ns/shacl#NodeShape", - "sh:targetClass": {"@id": 
"http://stardog.com/tutorial/SoloArtist"} + "sh:targetClass": {"@id": "http://example.com/SoloArtist"} }, { - "@id": "http://stardog.com/tutorial/SongShape", + "@id": "http://example.com/SongShape", "@type": "http://www.w3.org/ns/shacl#NodeShape", "sh:property": [{"@id": "_:b5"}, {"@id": "_:b6"}, {"@id": "_:b7"}], - "sh:targetClass": {"@id": "http://stardog.com/tutorial/Song"} + "sh:targetClass": {"@id": "http://example.com/Song"} } ] } \ No newline at end of file diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputShacl/beatles-validator.rdf b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputShacl/beatles-validator.rdf index b64f11ece6..bafdd6db7d 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputShacl/beatles-validator.rdf +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputShacl/beatles-validator.rdf @@ -8,14 +8,14 @@ xmlns:sh='http://www.w3.org/ns/shacl#' xmlns:rdf='http://www.w3.org/1999/02/22-rdf-syntax-ns#'> - + 1 1 - + @@ -23,7 +23,7 @@ 1 1 - + @@ -31,44 +31,44 @@ 1 1 - + - + - + 1 1 - + - + 1 - + - + - + - + - + 1 1 - + @@ -76,17 +76,17 @@ 1 1 - + 1 - + - + \ No newline at end of file diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputShacl/beatles-validator.ttl b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputShacl/beatles-validator.ttl index 6d131116a8..24b388f650 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputShacl/beatles-validator.ttl +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/inputShacl/beatles-validator.ttl @@ -1,66 +1,65 @@ PREFIX sh: PREFIX xsd: -PREFIX : +PREFIX ex: # Shape for Bands -:BandShape a sh:NodeShape ; - sh:targetClass :Band ; +ex:BandShape a sh:NodeShape ; + sh:targetClass ex:Band ; sh:property [ - sh:path :name ; + sh:path ex:name ; sh:datatype 
xsd:string ; sh:minCount 1 ; sh:maxCount 1 ; ] ; sh:property [ - sh:path :member ; - sh:class :SoloArtist ; + sh:path ex:member ; + sh:class ex:SoloArtist ; sh:minCount 1 ; ] . # Shape for Solo Artists -:SoloArtistShape a sh:NodeShape ; - sh:targetClass :SoloArtist . +ex:SoloArtistShape a sh:NodeShape ; + sh:targetClass ex:SoloArtist . # Shape for Albums -:AlbumShape a sh:NodeShape ; - sh:targetClass :Album ; +ex:AlbumShape a sh:NodeShape ; + sh:targetClass ex:Album ; sh:property [ - sh:path :name ; + sh:path ex:name ; sh:datatype xsd:string ; sh:minCount 1 ; sh:maxCount 1 ; ] ; sh:property [ - sh:path :date ; + sh:path ex:date ; sh:datatype xsd:date ; sh:minCount 1 ; sh:maxCount 1 ; ] ; sh:property [ - sh:path :artist ; + sh:path ex:artist ; sh:nodeKind sh:IRI ; sh:minCount 1 ; sh:maxCount 1 ; ] . # Shape for Songs -:SongShape a sh:NodeShape ; - sh:targetClass :Song ; +ex:SongShape a sh:NodeShape ; + sh:targetClass ex:Song ; sh:property [ - sh:path :name ; + sh:path ex:name ; sh:datatype xsd:string ; sh:minCount 1 ; sh:maxCount 1 ; ] ; sh:property [ - sh:path :length ; + sh:path ex:length ; sh:datatype xsd:integer ; sh:minCount 1 ; sh:maxCount 1 ; ] ; sh:property [ - sh:path :writer ; + sh:path ex:writer ; sh:nodeKind sh:IRI ; sh:minCount 1 ; ] . - diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/references/beatles-err.ttl b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/references/beatles-err.ttl index 6346c94aba..ef6df44f9e 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/references/beatles-err.ttl +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/references/beatles-err.ttl @@ -1,11 +1,11 @@ @prefix xsh: . -@prefix ns1: . +@prefix ns1: . @prefix sh: . @prefix rdf: . 
a sh:ValidationResult ; sh:focusNode ns1:Please_Please_Me ; - sh:resultMessage "Fail at: [sh:datatype xsd:string ;\n sh:maxCount 1 ;\n sh:minCount 1 ;\n sh:path ]" ; + sh:resultMessage "Fail at: [sh:datatype xsd:string ;\n sh:maxCount 1 ;\n sh:minCount 1 ;\n sh:path ]" ; sh:resultPath ns1:name ; sh:resultSeverity sh:Violation ; sh:sourceConstraintComponent sh:MaxCountConstraintComponent ; diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/references/beatles-person-err.ttl b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/references/beatles-person-err.ttl index b0e514e4d5..d26141cefe 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/references/beatles-person-err.ttl +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/references/beatles-person-err.ttl @@ -1,6 +1,6 @@ @prefix xsd: . @prefix xsh: . -@prefix ns1: . +@prefix ns1: . @prefix sh: . @prefix ex: . @prefix rdf: . @@ -34,7 +34,7 @@ a sh:ValidationResult ; sh:focusNode ns1:Please_Please_Me ; - sh:resultMessage "Fail at: [sh:datatype xsd:string ;\n sh:maxCount 1 ;\n sh:minCount 1 ;\n sh:path ]" ; + sh:resultMessage "Fail at: [sh:datatype xsd:string ;\n sh:maxCount 1 ;\n sh:minCount 1 ;\n sh:path ]" ; sh:resultPath ns1:name ; sh:resultSeverity sh:Violation ; sh:sourceConstraintComponent sh:MaxCountConstraintComponent ; diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/references/directory-err.ttl b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/references/directory-err.ttl index 6bf4fbe3eb..026d61de3c 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/references/directory-err.ttl +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/references/directory-err.ttl @@ -1,6 +1,6 @@ @prefix xsd: . @prefix xsh: . -@prefix ns1: . +@prefix ns1: . @prefix sh: . @prefix ex: . 
@prefix rdf: . @@ -25,7 +25,7 @@ a sh:ValidationResult ; sh:focusNode ns1:Please_Please_Me ; - sh:resultMessage "Fail at: [sh:datatype xsd:string ;\n sh:maxCount 1 ;\n sh:minCount 1 ;\n sh:path ]" ; + sh:resultMessage "Fail at: [sh:datatype xsd:string ;\n sh:maxCount 1 ;\n sh:minCount 1 ;\n sh:path ]" ; sh:resultPath ns1:name ; sh:resultSeverity sh:Violation ; sh:sourceConstraintComponent sh:MaxCountConstraintComponent ; @@ -34,7 +34,7 @@ a sh:ValidationResult ; sh:focusNode ns1:Please_Please_Me ; - sh:resultMessage "Fail at: [sh:datatype xsd:string ;\n sh:maxCount 1 ;\n sh:minCount 1 ;\n sh:path ]" ; + sh:resultMessage "Fail at: [sh:datatype xsd:string ;\n sh:maxCount 1 ;\n sh:minCount 1 ;\n sh:path ]" ; sh:resultPath ns1:name ; sh:resultSeverity sh:Violation ; sh:sourceConstraintComponent sh:MaxCountConstraintComponent ; @@ -43,7 +43,7 @@ a sh:ValidationResult ; sh:focusNode ns1:Please_Please_Me ; - sh:resultMessage "Fail at: [sh:datatype xsd:string ;\n sh:maxCount 1 ;\n sh:minCount 1 ;\n sh:path ]" ; + sh:resultMessage "Fail at: [sh:datatype xsd:string ;\n sh:maxCount 1 ;\n sh:minCount 1 ;\n sh:path ]" ; sh:resultPath ns1:name ; sh:resultSeverity sh:Violation ; sh:sourceConstraintComponent sh:MaxCountConstraintComponent ; diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/references/directory-recursive-err.ttl b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/references/directory-recursive-err.ttl index ee06c01a63..49e6aa2188 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/references/directory-recursive-err.ttl +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/shacl/references/directory-recursive-err.ttl @@ -1,13 +1,13 @@ @prefix xsd: . @prefix xsh: . -@prefix ns1: . +@prefix ns1: . @prefix sh: . @prefix ex: . @prefix rdf: . 
a sh:ValidationResult ; sh:focusNode ns1:Please_Please_Me ; - sh:resultMessage "Fail at: [sh:datatype xsd:string ;\n sh:maxCount 1 ;\n sh:minCount 1 ;\n sh:path ]" ; + sh:resultMessage "Fail at: [sh:datatype xsd:string ;\n sh:maxCount 1 ;\n sh:minCount 1 ;\n sh:path ]" ; sh:resultPath ns1:name ; sh:resultSeverity sh:Violation ; sh:sourceConstraintComponent sh:MaxCountConstraintComponent ; @@ -34,7 +34,7 @@ a sh:ValidationResult ; sh:focusNode ns1:Please_Please_Me ; - sh:resultMessage "Fail at: [sh:datatype xsd:string ;\n sh:maxCount 1 ;\n sh:minCount 1 ;\n sh:path ]" ; + sh:resultMessage "Fail at: [sh:datatype xsd:string ;\n sh:maxCount 1 ;\n sh:minCount 1 ;\n sh:path ]" ; sh:resultPath ns1:name ; sh:resultSeverity sh:Violation ; sh:sourceConstraintComponent sh:MaxCountConstraintComponent ; @@ -61,7 +61,7 @@ a sh:ValidationResult ; sh:focusNode ns1:Please_Please_Me ; - sh:resultMessage "Fail at: [sh:datatype xsd:string ;\n sh:maxCount 1 ;\n sh:minCount 1 ;\n sh:path ]" ; + sh:resultMessage "Fail at: [sh:datatype xsd:string ;\n sh:maxCount 1 ;\n sh:minCount 1 ;\n sh:path ]" ; sh:resultPath ns1:name ; sh:resultSeverity sh:Violation ; sh:sourceConstraintComponent sh:MaxCountConstraintComponent ; diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/input/beatles.ttl b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/input/beatles.ttl index fd4b4d9040..db53dffdf5 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/input/beatles.ttl +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/input/beatles.ttl @@ -1,4 +1,4 @@ -PREFIX : +PREFIX : PREFIX rdf: PREFIX xsd: diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/ask/beatlesFalse.rq b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/ask/beatlesFalse.rq index 6e6cfe2557..3b1050f482 100644 --- 
a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/ask/beatlesFalse.rq +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/ask/beatlesFalse.rq @@ -1,4 +1,4 @@ -PREFIX : +PREFIX : PREFIX rdf: ASK { diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/ask/beatlesTrue.rq b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/ask/beatlesTrue.rq index c08d019f3f..58ede0917f 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/ask/beatlesTrue.rq +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/ask/beatlesTrue.rq @@ -1,4 +1,4 @@ -PREFIX : +PREFIX : PREFIX rdf: ASK { diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/construct/albumBeatles.rq b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/construct/albumBeatles.rq index 281d42225e..b09de592b2 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/construct/albumBeatles.rq +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/construct/albumBeatles.rq @@ -1,4 +1,4 @@ -PREFIX : +PREFIX : PREFIX rdf: PREFIX xsd: diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/delete-where/deleteLenon.rq b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/delete-where/deleteLenon.rq index b349a0500b..c3fe716978 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/delete-where/deleteLenon.rq +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/delete-where/deleteLenon.rq @@ -1,4 +1,4 @@ -PREFIX : +PREFIX : PREFIX rdf: PREFIX xsd: diff --git 
a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/delete/deleteMcCartney.rq b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/delete/deleteMcCartney.rq index 9401f0b500..13fe0f9e4e 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/delete/deleteMcCartney.rq +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/delete/deleteMcCartney.rq @@ -1,4 +1,4 @@ -PREFIX : +PREFIX : PREFIX rdf: PREFIX xsd: diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/describe/describeBeatles.rq b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/describe/describeBeatles.rq index a9d9f1b88d..b2f357e90f 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/describe/describeBeatles.rq +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/describe/describeBeatles.rq @@ -1,2 +1,2 @@ -PREFIX : +PREFIX : DESCRIBE :The_Beatles diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/insert-where/beatlesAge.rq b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/insert-where/beatlesAge.rq index 1034b77dd5..2949a4edb0 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/insert-where/beatlesAge.rq +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/insert-where/beatlesAge.rq @@ -1,4 +1,4 @@ -PREFIX : +PREFIX : PREFIX rdf: INSERT { diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/insert/beatlesInsertRock.rq b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/insert/beatlesInsertRock.rq index fbc6378b6a..d2b3a98fc2 100644 --- 
a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/insert/beatlesInsertRock.rq +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/insert/beatlesInsertRock.rq @@ -1,4 +1,4 @@ -PREFIX : +PREFIX : INSERT DATA { diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/select/beatlesAlbums.rq b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/select/beatlesAlbums.rq index 69ecfccd87..7b0f4cb046 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/select/beatlesAlbums.rq +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/queries/select/beatlesAlbums.rq @@ -1,4 +1,4 @@ -PREFIX : +PREFIX : PREFIX rdf: # Get albums informations diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-jsonld-false.jsonld b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-jsonld-false.jsonld index 7dc26c829a..a84ec6bcda 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-jsonld-false.jsonld +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-jsonld-false.jsonld @@ -1,6 +1,6 @@ { "@context": { - "": "http://stardog.com/tutorial/", + "": "http://example.com/", "xsd": "http://www.w3.org/2001/XMLSchema#", "ns1": "http://www.w3.org/2001/sw/DataAccess/tests/result-set#", "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#" diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-jsonld-true.jsonld b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-jsonld-true.jsonld index d1a4d4a37d..36d495dad5 100644 --- 
a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-jsonld-true.jsonld +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-jsonld-true.jsonld @@ -1,6 +1,6 @@ { "@context": { - "": "http://stardog.com/tutorial/", + "": "http://example.com/", "xsd": "http://www.w3.org/2001/XMLSchema#", "ns1": "http://www.w3.org/2001/sw/DataAccess/tests/result-set#", "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#" diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-rdfxml-false.rdf b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-rdfxml-false.rdf index a5d48046c6..b58cbdf9a0 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-rdfxml-false.rdf +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-rdfxml-false.rdf @@ -1,6 +1,6 @@ diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-rdfxml-true.rdf b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-rdfxml-true.rdf index d9be261826..79360b496b 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-rdfxml-true.rdf +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-rdfxml-true.rdf @@ -1,6 +1,6 @@ diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-trig-false.trig b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-trig-false.trig index eb5b7a55ea..2c4c691de0 100644 --- 
a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-trig-false.trig +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-trig-false.trig @@ -1,4 +1,4 @@ -@prefix : . +@prefix : . @prefix ns1: . @prefix rdf: . diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-trig-true.trig b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-trig-true.trig index bfd317dae8..271c24d8d5 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-trig-true.trig +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-trig-true.trig @@ -1,4 +1,4 @@ -@prefix : . +@prefix : . @prefix ns1: . @prefix rdf: . diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-turtle-false.ttl b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-turtle-false.ttl index eb5b7a55ea..2c4c691de0 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-turtle-false.ttl +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-turtle-false.ttl @@ -1,4 +1,4 @@ -@prefix : . +@prefix : . @prefix ns1: . @prefix rdf: . 
diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-turtle-true.ttl b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-turtle-true.ttl index bfd317dae8..271c24d8d5 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-turtle-true.ttl +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/ask/beatles-ask-turtle-true.ttl @@ -1,4 +1,4 @@ -@prefix : . +@prefix : . @prefix ns1: . @prefix rdf: . diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/construct/beatles-construct-jsonld.jsonld b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/construct/beatles-construct-jsonld.jsonld index c81da5bc77..701d4ab2ae 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/construct/beatles-construct-jsonld.jsonld +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/construct/beatles-construct-jsonld.jsonld @@ -1,6 +1,6 @@ { "@context": { - "": "http://stardog.com/tutorial/", + "": "http://example.com/", "xsd": "http://www.w3.org/2001/XMLSchema#", "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#" }, diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/construct/beatles-construct-rdfxml.xml b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/construct/beatles-construct-rdfxml.xml index 07e926e15f..e370d16ce4 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/construct/beatles-construct-rdfxml.xml +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/construct/beatles-construct-rdfxml.xml @@ -1,10 +1,10 @@ - - + + 1963-03-22 Please Please Me diff --git 
a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/construct/beatles-construct-trig.trig b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/construct/beatles-construct-trig.trig index ab931e5078..01f15ccf7b 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/construct/beatles-construct-trig.trig +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/construct/beatles-construct-trig.trig @@ -1,4 +1,4 @@ -@prefix : . +@prefix : . @prefix xsd: . @prefix rdf: . diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/construct/beatles-construct-turtle.ttl b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/construct/beatles-construct-turtle.ttl index ab931e5078..01f15ccf7b 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/construct/beatles-construct-turtle.ttl +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/construct/beatles-construct-turtle.ttl @@ -1,4 +1,4 @@ -@prefix : . +@prefix : . @prefix xsd: . @prefix rdf: . 
diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete-where/beatles-delete-where-jsonld.jsonld b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete-where/beatles-delete-where-jsonld.jsonld index 5f29aa33de..f81f138109 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete-where/beatles-delete-where-jsonld.jsonld +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete-where/beatles-delete-where-jsonld.jsonld @@ -1,7 +1,7 @@ { "@context": { "xsd": "http://www.w3.org/2001/XMLSchema#", - "ns1": "http://stardog.com/tutorial/", + "ns1": "http://example.com/", "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#" }, "@graph": [ diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete-where/beatles-delete-where-rdfxml.xml b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete-where/beatles-delete-where-rdfxml.xml index 7ece3345d1..a9b045cea3 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete-where/beatles-delete-where-rdfxml.xml +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete-where/beatles-delete-where-rdfxml.xml @@ -1,49 +1,49 @@ - + - - + + 1970-04-17 McCartney - - - - + + + + The Beatles - + 125 Love Me Do - + - + - - + + 1963-03-22 Please Please Me - + - + - + 1971-10-11 Imagine - + \ No newline at end of file diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete-where/beatles-delete-where-trig.trig b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete-where/beatles-delete-where-trig.trig index 3b28f4c02e..915fd5311b 100644 --- 
a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete-where/beatles-delete-where-trig.trig +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete-where/beatles-delete-where-trig.trig @@ -1,6 +1,5 @@ @prefix xsd: . -@prefix ns1: . -@prefix rdf: . +@prefix ns1: . ns1:Paul_McCartney a ns1:SoloArtist . diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete-where/beatles-delete-where-turtle.ttl b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete-where/beatles-delete-where-turtle.ttl index e3599fa2e7..7a6eb2fecc 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete-where/beatles-delete-where-turtle.ttl +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete-where/beatles-delete-where-turtle.ttl @@ -1,6 +1,5 @@ @prefix xsd: . -@prefix ns1: . -@prefix rdf: . +@prefix ns1: . ns1:George_Harrison a ns1:SoloArtist . 
diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete/beatles-delete-jsonld.jsonld b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete/beatles-delete-jsonld.jsonld index ff6f8fe885..adc34a48a3 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete/beatles-delete-jsonld.jsonld +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete/beatles-delete-jsonld.jsonld @@ -1,7 +1,7 @@ { "@context": { "xsd": "http://www.w3.org/2001/XMLSchema#", - "ns1": "http://stardog.com/tutorial/", + "ns1": "http://example.com/", "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#" }, "@graph": [ diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete/beatles-delete-rdfxml.xml b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete/beatles-delete-rdfxml.xml index 34cd580be1..f2186dd29c 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete/beatles-delete-rdfxml.xml +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete/beatles-delete-rdfxml.xml @@ -1,46 +1,46 @@ - + - - - - - + + + + + The Beatles - + 125 Love Me Do - - + + - + - - + + 1963-03-22 Please Please Me - + - + - - + + 1971-10-11 Imagine - + \ No newline at end of file diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete/beatles-delete-trig.trig b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete/beatles-delete-trig.trig index 74f8c8a78d..8686b8cfdb 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete/beatles-delete-trig.trig +++ 
b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete/beatles-delete-trig.trig @@ -1,6 +1,5 @@ @prefix xsd: . -@prefix ns1: . -@prefix rdf: . +@prefix ns1: . ns1:Paul_McCartney a ns1:SoloArtist . diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete/beatles-delete-turtle.ttl b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete/beatles-delete-turtle.ttl index 6061ae42f2..44183c2cfa 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete/beatles-delete-turtle.ttl +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/delete/beatles-delete-turtle.ttl @@ -1,6 +1,5 @@ @prefix xsd: . -@prefix ns1: . -@prefix rdf: . +@prefix ns1: . ns1:George_Harrison a ns1:SoloArtist . diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/describe/beatles-describe-jsonld.jsonld b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/describe/beatles-describe-jsonld.jsonld index ef17573d5a..411459a143 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/describe/beatles-describe-jsonld.jsonld +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/describe/beatles-describe-jsonld.jsonld @@ -1,6 +1,6 @@ { "@context": { - "": "http://stardog.com/tutorial/", + "": "http://example.com/", "xsd": "http://www.w3.org/2001/XMLSchema#", "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#" }, diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/describe/beatles-describe-rdfxml.xml b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/describe/beatles-describe-rdfxml.xml index eb08c7b3ee..8cff21e02f 100644 --- 
a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/describe/beatles-describe-rdfxml.xml +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/describe/beatles-describe-rdfxml.xml @@ -1,18 +1,18 @@ - - - - - + + + + + The Beatles - - + + \ No newline at end of file diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/describe/beatles-describe-trig.trig b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/describe/beatles-describe-trig.trig index 69ad82d0de..b25ea997c9 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/describe/beatles-describe-trig.trig +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/describe/beatles-describe-trig.trig @@ -1,5 +1,4 @@ -@prefix : . -@prefix rdf: . +@prefix : . :Please_Please_Me :artist :The_Beatles . diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/describe/beatles-describe-turtle.ttl b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/describe/beatles-describe-turtle.ttl index fd7632fb6f..c528bcb2ef 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/describe/beatles-describe-turtle.ttl +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/describe/beatles-describe-turtle.ttl @@ -1,5 +1,4 @@ -@prefix : . -@prefix rdf: . +@prefix : . 
:The_Beatles :member :John_Lennon ; :member :Paul_McCartney ; diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert-where/beatles-insertwhere-bidingjsonld.jsonld b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert-where/beatles-insertwhere-bidingjsonld.jsonld index 551ea11ff0..21ac5e7f18 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert-where/beatles-insertwhere-bidingjsonld.jsonld +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert-where/beatles-insertwhere-bidingjsonld.jsonld @@ -1,7 +1,7 @@ { "@context": { "xsd": "http://www.w3.org/2001/XMLSchema#", - "ns1": "http://stardog.com/tutorial/", + "ns1": "http://example.com/", "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#" }, "@graph": [ diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert-where/beatles-insertwhere-bidingrdfxml.xml b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert-where/beatles-insertwhere-bidingrdfxml.xml index 6e8b1237b6..723ae9e365 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert-where/beatles-insertwhere-bidingrdfxml.xml +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert-where/beatles-insertwhere-bidingrdfxml.xml @@ -1,55 +1,55 @@ - + 30 - - + + 1970-04-17 McCartney - - - - - + + + + + The Beatles - + 125 Love Me Do - - + + - + 30 - - + + 1963-03-22 Please Please Me - + - + 30 - - + + 1971-10-11 Imagine - + 30 diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert-where/beatles-insertwhere-bidingtrig.trig b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert-where/beatles-insertwhere-bidingtrig.trig index 
fc0dda9a45..136d7ee0f7 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert-where/beatles-insertwhere-bidingtrig.trig +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert-where/beatles-insertwhere-bidingtrig.trig @@ -1,6 +1,5 @@ @prefix xsd: . -@prefix ns1: . -@prefix rdf: . +@prefix ns1: . ns1:George_Harrison ns1:age 30 ; a ns1:SoloArtist . diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert-where/beatles-insertwhere-bidingturtle.ttl b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert-where/beatles-insertwhere-bidingturtle.ttl index b4d8812d68..5c76eaae82 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert-where/beatles-insertwhere-bidingturtle.ttl +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert-where/beatles-insertwhere-bidingturtle.ttl @@ -1,6 +1,5 @@ @prefix xsd: . -@prefix ns1: . -@prefix rdf: . +@prefix ns1: . ns1:George_Harrison ns1:age 30 ; a ns1:SoloArtist . 
diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert/beatles-insert-jsonld.jsonld b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert/beatles-insert-jsonld.jsonld index 1e19551c5d..d15f581c12 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert/beatles-insert-jsonld.jsonld +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert/beatles-insert-jsonld.jsonld @@ -1,7 +1,7 @@ { "@context": { "xsd": "http://www.w3.org/2001/XMLSchema#", - "ns1": "http://stardog.com/tutorial/", + "ns1": "http://example.com/", "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#" }, "@graph": [ diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert/beatles-insert-rdfxml.xml b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert/beatles-insert-rdfxml.xml index 5a20cdd0f9..522603df07 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert/beatles-insert-rdfxml.xml +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert/beatles-insert-rdfxml.xml @@ -1,53 +1,53 @@ - + - - + + 1970-04-17 McCartney - - - - - + + + + + The Beatles - + 125 Love Me Do - - + + - + - - + + 1963-03-22 Rock and Roll Please Please Me - + - + - - + + 1971-10-11 Imagine - + \ No newline at end of file diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert/beatles-insert-trig.trig b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert/beatles-insert-trig.trig index 607929c9b4..d4dd1ddf77 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert/beatles-insert-trig.trig +++ 
b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert/beatles-insert-trig.trig @@ -1,6 +1,5 @@ @prefix xsd: . -@prefix ns1: . -@prefix rdf: . +@prefix ns1: . ns1:George_Harrison a ns1:SoloArtist . diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert/beatles-insert-turtle.ttl b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert/beatles-insert-turtle.ttl index fd2f027135..89d48b9499 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert/beatles-insert-turtle.ttl +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/insert/beatles-insert-turtle.ttl @@ -1,6 +1,5 @@ @prefix xsd: . -@prefix ns1: . -@prefix rdf: . +@prefix ns1: . ns1:George_Harrison a ns1:SoloArtist . diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-bidingcsv.csv b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-bidingcsv.csv index bbf073a939..bb8f067135 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-bidingcsv.csv +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-bidingcsv.csv @@ -1,4 +1,4 @@ albumName,albumDate,artist -Please Please Me,1963-03-22,http://stardog.com/tutorial/The_Beatles -McCartney,1970-04-17,http://stardog.com/tutorial/Paul_McCartney -Imagine,1971-10-11,http://stardog.com/tutorial/John_Lennon +Please Please Me,1963-03-22,http://example.com/The_Beatles +McCartney,1970-04-17,http://example.com/Paul_McCartney +Imagine,1971-10-11,http://example.com/John_Lennon diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-bidingjson.json 
b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-bidingjson.json index 14e9a36a34..60704bcb94 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-bidingjson.json +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-bidingjson.json @@ -9,19 +9,19 @@ { "albumName": { "type": "typed-literal", "datatype": "http://www.w3.org/2001/XMLSchema#string", "value": "Please Please Me"}, "albumDate": { "type": "typed-literal", "datatype": "http://www.w3.org/2001/XMLSchema#date", "value": "1963-03-22"}, -"artist": { "type": "uri", "value": "http://stardog.com/tutorial/The_Beatles"} +"artist": { "type": "uri", "value": "http://example.com/The_Beatles"} } , { "albumName": { "type": "typed-literal", "datatype": "http://www.w3.org/2001/XMLSchema#string", "value": "McCartney"}, "albumDate": { "type": "typed-literal", "datatype": "http://www.w3.org/2001/XMLSchema#date", "value": "1970-04-17"}, -"artist": { "type": "uri", "value": "http://stardog.com/tutorial/Paul_McCartney"} +"artist": { "type": "uri", "value": "http://example.com/Paul_McCartney"} } , { "albumName": { "type": "typed-literal", "datatype": "http://www.w3.org/2001/XMLSchema#string", "value": "Imagine"}, "albumDate": { "type": "typed-literal", "datatype": "http://www.w3.org/2001/XMLSchema#date", "value": "1971-10-11"}, -"artist": { "type": "uri", "value": "http://stardog.com/tutorial/John_Lennon"} +"artist": { "type": "uri", "value": "http://example.com/John_Lennon"} } ] } } diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-bidingmarkdown.md b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-bidingmarkdown.md index 8e2af0f367..152d839594 100644 --- 
a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-bidingmarkdown.md +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-bidingmarkdown.md @@ -1,5 +1,5 @@ -| ?albumName | ?albumDate | ?artist | -| ------------------------------------------------------------- | ----------------------------------------------------- | -------------------------------------------- | -| "Please Please Me"^^ | "1963-03-22"^^ | | -| "McCartney"^^ | "1970-04-17"^^ | | -| "Imagine"^^ | "1971-10-11"^^ | | \ No newline at end of file +| ?albumName | ?albumDate | ?artist | +| ------------------------------------------------------------- | ----------------------------------------------------- | ----------------------------------- | +| "Please Please Me"^^ | "1963-03-22"^^ | | +| "McCartney"^^ | "1970-04-17"^^ | | +| "Imagine"^^ | "1971-10-11"^^ | | \ No newline at end of file diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-bidingtrig.trig b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-bidingtrig.trig index 88ff44c721..10e5541762 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-bidingtrig.trig +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-bidingtrig.trig @@ -16,7 +16,7 @@ rs:value "1963-03-22"^^xsd:date ] ; rs:binding [ rs:variable 'artist' ; -rs:value +rs:value ] ; ] ; rs:solution [ @@ -31,7 +31,7 @@ rs:value "1970-04-17"^^xsd:date ] ; rs:binding [ rs:variable 'artist' ; -rs:value +rs:value ] ; ] ; rs:solution [ @@ -46,6 +46,6 @@ rs:value "1971-10-11"^^xsd:date ] ; rs:binding [ rs:variable 'artist' ; -rs:value +rs:value ] ; ]. 
diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-bidingtsv.tsv b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-bidingtsv.tsv index b440786d0d..f96520f647 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-bidingtsv.tsv +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-bidingtsv.tsv @@ -1,4 +1,4 @@ ?albumName ?albumDate ?artist -"Please Please Me" "1963-03-22"^^ -"McCartney" "1970-04-17"^^ -"Imagine" "1971-10-11"^^ +"Please Please Me" "1963-03-22"^^ +"McCartney" "1970-04-17"^^ +"Imagine" "1971-10-11"^^ diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-bidingturtle.ttl b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-bidingturtle.ttl index 88ff44c721..10e5541762 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-bidingturtle.ttl +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-bidingturtle.ttl @@ -16,7 +16,7 @@ rs:value "1963-03-22"^^xsd:date ] ; rs:binding [ rs:variable 'artist' ; -rs:value +rs:value ] ; ] ; rs:solution [ @@ -31,7 +31,7 @@ rs:value "1970-04-17"^^xsd:date ] ; rs:binding [ rs:variable 'artist' ; -rs:value +rs:value ] ; ] ; rs:solution [ @@ -46,6 +46,6 @@ rs:value "1971-10-11"^^xsd:date ] ; rs:binding [ rs:variable 'artist' ; -rs:value +rs:value ] ; ]. 
diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-bidingxml.xml b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-bidingxml.xml index 4813cb79d4..a340301df8 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-bidingxml.xml +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-bidingxml.xml @@ -9,17 +9,17 @@ Please Please Me 1963-03-22 -http://stardog.com/tutorial/The_Beatles +http://example.com/The_Beatles McCartney 1970-04-17 -http://stardog.com/tutorial/Paul_McCartney +http://example.com/Paul_McCartney Imagine 1971-10-11 -http://stardog.com/tutorial/John_Lennon +http://example.com/John_Lennon diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-jsonld.jsonld b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-jsonld.jsonld index 2d4dd52d4b..2881c6d614 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-jsonld.jsonld +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-jsonld.jsonld @@ -1,6 +1,6 @@ { "@context": { - "": "http://stardog.com/tutorial/", + "": "http://example.com/", "xsd": "http://www.w3.org/2001/XMLSchema#", "ns1": "http://www.w3.org/2001/sw/DataAccess/tests/result-set#", "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#" diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-rdfxml.rdf b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-rdfxml.rdf index e302c7bdd7..1e52c079be 100644 --- 
a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-rdfxml.rdf +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-rdfxml.rdf @@ -1,6 +1,6 @@ @@ -15,7 +15,7 @@ xmlns:rdf='http://www.w3.org/1999/02/22-rdf-syntax-ns#'> - + artist @@ -54,7 +54,7 @@ xmlns:rdf='http://www.w3.org/1999/02/22-rdf-syntax-ns#'> - + artist @@ -76,7 +76,7 @@ xmlns:rdf='http://www.w3.org/1999/02/22-rdf-syntax-ns#'> - + artist diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-trig.trig b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-trig.trig index 7e59f2774e..f956a40629 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-trig.trig +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-trig.trig @@ -1,4 +1,4 @@ -@prefix : . +@prefix : . @prefix xsd: . @prefix ns1: . @prefix rdf: . diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-turtle.ttl b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-turtle.ttl index 7e59f2774e..f956a40629 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-turtle.ttl +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/beatles-select-turtle.ttl @@ -1,4 +1,4 @@ -@prefix : . +@prefix : . @prefix xsd: . @prefix ns1: . @prefix rdf: . 
diff --git a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/url.md b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/url.md index cdc29514ee..ae42c95130 100644 --- a/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/url.md +++ b/corese-command/src/test/resources/fr/inria/corese/command/programs/sparql/references/select/url.md @@ -1,6 +1,6 @@ -| ?s | ?p | ?o | -| ---------------------------------------------- | ------------------------------------ | ----------------------------------------------------- | -| | | | -| | | | -| | | | -| | | "1963-03-22"^^ | \ No newline at end of file +| ?s | ?p | ?o | +| ------------------------------------- | --------------------------- | ----------------------------------------------------- | +| | | | +| | | | +| | | | +| | | "1963-03-22"^^ | \ No newline at end of file diff --git a/corese-core/src/main/java/fr/inria/corese/core/Corese.java b/corese-core/src/main/java/fr/inria/corese/core/Corese.java index bb5c75c9ac..4256d65e2a 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/Corese.java +++ b/corese-core/src/main/java/fr/inria/corese/core/Corese.java @@ -5,7 +5,7 @@ import org.slf4j.Logger; /** - * + * Corese main class, used to initialize the Corese system. 
* @author edemairy */ public class Corese { diff --git a/corese-core/src/main/java/fr/inria/corese/core/api/DataBrokerConstruct.java b/corese-core/src/main/java/fr/inria/corese/core/api/DataBrokerConstruct.java index 06d9f39391..d6b354f322 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/api/DataBrokerConstruct.java +++ b/corese-core/src/main/java/fr/inria/corese/core/api/DataBrokerConstruct.java @@ -23,7 +23,7 @@ public interface DataBrokerConstruct extends DataBroker { default void startRuleEngine() { - System.out.println("DataBrokerConstruct startRuleEngine"); + // System.out.println("DataBrokerConstruct startRuleEngine"); } default void endRuleEngine() { diff --git a/corese-core/src/main/java/fr/inria/corese/core/load/CreateImpl.java b/corese-core/src/main/java/fr/inria/corese/core/load/CreateImpl.java index 6d73e69fe8..c3f188f2ea 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/load/CreateImpl.java +++ b/corese-core/src/main/java/fr/inria/corese/core/load/CreateImpl.java @@ -78,7 +78,12 @@ public static CreateImpl create(Graph g, Load ld) { @Override public void graph(Atom src) { stack.add(source); - source = addGraph(src); + if (src.isBlankOrBlankNode()) { + source = addGraph(getID(src.getLabel()), true); + } + else { + source = addGraph(src); + } } @Override @@ -97,7 +102,16 @@ public void triple(Atom graph, Atom subject, Atom property, Atom object) { } Node getGraph(Atom graph) { - return graph == null ? addDefaultGraphNode() : addGraph(graph); + //return graph == null ? 
addDefaultGraphNode() : addGraph(graph); + if (graph == null) { + return addDefaultGraphNode(); + } + else if (graph.isBlankOrBlankNode()) { + return addGraph(getID(graph.getLabel()), true); + } + else { + return addGraph(graph); + } } @Override diff --git a/corese-core/src/main/java/fr/inria/corese/core/load/CreateTriple.java b/corese-core/src/main/java/fr/inria/corese/core/load/CreateTriple.java index 8a21132b7b..8489faf5d7 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/load/CreateTriple.java +++ b/corese-core/src/main/java/fr/inria/corese/core/load/CreateTriple.java @@ -124,8 +124,12 @@ Node addGraph(String src) { return graph.addGraph(src); } + Node addGraph(String src, boolean bnode) { + return graph.addGraph(src, bnode); + } + Node addGraph(Atom src) { - return graph.addGraph(src.getLabel(), src.isBlank()); + return graph.addGraph(src.getLabel(), src.isBlankOrBlankNode()); } Node addDefaultGraphNode() { diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/CanonicalRdf10Format.java b/corese-core/src/main/java/fr/inria/corese/core/print/CanonicalRdf10Format.java new file mode 100644 index 0000000000..d7c5a7662f --- /dev/null +++ b/corese-core/src/main/java/fr/inria/corese/core/print/CanonicalRdf10Format.java @@ -0,0 +1,88 @@ +package fr.inria.corese.core.print; + +import java.util.Map; + +import fr.inria.corese.core.Graph; +import fr.inria.corese.core.print.rdfc10.CanonicalRdf10; +import fr.inria.corese.core.print.rdfc10.CanonicalizedDataset; +import fr.inria.corese.core.print.rdfc10.HashingUtility.HashAlgorithm; +import fr.inria.corese.kgram.api.core.Node; +import fr.inria.corese.kgram.core.Mappings; + +/** + * This class provides functionality to convert a Graph object to a string in + * Canonical RDF 1.0 format. 
+ */ +public class CanonicalRdf10Format extends NQuadsFormat { + + private CanonicalizedDataset canonicalizedDataset; + + public CanonicalRdf10Format(Graph graph) { + super(graph); + this.canonicalizedDataset = CanonicalRdf10.create(graph).canonicalRdf10(); + } + + public CanonicalRdf10Format(Graph graph, HashAlgorithm hashAlgorithm) { + super(graph); + this.canonicalizedDataset = CanonicalRdf10.create(graph, hashAlgorithm).canonicalRdf10(); + } + + public static CanonicalRdf10Format create(Graph graph) { + return new CanonicalRdf10Format(graph); + } + + public static CanonicalRdf10Format create(Graph graph, HashAlgorithm hashAlgorithm) { + return new CanonicalRdf10Format(graph, hashAlgorithm); + } + + public static CanonicalRdf10Format create(Mappings map) { + return new CanonicalRdf10Format((Graph) map.getGraph()); + } + + public static CanonicalRdf10Format create(Mappings map, HashAlgorithm hashAlgorithm) { + return new CanonicalRdf10Format((Graph) map.getGraph(), hashAlgorithm); + } + + /** + * Converts the graph to a string in Canonical RDF 1.0 format. + * + * @return a string representation of the graph in Canonical RDF 1.0 format + */ + @Override + public String toString() { + String nquads = super.toString(); + + // Check if nquads is empty and return early if it is + if (nquads.isEmpty()) { + return ""; + } + + // Sort in codepoint order by line + String[] lines = nquads.split("\n"); + java.util.Arrays.sort(lines); + + // Concatenate lines + StringBuilder sb = new StringBuilder(); + for (String line : lines) { + sb.append(line).append("\n"); + } + + return sb.toString(); + } + + @Override + protected String printBlank(Node node) { + String identifier = this.canonicalizedDataset.getIdentifierForBlankNode(node); + return "_:" + this.canonicalizedDataset.getIssuedIdentifier(identifier); + } + + /** + * Retrieves the mapping of blank nodes to their identifiers. 
+ * + * @return a map of blank nodes to their identifiers + */ + public Map getIssuedIdentifiersMap() { + return this.canonicalizedDataset.getIssuedIdentifiersMap(); + } + +} diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/JSOND3Format.java b/corese-core/src/main/java/fr/inria/corese/core/print/JSOND3Format.java index b6703ae10a..651d0a3778 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/JSOND3Format.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/JSOND3Format.java @@ -1,15 +1,15 @@ package fr.inria.corese.core.print; -import fr.inria.corese.sparql.api.IDatatype; -import fr.inria.corese.sparql.triple.parser.NSManager; -import fr.inria.corese.sparql.triple.parser.ASTQuery; -import fr.inria.corese.kgram.api.core.Node; -import fr.inria.corese.kgram.core.Mappings; -import fr.inria.corese.kgram.core.Query; -import fr.inria.corese.core.Graph; import java.util.HashMap; import java.util.Map; + +import fr.inria.corese.core.Graph; import fr.inria.corese.kgram.api.core.Edge; +import fr.inria.corese.kgram.api.core.Node; +import fr.inria.corese.kgram.core.Mappings; +import fr.inria.corese.kgram.core.Query; +import fr.inria.corese.sparql.api.IDatatype; +import fr.inria.corese.sparql.triple.parser.NSManager; public class JSOND3Format extends RDFFormat { @@ -86,17 +86,17 @@ public StringBuilder getStringBuilder() { return sb; } -// if (isGraph) { -// graphNodes(); -// } else { -// nodes(); -// } + // if (isGraph) { + // graphNodes(); + // } else { + // nodes(); + // } StringBuilder bb = new StringBuilder(); -// header(bb); + // header(bb); bb.append(OOBJ); bb.append(NL); -// bb.append(TAB); + // bb.append(TAB); bb.append(" \"nodes\" : [ "); bb.append(NL); d3Nodes(); @@ -104,7 +104,7 @@ public StringBuilder getStringBuilder() { bb.append("] ,"); bb.append(NL); -// bb.append(TAB); + // bb.append(TAB); bb.append(" \"edges\" : [ "); bb.append(NL); d3Edges(); @@ -117,7 +117,7 @@ public StringBuilder getStringBuilder() { } void 
d3Nodes() { - + for (Node node : graph.getRBNodes()) { int group = 1; if (node.isBlank()) { @@ -125,12 +125,12 @@ void d3Nodes() { } else if (node.toString().contains("/sparql")) { group = 2; } - + sdisplay(TAB); sdisplay(OOBJ); sdisplay("\"name\" : "); sdisplay(DQUOTE); - + sdisplay(JSONFormat.addJSONEscapes(node.toString())); sdisplay(DQUOTE); sdisplay(V); @@ -144,12 +144,12 @@ void d3Nodes() { } for (Node node : graph.getLiteralNodes()) { -// for (Entity e : graph.getRBNodes()) { + // for (Entity e : graph.getRBNodes()) { sdisplay(TAB); sdisplay(OOBJ); sdisplay("\"name\" : "); sdisplay(DQUOTE); - + sdisplay(JSONFormat.addJSONEscapes(node.toString())); sdisplay(DQUOTE); sdisplay(V); @@ -171,7 +171,9 @@ void d3Edges() { for (Edge e : graph.getEdges()) { - Edge edge = e; + // Create a new clean iterable (because corse iterable does not have a perfectly + // defined behavior for optimization reasons) + Edge edge = this.graph.getEdgeFactory().copy(e); sdisplay(TAB); sdisplay(OOBJ); @@ -194,8 +196,7 @@ void d3Edges() { sb.deleteCharAt(sb.lastIndexOf(V)); } } - - + void nodes() { for (Node node : getNodes()) { print(null, node); diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/NQuadsFormat.java b/corese-core/src/main/java/fr/inria/corese/core/print/NQuadsFormat.java index 137a15849f..174ff09bb9 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/NQuadsFormat.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/NQuadsFormat.java @@ -1,7 +1,9 @@ package fr.inria.corese.core.print; import fr.inria.corese.core.Graph; +import fr.inria.corese.kgram.api.core.Edge; import fr.inria.corese.kgram.api.core.ExpType; +import fr.inria.corese.kgram.core.Mappings; public class NQuadsFormat extends NTriplesFormat { @@ -13,6 +15,10 @@ public static NQuadsFormat create(Graph graph) { return new NQuadsFormat(graph); } + public static NQuadsFormat create(Mappings map) { + return new NQuadsFormat((Graph) map.getGraph()); + } + /** * Converts the 
graph to a string in N-Quads format. * @@ -22,7 +28,12 @@ public static NQuadsFormat create(Graph graph) { public String toString() { StringBuilder sb = new StringBuilder(); - for (var edge : graph.getEdges()) { + for (Edge e : graph.getEdges()) { + + // Create a new clean iterable (because corse iterable does not have a perfectly + // defined behavior for optimization reasons) + Edge edge = this.graph.getEdgeFactory().copy(e); + sb.append(printNode(edge.getNode(0))) .append(" ") .append(printNode(edge.getEdgeNode())) @@ -32,9 +43,10 @@ public String toString() { if (edge.getGraph().getValue().stringValue() != ExpType.DEFAULT_GRAPH) { sb.append(printNode(edge.getGraph())); + sb.append(" "); } - sb.append(" .\n"); + sb.append(".\n"); } return sb.toString(); diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/NTriplesFormat.java b/corese-core/src/main/java/fr/inria/corese/core/print/NTriplesFormat.java index 723ad2f7bb..a7d1a317d1 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/NTriplesFormat.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/NTriplesFormat.java @@ -2,11 +2,12 @@ import java.io.IOException; import java.io.OutputStream; -import java.net.URI; -import java.net.URISyntaxException; import fr.inria.corese.core.Graph; +import fr.inria.corese.kgram.api.core.Edge; import fr.inria.corese.kgram.api.core.Node; +import fr.inria.corese.kgram.core.Mappings; +import fr.inria.corese.sparql.datatype.RDF; import fr.inria.corese.sparql.triple.parser.NSManager; /** @@ -40,6 +41,16 @@ public static NTriplesFormat create(Graph graph) { return new NTriplesFormat(graph); } + /** + * Factory method to create a new NTriplesFormat instance. + * + * @param map the mappings to be formatted + * @return a new NTriplesFormat instance + */ + public static NTriplesFormat create(Mappings map) { + return new NTriplesFormat((Graph) map.getGraph()); + } + /** * Converts the graph to a string in N-Triples format. 
* @@ -49,7 +60,12 @@ public static NTriplesFormat create(Graph graph) { public String toString() { StringBuilder sb = new StringBuilder(); - for (var edge : graph.getEdges()) { + for (Edge e : graph.getEdges()) { + + // Create a new clean iterable (because corse iterable does not have a perfectly + // defined behavior for optimization reasons) + Edge edge = this.graph.getEdgeFactory().copy(e); + sb.append(printNode(edge.getNode(0))) .append(" ") .append(printNode(edge.getEdgeNode())) @@ -78,13 +94,15 @@ public void write(OutputStream out) throws IOException { * @param node the node to be formatted * @return a string representation of the node */ - protected String printNode(Node node) { + public String printNode(Node node) { if (node.getDatatypeValue().isURI()) { return printURI(node); } else if (node.getDatatypeValue().isLiteral()) { return printDatatype(node); + } else if (node.isBlank()) { + return printBlank(node); } else { - throw new IllegalArgumentException("Node " + node + " is not a URI or a literal"); + throw new IllegalArgumentException("Node " + node + " is not a URI, Literal, or blank node."); } } @@ -95,13 +113,7 @@ protected String printNode(Node node) { * @return a string representation of the URI node */ private String printURI(Node node) { - try { - // Validate URI and percent-encode if necessary - URI uri = new URI(node.getLabel()); - return "<" + uri.toASCIIString() + ">"; - } catch (URISyntaxException e) { - throw new IllegalArgumentException("Invalid URI: " + node.getLabel(), e); - } + return "<" + node.getLabel() + ">"; } /** @@ -117,13 +129,23 @@ private String printDatatype(Node node) { if (language != null && !language.isEmpty()) { return "\"" + label + "\"@" + language; - } else if (datatype != null && !datatype.isEmpty()) { + } else if (datatype != null && !datatype.isEmpty() && !datatype.equals(RDF.xsdstring)) { return "\"" + label + "\"^^<" + datatype + ">"; } else { return "\"" + label + "\""; } } + /** + * Converts a blank node to a 
string. + * + * @param node the blank node to be formatted + * @return a string representation of the blank node + */ + protected String printBlank(Node node) { + return node.getLabel(); + } + /** * Escapes special characters in a string. * @@ -133,29 +155,38 @@ private String printDatatype(Node node) { private String escape(String str) { StringBuilder escaped = new StringBuilder(); for (char ch : str.toCharArray()) { - if (ch >= 0x00 && ch <= 0x1F || ch >= 0x7F && ch <= 0x9F) { - escaped.append(String.format("\\u%04x", (int) ch)); - } else { - switch (ch) { - case '\\': - escaped.append("\\\\"); - break; - case '\"': - escaped.append("\\\""); - break; - case '\n': - escaped.append("\\n"); - break; - case '\r': - escaped.append("\\r"); - break; - case '\t': - escaped.append("\\t"); - break; - default: + switch (ch) { + case '\\': // Backslash + escaped.append("\\\\"); + break; + case '\"': // Double quote + escaped.append("\\\""); + break; + case '\n': // Line Feed + escaped.append("\\n"); + break; + case '\r': // Carriage Return + escaped.append("\\r"); + break; + case '\t': // Horizontal Tab + escaped.append("\\t"); + break; + case '\b': // Backspace + escaped.append("\\b"); + break; + case '\f': // Form Feed + escaped.append("\\f"); + break; + default: + // Uses UCHAR for specific characters and those outside the Char production of + // XML 1.1 + if ((ch >= '\u0000' && ch <= '\u0007') || ch == '\u000B' || (ch >= '\u000E' && ch <= '\u001F') + || ch == '\u007F') { + escaped.append(String.format("\\u%04X", (int) ch)); + } else { + // Uses the native representation for all other characters escaped.append(ch); - break; - } + } } } return escaped.toString(); diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java b/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java index f5a52ea8f3..ce9e1584ee 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java +++ 
b/corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java @@ -8,6 +8,7 @@ import fr.inria.corese.compiler.parser.Pragma; import fr.inria.corese.core.Graph; +import fr.inria.corese.core.print.rdfc10.HashingUtility.HashAlgorithm; import fr.inria.corese.core.transform.Transformer; import fr.inria.corese.core.util.MappingsGraph; import fr.inria.corese.kgram.api.core.Node; @@ -34,6 +35,7 @@ public class ResultFormat implements ResultFormatDef { public static final String SPARQL_RESULTS_CSV = "text/csv"; // application/sparql-results+csv"; public static final String SPARQL_RESULTS_TSV = "text/tab-separated-values"; // application/sparql-results+tsv"; public static final String SPARQL_RESULTS_MD = "text/markdown"; + public static final String SPARQL_RESULTS_HTML = "application/n-quads"; static final String HEADER = "\n" + "\n" @@ -67,6 +69,7 @@ public class ResultFormat implements ResultFormatDef { private Graph graph; private Binding bind; private Context context; + private NSManager nsmanager; int type = UNDEF_FORMAT; private int transformType = UNDEF_FORMAT; private int construct_format = DEFAULT_CONSTRUCT_FORMAT; @@ -95,7 +98,6 @@ static void init() { table.put(Metadata.DISPLAY_RDF, RDF_FORMAT); table.put(Metadata.DISPLAY_XML, XML_FORMAT); table.put(Metadata.DISPLAY_JSON, JSON_FORMAT); - table.put(Metadata.DISPLAY_MARKDOWN, MARKDOWN_FORMAT); } @@ -112,11 +114,14 @@ static void initFormat() { defContent(SPARQL_RESULTS_CSV, CSV_FORMAT); defContent(SPARQL_RESULTS_TSV, TSV_FORMAT); defContent(SPARQL_RESULTS_MD, MARKDOWN_FORMAT); + // Graph defContent(RDF_XML, RDF_XML_FORMAT); defContent(TURTLE_TEXT, TURTLE_FORMAT); defContent(TRIG, TRIG_FORMAT); defContent(JSON_LD, JSONLD_FORMAT); + defContent(N_TRIPLES, NTRIPLES_FORMAT); + defContent(N_QUADS, NQUADS_FORMAT); // defContent(JSON, JSON_LD_FORMAT); format.put(TRIG_TEXT, TRIG_FORMAT); @@ -140,6 +145,8 @@ static void initFormat() { format.put("turtle", TURTLE_FORMAT); format.put("trig", TRIG_FORMAT); 
format.put("rdfxml", RDF_XML_FORMAT); + format.put("nt", NTRIPLES_FORMAT); + format.put("nq", NQUADS_FORMAT); } static void defContent(String f, int t) { @@ -170,6 +177,12 @@ static void defContent(String f, int t) { this(g); this.type = type; } + + ResultFormat(Graph g, NSManager nsm, int type) { + this(g); + setNsmanager(nsm); + this.type = type; + } static public ResultFormat create(Mappings m) { return new ResultFormat(m, type(m)); @@ -277,6 +290,10 @@ static public ResultFormat create(Graph g) { static public ResultFormat create(Graph g, int type) { return new ResultFormat(g, type); } + + static public ResultFormat create(Graph g, NSManager nsm, int type) { + return new ResultFormat(g, nsm, type); + } static public ResultFormat create(Graph g, String type) { return new ResultFormat(g, getSyntax(type)); @@ -435,20 +452,23 @@ String graphToString(Node node) { case RDF_XML_FORMAT: return RDFFormat.create(getGraph()).toString(); case TRIG_FORMAT: - return TripleFormat.create(getGraph(), true) + return TripleFormat.create(getGraph(), getNsmanager(), true) .setNbTriple(getNbTriple()).toString(node); case JSONLD_FORMAT: return JSONLDFormat.create(getGraph()).toString(); case NTRIPLES_FORMAT: return NTriplesFormat.create(getGraph()).toString(); case NQUADS_FORMAT: - return TripleFormat.create(getGraph(), true) - .setNbTriple(getNbTriple()).toString(node); + return NQuadsFormat.create(getGraph()).toString(); + case RDFC10_FORMAT: + return CanonicalRdf10Format.create(getGraph(), HashAlgorithm.SHA_256).toString(); + case RDFC10_SHA384_FORMAT: + return CanonicalRdf10Format.create(getGraph(), HashAlgorithm.SHA_384).toString(); case TURTLE_FORMAT: default: // e.g. 
HTML - String str = TripleFormat.create(getGraph()) - .setNbTriple(getNbTriple()).toString(node); + TripleFormat tf = TripleFormat.create(getGraph(), getNsmanager()); + String str = tf.setNbTriple(getNbTriple()).toString(node); if (type() == HTML_FORMAT) { return html(str); } @@ -485,6 +505,10 @@ boolean isGraphFormat(int type) { case TURTLE_FORMAT: case TRIG_FORMAT: case JSONLD_FORMAT: + case NTRIPLES_FORMAT: + case NQUADS_FORMAT: + case RDFC10_FORMAT: + case RDFC10_SHA384_FORMAT: // case RDF_FORMAT: return true; default: @@ -535,6 +559,14 @@ String processBasic(Mappings map, int type) { return TripleFormat.create(map, true).setNbTriple(getNbTriple()).toString(); case JSONLD_FORMAT: return JSONLDFormat.create(map).toString(); + case NTRIPLES_FORMAT: + return NTriplesFormat.create(map).toString(); + case NQUADS_FORMAT: + return NQuadsFormat.create(map).toString(); + case RDFC10_FORMAT: + return CanonicalRdf10Format.create(map, HashAlgorithm.SHA_256).toString(); + case RDFC10_SHA384_FORMAT: + return CanonicalRdf10Format.create(map, HashAlgorithm.SHA_384).toString(); case RDF_FORMAT: // W3C RDF Graph Mappings @@ -755,4 +787,12 @@ public ResultFormat setNbTriple(int nbTriple) { return this; } + public NSManager getNsmanager() { + return nsmanager; + } + + public void setNsmanager(NSManager nsmanager) { + this.nsmanager = nsmanager; + } + } diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/TripleFormat.java b/corese-core/src/main/java/fr/inria/corese/core/print/TripleFormat.java index cbd6e4d1cf..93405078e1 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/print/TripleFormat.java +++ b/corese-core/src/main/java/fr/inria/corese/core/print/TripleFormat.java @@ -1,12 +1,14 @@ package fr.inria.corese.core.print; import fr.inria.corese.core.Graph; +import fr.inria.corese.core.logic.RDF; import fr.inria.corese.kgram.api.core.Edge; import fr.inria.corese.kgram.api.core.Node; import fr.inria.corese.kgram.core.Mappings; import 
fr.inria.corese.kgram.core.Query; import fr.inria.corese.sparql.api.IDatatype; import fr.inria.corese.sparql.triple.parser.NSManager; +import java.util.List; /** * Turtle & Trig Format @@ -27,7 +29,7 @@ public class TripleFormat extends RDFFormat { static final String RDF_TYPE = "rdf:type"; static final String TAB = " "; - static final boolean addPrefix = true; + public boolean addPrefix = true; boolean isGraph = false; // when true: display default graph kg:default with embedding graph kg:default @@ -55,6 +57,9 @@ public void disableCompactBlankNodeSyntax() { } public static TripleFormat create(Graph g, NSManager n) { + if (n == null) { + return new TripleFormat(g, nsm()); + } return new TripleFormat(g, n); } @@ -94,6 +99,12 @@ public static TripleFormat create(Graph g, boolean isGraph) { t.setGraph(isGraph); return t; } + + public static TripleFormat create(Graph g, NSManager nsm, boolean isGraph) { + TripleFormat t = TripleFormat.create(g, nsm); + t.setGraph(isGraph); + return t; + } public void setGraph(boolean b) { isGraph = b; @@ -215,14 +226,12 @@ void basicGraphNode(Node gNode) { } private boolean isRdfPrefixNeeded() { - //for (Node node : graph.getGraphNodes()) { - for (Edge edge : graph.getEdges()) { - String pred = nsm.toPrefix(edge.getEdgeNode().getLabel(), !addPrefix); - if (pred.startsWith("rdf:") && !pred.equals(RDF_TYPE)) { - return true; - } + for (Edge edge : graph.getEdges()) { + String pred = nsm.toPrefix(edge.getEdgeNode().getLabel(), !addPrefix); + if (pred.startsWith("rdf:") && !pred.equals(RDF_TYPE)) { + return true; } - //} + } return false; } @@ -230,12 +239,6 @@ private boolean isRdfPrefixNeeded() { void header(StringBuilder bb) { link(bb); bb.append(nsm.toString(PREFIX, false, false)); -// if (isRdfPrefixNeeded()) { -// bb.append(nsm.toString(PREFIX, false, false)); -// } else { -// // Si le préfixe rdf: n'est pas nécessaire, supprimez-le de la sortie -// bb.append(nsm.toString(PREFIX, false, false).replaceAll("@prefix rdf:.*\n", "")); 
-// } } void link(StringBuilder bb) { @@ -265,10 +268,6 @@ void print(Node gNode, Node node) { } if (first) { first = false; -// if (isBlankNode) { -// sdisplay("["); -// } -// else { subject(edge); sdisplay(SPACE); @@ -284,9 +283,6 @@ void print(Node gNode, Node node) { } if (!first) { -// if (isBlankNode) { -// sdisplay("]"); -// } sdisplay(DOT); sdisplay(NL); sdisplay(NL); @@ -312,13 +308,10 @@ void subject(Edge ent) { } void predicate(Node node) { - String pred = nsm.toPrefix(node.getLabel(), !addPrefix); - if (pred.equals(RDF_TYPE)) { + if (node.getLabel().equals(RDF.TYPE)) { sdisplay("a"); - } else if (pred.equals(node.getLabel())) { // Si l'URI n'est pas abrégée - uri(node.getLabel()); // Utiliser la méthode uri pour ajouter des chevrons si nécessaire - } else { // Si l'URI est abrégée - sdisplay(pred); + } else { + node(node); } } @@ -336,9 +329,14 @@ void node(Node node, boolean rec) { } else if (dt.isBlank()) { sdisplay(dt.getLabel()); } else { - uri(dt.getLabel()); + // uri(dt.getLabel()); + sdisplay(dt.toSparql(true, false, !addPrefix, nsm)); } } + + void blank(Node node) { + List list = graph.getList(node); + } // node is triple reference of edge // node is subject/object @@ -366,19 +364,6 @@ void basicTriple(Node node, Edge edge, boolean rec) { node(edge.getObjectNode(), true); } - // void triple2(Node node, Edge edge, boolean rec) { - // if (edge.isNested() || hasNestedTriple(edge) || rec) { - // nestedTriple(node, edge, rec); - // } else { - // basicTriple(node, edge, rec); - // } - // } - // - - // void basicTriple(Node node, Edge edge) { - // basicTriple(node, edge, false); - // } - boolean hasNestedTriple(Edge edge) { return edge.getSubjectValue().isTripleWithEdge() || edge.getObjectValue().isTripleWithEdge(); } diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalIssuer.java b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalIssuer.java new file mode 100644 index 0000000000..7bd6d3a159 --- 
/dev/null +++ b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalIssuer.java @@ -0,0 +1,109 @@ +package fr.inria.corese.core.print.rdfc10; + +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Set; + +/** + * This class manages the issuance of canonical identifiers for blank nodes. + */ +public class CanonicalIssuer { + + private final String IDPREFIX; + private int idCounter; + // Maps blank node identifiers to their canonical identifiers + // Use LinkedHashMap to preserve insertion order + private final LinkedHashMap issuedIdentifierMap; + + ///////////////// + // Constructor // + ///////////////// + + /** + * Constructs a new CanonicalIssuer instance. + * + * @param idPrefix The prefix to be used for identifiers issued by this + */ + public CanonicalIssuer(String idPrefix) { + this.IDPREFIX = idPrefix; + this.idCounter = 0; + this.issuedIdentifierMap = new LinkedHashMap<>(); + } + + /** + * Constructs a new CanonicalIssuer instance as a copy of another. + * + * @param ci The CanonicalIssuer to copy. + */ + public CanonicalIssuer(CanonicalIssuer ci) { + this.IDPREFIX = ci.IDPREFIX; + this.idCounter = ci.idCounter; + this.issuedIdentifierMap = new LinkedHashMap<>(ci.issuedIdentifierMap); + } + + ///////////// + // Methods // + ///////////// + + /** + * Issues a new canonical identifier for a blank node or returns an existing one + * if already issued. + * + * @return The canonical identifier for the blank node. + */ + public String issueCanonicalIdentifier(String blankNodeId) { + if (this.issuedIdentifierMap.containsKey(blankNodeId)) { + return this.issuedIdentifierMap.get(blankNodeId); + } + String issuedIdentifier = this.IDPREFIX + this.idCounter; + this.idCounter++; + this.issuedIdentifierMap.put(blankNodeId, issuedIdentifier); + return issuedIdentifier; + } + + /** + * Retrieves the canonical identifier for a blank node. + * + * @param blankNodeId The identifier of the blank node. 
+ * @return The canonical identifier for the blank node. + */ + public String getCanonicalIdentifier(String blankNodeId) { + return this.issuedIdentifierMap.get(blankNodeId); + } + + /** + * Retrieves a set of all issued blank node identifiers. + * + * @return A set of all issued blank node identifiers. + */ + public Set getBlankNodeIdentifiers() { + return Collections.unmodifiableSet(this.issuedIdentifierMap.keySet()); + } + + /** + * Tests whether a blank node has a canonical identifier. + * + * @param blankNodeId The identifier of the blank node. + * @return True if the blank node has a canonical identifier + * false otherwise. + */ + public boolean hasCanonicalIdentifier(String blankNodeId) { + return this.issuedIdentifierMap.containsKey(blankNodeId); + } + + /** + * Retrieves the issued identifier map. + * + * @return The issued identifier map. + */ + public Map getIssuedIdentifierMap() { + return Collections.unmodifiableMap(this.issuedIdentifierMap); + } + + @Override + public String toString() { + return this.issuedIdentifierMap.toString(); + } + +} diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalRdf10.java b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalRdf10.java new file mode 100644 index 0000000000..e3745bf5e2 --- /dev/null +++ b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalRdf10.java @@ -0,0 +1,664 @@ +package fr.inria.corese.core.print.rdfc10; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.commons.lang3.tuple.Pair; + +import fr.inria.corese.core.EdgeFactory; +import fr.inria.corese.core.Graph; +import fr.inria.corese.core.print.NTriplesFormat; +import fr.inria.corese.core.print.rdfc10.HashingUtility.HashAlgorithm; +import fr.inria.corese.kgram.api.core.Edge; +import fr.inria.corese.kgram.api.core.ExpType; +import fr.inria.corese.kgram.api.core.Node; +import fr.inria.corese.kgram.core.Mappings; + +/** + * The {@code CanonicalRdf10Format} class 
extends {@code RDFFormat} to provide + * RDF canonicalization in alignment with the RDF 1.0 specification. This class + * manages the process of transforming RDF graphs into a canonical form. + * + * @see RDF Dataset + * Canonicalization + * + */ +public class CanonicalRdf10 { + + private CanonicalizationState canonicalizationState; + private CanonicalizedDataset canonicalizedDataset; + + private EdgeFactory edgeFactory = Graph.create().getEdgeFactory(); + private NTriplesFormat ntriplesFormat = NTriplesFormat.create(Graph.create()); + + private HashAlgorithm hashAlgorithm = HashAlgorithm.SHA_256; + + private int depthFactor = 5; + private int permutationLimit = 50000; + + ////////////////// + // Constructors // + ////////////////// + + /** + * Constructs a new {@code CanonicalRdf10Format} with the specified RDF graph. + * Initializes the canonicalization state and dataset for the graph. + * + * @param graph the RDF graph to be canonicalized + */ + private CanonicalRdf10(Graph graph) { + this.canonicalizationState = new CanonicalizationState(); + this.canonicalizedDataset = new CanonicalizedDataset(graph); + } + + ///////////////////// + // Factory methods // + ///////////////////// + + /** + * Creates a new {@code CanonicalRdf10Format} instance for the given graph. + * + * @param graph the RDF graph to be canonicalized + * @return a new instance of {@code CanonicalRdf10Format} + */ + public static CanonicalRdf10 create(Graph graph) { + return new CanonicalRdf10(graph); + } + + /** + * Creates a new {@code CanonicalRdf10Format} instance for the graph associated + * with the given mappings. + * + * @param map the mappings containing the RDF graph to be canonicalized + * @return a new instance of {@code CanonicalRdf10Format} + */ + public static CanonicalRdf10 create(Mappings map) { + return new CanonicalRdf10((Graph) map.getGraph()); + } + + /** + * Creates a new {@code CanonicalRdf10Format} instance for the given graph with + * a hash algorithm. 
+ * + * @param graph the RDF graph to be canonicalized + * @param hashAlgorithm the hash algorithm to be used for the canonicalization + * @return a new instance of {@code CanonicalRdf10Format} + */ + public static CanonicalRdf10 create(Graph graph, HashAlgorithm hashAlgorithm) { + CanonicalRdf10 canonicalRdf10 = new CanonicalRdf10(graph); + canonicalRdf10.setHashAlgorithm(hashAlgorithm); + return canonicalRdf10; + } + + /** + * Creates a new {@code CanonicalRdf10Format} instance for the graph associated + * with the given mappings with a hash algorithm. + * + * @param map the mappings containing the RDF graph to be + * canonicalized + * @param hashAlgorithm the hash algorithm to be used for the canonicalization + * @return a new instance of {@code CanonicalRdf10Format} + */ + public static CanonicalRdf10 create(Mappings map, HashAlgorithm hashAlgorithm) { + CanonicalRdf10 canonicalRdf10 = new CanonicalRdf10((Graph) map.getGraph()); + canonicalRdf10.setHashAlgorithm(hashAlgorithm); + return canonicalRdf10; + } + + /////////////// + // Accessors // + /////////////// + + /** + * Returns the depth factor for the canonicalization algorithm. + * + * @return the depth factor for the canonicalization algorithm + */ + public int getDepthFactor() { + return depthFactor; + } + + /** + * Sets the depth factor for the canonicalization algorithm. + * + * @param depthFactor the depth factor for the canonicalization algorithm + */ + public void setDepthFactor(int depthFactor) { + this.depthFactor = depthFactor; + } + + /** + * Returns the permutation limit for the canonicalization algorithm. + * + * @return the permutation limit for the canonicalization algorithm + */ + public int getPermutationLimit() { + return permutationLimit; + } + + /** + * Sets the permutation limit for the canonicalization algorithm. 
+ * + * @param permutationLimit the permutation limit for the canonicalization + * algorithm + */ + public void setPermutationLimit(int permutationLimit) { + this.permutationLimit = permutationLimit; + } + + /** + * Returns the hash algorithm used for the canonicalization algorithm. + * + * @return the hash algorithm used for the canonicalization algorithm + */ + public HashAlgorithm getHashAlgorithm() { + return hashAlgorithm; + } + + /** + * Sets the hash algorithm used for the canonicalization algorithm. + * + * @param hashAlgorithm the hash algorithm used for the canonicalization + * algorithm + */ + public void setHashAlgorithm(HashAlgorithm hashAlgorithm) { + this.hashAlgorithm = hashAlgorithm; + } + + //////////////////// + // Main algorithm // + //////////////////// + + /** + * Performs the canonicalization of an RDF 1.0 dataset. + * + * @see CanonicalizationAlgorithm + */ + public CanonicalizedDataset canonicalRdf10() { + // Build blank nodes to identifiers if not already done + // Build blank nodes identifiers to quads + // 4.4.3) Step 1, 2 + this.extractQuadsForBlankNodes(); + + // Build first degree hash for each blank node + // 4.4.3) Step 3 + for (String blankNodeIdentifier : this.canonicalizedDataset.getBlankNodeIdentifiers()) { + // 4.4.3) Step 3.1 + String hash = this.hashFirstDegreeQuads(blankNodeIdentifier); + // 4.4.3) Step 3.2 + this.canonicalizationState.associateHashWithBlankNode(hash, blankNodeIdentifier); + } + + // Generate canonical identifiers for blank nodes with a unique first degree + // hash + // 4.4.3) Step 4 + for (String hash : this.canonicalizationState.getHashesSorted()) { + // 4.4.3) Step 4.1 + if (this.canonicalizationState.getBlankNodeForHash(hash).size() > 1) { + continue; + } + + // 4.4.3) Step 4.2 + String blankNodeIdentifier = this.canonicalizationState.getBlankNodeForHash(hash).get(0); + // 4.5.2) Step 1 2, 3, 4, 5 + this.canonicalizationState.issueCanonicalBlankNodeIdFor(blankNodeIdentifier); + + // 4.4.3) Step 4.3 + 
this.canonicalizationState.removeHash(hash); // Can be removed inside the loop because + // this.canonicalizationState.getHashesSorted() is a copy of + // the original list + } + + // Build N-degree hash for each blank node with multiple first degree hash + // 4.4.3) Step 5 + for (String hash : this.canonicalizationState.getHashesSorted()) { + // 4.4.3) Step 5.1 + List> hashPathList = new ArrayList<>(); + + // 4.4.3) Step 5.2 + for (String blankNodeIdentifier : this.canonicalizationState.getBlankNodeForHash(hash)) { + + // 4.4.3) Step 5.2.1 + if (this.canonicalizationState.hasCanonicalIdentifier(blankNodeIdentifier)) { + continue; + } + + // 4.4.3) Step 5.2.2 + CanonicalIssuer tempIssuer = new CanonicalIssuer("b"); + + // 4.4.3) Step 5.2.3 + tempIssuer.issueCanonicalIdentifier(blankNodeIdentifier); + + // 4.4.3) Step 5.2.4 + Pair result = this.hashNdegreeQuads(tempIssuer, blankNodeIdentifier, 0); + hashPathList.add(result); + } + + // 4.4.3) Step 5.3 + + // sort the list by the hash + hashPathList.sort((p1, p2) -> p1.getLeft().compareTo(p2.getLeft())); + for (Pair result : hashPathList) { + CanonicalIssuer issuer = result.getRight(); + + // 4.4.3) Step 5.3.1 + for (String existingIdentifier : issuer.getBlankNodeIdentifiers()) { + this.canonicalizationState.issueCanonicalBlankNodeIdFor(existingIdentifier); + } + } + } + + // 4.4.3) Step 6 + // Add the issued identifiers map from the canonical issuer to the canonicalized + // dataset. + this.canonicalizedDataset.setIssuedIdentifierMap(this.canonicalizationState.getIssuedIdentifierMap()); + + return this.canonicalizedDataset; + } + + //////////////////// + // Initialization // + //////////////////// + + /** + * Extracts the quads for blank nodes from the RDF graph and adds them to the + * canonicalization state (BlankNodeIdentifier -> List). + * Also adds the blank nodes to identifiers to the canonicalized dataset if not + * already done in the constructor of the class (BlankNode -> + * BlankNodeIdentifier). 
+ */ + private void extractQuadsForBlankNodes() { + Iterable edges = this.canonicalizedDataset.getDataset().getEdges(); + + for (Edge e : edges) { + + // Create a new clean iterable (because corse iterable does not have a perfectly + // defined behavior for optimization reasons) + Edge edge = this.edgeFactory.copy(e); + + Node subject = edge.getSubjectNode(); + Node object = edge.getObjectNode(); + Node graph = edge.getGraph(); + processAndMapBlankNode(subject, edge); + processAndMapBlankNode(object, edge); + processAndMapBlankNode(graph, edge); + } + } + + /** + * Processes a given blank node by mapping to an identifier if not already done + * and adding the associated quad to the canonicalization state. If the node is + * not a blank node, the method does nothing. + * + * @param node the node to be processed and mapped + * @param edge the edge associated with the node + */ + private void processAndMapBlankNode(Node node, Edge edge) { + if (node.isBlank()) { + // Add blank node to identifiers if not already done + this.canonicalizedDataset.associateBlankNodeWithIdentifier(node); + + // Add quad to blank node identifier + // 4.4.3) Step 2.1 + this.canonicalizationState + .associateBlankNodeWithQuad(this.canonicalizedDataset.getIdentifierForBlankNode(node), edge); + } + } + + ////////////////////////// + // HashFirstDegreeQuads // + ////////////////////////// + + /** + * Hashes the first degree quads for a given blank node identifier. 
+ * + * @param blankNodeIdentifier the identifier of the blank node + * @return the hash of the first degree quads for the given blank node + * + * @see Hashing the + * First Degree Quads + */ + private String hashFirstDegreeQuads(String blankNodeIdentifier) { + // 4.6.3) Step 1 + List nquads = new ArrayList<>(); + + // 4.6.3) Step 2, 3 + for (Edge quad : this.canonicalizationState.getQuadsForBlankNode(blankNodeIdentifier)) { + nquads.add(serializeQuad(quad, blankNodeIdentifier)); + } + + // 4.6.3) Step 4 + nquads.sort(String::compareTo); + return HashingUtility.hash(String.join("\n", nquads) + "\n", this.hashAlgorithm); + } + + /** + * Serializes a quad in N-Quads format. The method replaces the blank node + * identifier of the reference blank node with "_:a" and all other blank node + * identifiers with "_:z". + * + * @param quad the quad to be serialized + * @param referenceBlankNodeIdentifier the identifier of the blank node to be + * referenced + * @return the serialized quad + */ + private String serializeQuad(Edge quad, String referenceBlankNodeIdentifier) { + Node subject = quad.getSubjectNode(); + Node predicate = quad.getEdgeNode(); + Node object = quad.getObjectNode(); + Node graph = quad.getGraph(); + + boolean isDefaultGraph = graph.getLabel().equals(ExpType.DEFAULT_GRAPH); + + String subjectString = getNodeString(subject, referenceBlankNodeIdentifier); + String predicateString = getNodeString(predicate, referenceBlankNodeIdentifier); + String objectString = getNodeString(object, referenceBlankNodeIdentifier); + String graphString = isDefaultGraph ? "" : getNodeString(graph, referenceBlankNodeIdentifier); + + return subjectString + " " + predicateString + " " + objectString + (isDefaultGraph ? "" : " " + graphString) + + " ."; + } + + /** + * Returns the string representation of a node. If the node is a blank node, the + * method returns "_:a" if the node is the reference blank node identifier and + * "_:z" otherwise. 
+ * + * @param node the node to be serialized + * @param referenceBlankNodeIdentifier the identifier of the blank node to be + * referenced + * @return the string representation of the node + */ + private String getNodeString(Node node, String referenceBlankNodeIdentifier) { + if (node.isBlank()) { + return this.canonicalizedDataset.getIdentifierForBlankNode(node).equals(referenceBlankNodeIdentifier) + ? "_:a" + : "_:z"; + } else { + return this.ntriplesFormat.printNode(node); + } + } + + /////////////// + // Exception // + /////////////// + + /** + * Thrown to indicate that an error occurred during the canonicalization of an + * RDF dataset. + */ + public static class CanonicalizationException extends RuntimeException { + + private static final long serialVersionUID = 1L; + + /** + * Constructs a new {@code CanonicalizationException} with the specified + * detail message. + * + * @param message the detail message + */ + public CanonicalizationException(String message) { + super(message); + } + + } + + /////////////////////// + // HashN-DegreeQuads // + /////////////////////// + + /** + * Hashes the N-degree quads for a given blank node identifier. 
+ * + * @param issuer the canonical issuer + * @param blankNodeId the identifier of the blank node + * @return a pair containing the hash of the N-degree quads for the given blank + * node and the canonical issuer + * + * @see Hashing + * N-degree Quads + * + */ + private Pair hashNdegreeQuads(CanonicalIssuer issuer, String blankNodeId, int depth) { + + // Check if depth factor is reached + if (depth >= this.depthFactor * this.canonicalizedDataset.getBlankNodeIdentifiers().size()) { + throw new CanonicalizationException("Depth factor reached, too many recursions"); + } + + // in step 4.8.3) Step 5.6 + CanonicalIssuer refIssuer = issuer; + + // 4.8.3) Step 1 + // Use a tree map to ensure that the hashes are sorted + ListMap relatedHashToRelatedBNIdMap = new ListMap<>(); + + // 4.8.3) Step 2 + List quads = this.canonicalizationState.getQuadsForBlankNode(blankNodeId); + + // 4.8.3) Step 3 + for (Edge quad : quads) { + processQuadEntry(quad, refIssuer, blankNodeId, relatedHashToRelatedBNIdMap, "s", quad.getSubjectNode()); + processQuadEntry(quad, refIssuer, blankNodeId, relatedHashToRelatedBNIdMap, "o", quad.getObjectNode()); + processQuadEntry(quad, refIssuer, blankNodeId, relatedHashToRelatedBNIdMap, "g", quad.getGraph()); + } + + // 4.8.3) Step 4 + StringBuilder data = new StringBuilder(); + + // 4.8.3) Step 5 + // Hash are sorted by the tree map + for (String hash : relatedHashToRelatedBNIdMap.keySet()) { + + // 4.8.3) Step 5.1 + data.append(hash); + + // 4.8.3) Step 5.2 + String chosenPath = ""; + + // 4.8.3) Step 5.3 + CanonicalIssuer chosenIssuer = null; + + // 4.8.3) Step 5.4 + List> permutations = this.permute(relatedHashToRelatedBNIdMap.get(hash)); + + // Check if the permutation limit is reached + if (permutations.size() > this.permutationLimit) { + throw new CanonicalizationException("Permutation limit reached, too many permutations"); + } + + for (List permutation : permutations) { + + // 4.8.3) Step 5.4.1 + CanonicalIssuer issuerCopy = new 
CanonicalIssuer(refIssuer); + + // 4.8.3) Step 5.4.2 + String path = ""; + + // 4.8.3) Step 5.4.3 + List recursionList = new ArrayList<>(); + + // 4.8.3) Step 5.4.4 + for (String relatedBNId : permutation) { + + // 4.8.3) Step 5.4.4.1 + if (this.canonicalizationState.hasCanonicalIdentifier(relatedBNId)) { + path += "_:" + this.canonicalizationState.getCanonicalIdentifierFor(relatedBNId); + } + // 4.8.3) Step 5.4.4.2 + else { + // 4.8.3) Step 5.4.4.2.1 + if (!issuerCopy.hasCanonicalIdentifier(relatedBNId)) { + recursionList.add(relatedBNId); + } + // 4.8.3) Step 5.4.4.2.2 + path += "_:" + issuerCopy.issueCanonicalIdentifier(relatedBNId); + } + + // 4.8.3) Step 5.4.4.3 + if (!chosenPath.isEmpty() && path.length() >= chosenPath.length() + && path.compareTo(chosenPath) > 0) { + break; + } + } + + // 4.8.3) Step 5.4.5 + for (String relatedBNId : recursionList) { + // 4.8.3) Step 5.4.5.1 + Pair result = this.hashNdegreeQuads(issuerCopy, relatedBNId, depth + 1); + + // 4.8.3) Step 5.4.5.2 + path += "_:" + issuerCopy.issueCanonicalIdentifier(relatedBNId); + + // 4.8.3) Step 5.4.5.3 + path += "<" + result.getLeft() + ">"; + + // 4.8.3) Step 5.4.5.4 + issuerCopy = result.getRight(); + + // 4.8.3) Step 5.4.5.5 + if (!chosenPath.isEmpty() && path.length() >= chosenPath.length() + && path.compareTo(chosenPath) > 0) { + break; + } + } + + // 4.8.3) Step 5.4.6 + if (chosenPath.isEmpty() || path.compareTo(chosenPath) < 0) { + chosenPath = path; + chosenIssuer = issuerCopy; + } + } + + // 4.8.3) Step 5.5 + data.append(chosenPath); + + // 4.8.3) Step 5.6 + refIssuer = chosenIssuer; + } + + // 4.8.3) Step 6 + return Pair.of(HashingUtility.hash(data.toString(), this.hashAlgorithm), refIssuer); + } + + /** + * Generates all possible permutations of a given list. + * + * @param original The original list to be permuted. + * @param The type of elements in the list. + * @return A list of lists, where each inner list represents a permutation of + * the original list. 
+ */ + private List> permute(List original) { + if (original.isEmpty()) { + List> result = new ArrayList<>(); + result.add(new ArrayList<>()); + return result; + } + + T firstElement = original.remove(0); + List> returnValue = new ArrayList<>(); + List> permutations = permute(original); + + for (List smallerPermutated : permutations) { + for (int index = 0; index <= smallerPermutated.size(); index++) { + List temp = new ArrayList<>(smallerPermutated); + temp.add(index, firstElement); + returnValue.add(temp); + } + } + return returnValue; + } + + /** + * Processes a quad entry by generating a hash for the related blank node and + * updating the hash-to-blank-node map. + * + * @param quad The quad edge to process. + * @param issuer The canonical issuer. + * @param blankNodeId The identifier for the current blank node. + * @param relatedHashToRelatedBNIdMap The map that stores the hash-to-blank-node + * mappings. + * @param position The position of the quad entry. + * @param relatedBN The related blank node. + */ + private void processQuadEntry(Edge quad, CanonicalIssuer issuer, String blankNodeId, + ListMap relatedHashToRelatedBNIdMap, String position, Node relatedBN) { + String relatedBNId = this.canonicalizedDataset.getIdentifierForBlankNode(relatedBN); + + if (relatedBN.isBlank() && !relatedBNId.equals(blankNodeId)) { + // 4.8.3) Step 3.1.1 + String relatedHash = this.hashRelatedBlankNode(relatedBNId, quad, issuer, position); + + // 4.8.3) Step 3.1.2 + relatedHashToRelatedBNIdMap.add(relatedHash, relatedBNId); + } + } + + ////////////////////////// + // HashRelatedBlankNode // + ////////////////////////// + + /** + * Hashes a related blank node. 
+ * + * @param relatedBNId the identifier of the related blank node + * @param quad the quad to be associated with the blank node + * @param issuer the canonical issuer + * @param position the position of the related blank node + * @return the related hash for the related blank node + * + * @see Hashing + * a Related Blank Node + * + */ + private String hashRelatedBlankNode(String relatedBNId, Edge quad, CanonicalIssuer issuer, + String position) { + // 4.7.3) Step 1 + StringBuilder input = new StringBuilder(); + input.append(position); + + // 4.7.3) Step 2 + // Append predicate value if position is not 'g' + if (!position.equals("g")) { + input.append(quad.getPredicateValue().toString()); + } + + // 4.7.3) Step 3 + // If there is a canonical identifier for relatedBNId, use it; otherwise, use + // the issuer's identifier. + if (this.canonicalizationState.hasCanonicalIdentifier(relatedBNId) + || issuer.hasCanonicalIdentifier(relatedBNId)) { + + input.append("_:" + (this.canonicalizationState.hasCanonicalIdentifier(relatedBNId) + ? this.canonicalizationState.getCanonicalIdentifierFor(relatedBNId) + : issuer.getCanonicalIdentifier(relatedBNId))); + } + // 4.7.3) Step 4 + // Append hash for blank node as fallback + else { + input.append(this.canonicalizationState.getHashForBlankNode(relatedBNId)); + } + + // 4.7.3) Step 5 + return HashingUtility.hash(input.toString(), this.hashAlgorithm); + } + + ///////////////////////// + // Overriding toString // + ///////////////////////// + + /** + * Returns a string representation of the RDF graph in canonical form. 
+ * + * @return a string representation of the RDF graph in canonical form + */ + @Override + public String toString() { + return super.toString(); + } + +} diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalizationState.java b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalizationState.java new file mode 100644 index 0000000000..4d83a4fcfd --- /dev/null +++ b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalizationState.java @@ -0,0 +1,196 @@ +package fr.inria.corese.core.print.rdfc10; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +import fr.inria.corese.kgram.api.core.Edge; + +/** + * This class manages the state of canonicalization, particularly handling + * the associations between blank nodes and their corresponding quads, + * maintaining a mapping from hash values to blank nodes and maintaining a + * mapping from blank node identifiers to canonical blank node identifiers. + */ +public class CanonicalizationState { + + private final ListMap blankNodesToQuad = new ListMap<>(); + private final ListMap hashToBlankNode = new ListMap<>(); + private final CanonicalIssuer canonicalIssuer = new CanonicalIssuer("c14n"); + + ///////////////// + // Constructor // + ///////////////// + + /** + * Constructs a new CanonicalizationState instance. + */ + public CanonicalizationState() { + } + + /////////////////////////////////// + // Quad to Blank Node Management // + /////////////////////////////////// + + /** + * Maps a blank node identifier to a specific quad. + * + * @param blankNodeId The identifier of the blank node. + * @param quad The quad to be associated with the blank node. + */ + public void associateBlankNodeWithQuad(String blankNodeId, Edge quad) { + this.blankNodesToQuad.add(blankNodeId, quad); + } + + /** + * Retrieves the list of quads associated with a specific blank node. 
+ * + * @param blankNodeId The identifier of the blank node. + * @return A list of quads associated with the blank node. + */ + public List getQuadsForBlankNode(String blankNodeId) { + return Collections.unmodifiableList(this.blankNodesToQuad.get(blankNodeId)); + } + + /////////////////////////////////// + // Hash to Blank Node Management // + /////////////////////////////////// + + /** + * Maps a hash value to a specific blank node identifier. + * + * @param hash The hash value. + * @param blankNodeId The identifier of the blank node. + */ + public void associateHashWithBlankNode(String hash, String blankNodeId) { + this.hashToBlankNode.add(hash, blankNodeId); + } + + /** + * Retrieves blanks nodes associated with a specific hash value. + * + * @param hash The hash value. + * @return A list of blank nodes associated with the hash value. + */ + public void removeHash(String hash) { + this.hashToBlankNode.remove(hash); + } + + /** + * Retrieves sorted list of blank nodes identifiers associated with a specific + * hash value. + * + * @param hash The hash value. + * @return A list of blank nodes associated with the hash value. + */ + public List getBlankNodeForHash(String hash) { + List list = this.hashToBlankNode.get(hash); + Collections.sort(list); + return Collections.unmodifiableList(list); + } + + /** + * Retrieves the hash value associated with a specific blank node identifier. + * + * @param blankNodeId The identifier of the blank node. + * @return The hash value associated with the blank node or null if no hash + * value is associated. + */ + public String getHashForBlankNode(String blankNodeId) { + for (String hash : hashToBlankNode.keySet()) { + if (hashToBlankNode.get(hash).contains(blankNodeId)) { + return hash; + } + } + return null; + } + + /** + * Retrieves a sorted list of hashes. + * + * @return A copy of the list of hashes sorted in code point order. 
+ */ + public List getHashesSorted() { + // hash are sorted in code point order by the ListMap implementation + List sortedHashes = new ArrayList<>(hashToBlankNode.keySet()); + return Collections.unmodifiableList(sortedHashes); + } + + //////////////////////////////////////// + // Canonical Blank Node ID Management // + //////////////////////////////////////// + + /** + * Issues a canonical blank node identifier for a given blank node identifier. + * If a canonical blank node identifier has already been issued for the given + * blank node identifier, the previously issued identifier is returned. + * + * @param blankNodeId The blank node identifier. + * @return The canonical blank node identifier. + */ + public String issueCanonicalBlankNodeIdFor(String blankNodeId) { + return this.canonicalIssuer.issueCanonicalIdentifier(blankNodeId); + } + + /** + * Tests whether a canonical blank node identifier has been issued for a given + * blank node identifier. + * + * @param blankNodeId The blank node identifier. + * @return True if a canonical blank node identifier has been issued for the + * given + */ + public boolean hasCanonicalIdentifier(String blankNodeId) { + return this.canonicalIssuer.hasCanonicalIdentifier(blankNodeId); + } + + /** + * Retrieves the canonical blank node identifier for a given blank node + * identifier. + * + * @param blankNodeId The blank node identifier. + * @return The canonical blank node identifier. + */ + public String getCanonicalIdentifierFor(String blankNodeId) { + return this.canonicalIssuer.getCanonicalIdentifier(blankNodeId); + } + + /** + * Retrieves the issued identifier map. 
+ * + * @return A unmodifiable map of issued blank node identifiers to canonical + */ + public Map getIssuedIdentifierMap() { + return this.canonicalIssuer.getIssuedIdentifierMap(); + } + + /////////////// + // To String // + /////////////// + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + + sb.append("Blank Nodes to Quads Mapping: \n"); + this.blankNodesToQuad.forEach((key, value) -> sb.append(key).append(" -> ").append(value).append("\n")); + + sb.append("\n"); + + sb.append("Hash to Blank Node Mapping: \n"); + this.hashToBlankNode.forEach((key, value) -> sb.append(key).append(" -> ").append(value).append("\n")); + + sb.append("\n"); + + sb.append("Blank Node to Canonical Blank Node Mapping: \n"); + this.canonicalIssuer.getBlankNodeIdentifiers().forEach(identifier -> { + sb.append(identifier); + sb.append(" -> "); + sb.append(this.canonicalIssuer.issueCanonicalIdentifier(identifier)); + sb.append("\n"); + }); + + return sb.toString(); + } +} diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalizedDataset.java b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalizedDataset.java new file mode 100644 index 0000000000..faacfc46cc --- /dev/null +++ b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/CanonicalizedDataset.java @@ -0,0 +1,153 @@ +package fr.inria.corese.core.print.rdfc10; + +import java.util.Collection; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.Map; + +import fr.inria.corese.core.Graph; +import fr.inria.corese.kgram.api.core.Node; + +/** + * Represents a dataset that has undergone canonicalization. + * This class manages a graph and the mapping of blank nodes to their + * identifiers. 
+ */ +public class CanonicalizedDataset { + + private final Graph dataset; + private Map blankNodesToIdentifiers = new LinkedHashMap<>(); + private Map issuedIdentifierMap = new LinkedHashMap<>(); + + ///////////////// + // Constructor // + ///////////////// + + /** + * Constructs a CanonicalizedDataset with a given graph. + * Initializes the blank node to identifier mapping as uninitialized. + * + * @param graph The graph to be associated with this dataset. + */ + public CanonicalizedDataset(Graph graph) { + this.dataset = graph; + } + + //////////////////////// + // Dataset Management // + //////////////////////// + + /** + * Retrieves the dataset associated with this CanonicalizedDataset. + * + * @return The associated graph. + */ + public Graph getDataset() { + return dataset; + } + + /////////////////////////////////////////////////// + // Blank Nodes to Identifiers Mapping Management // + /////////////////////////////////////////////////// + + /** + * Adds a blank node and its identifier to the mapping. + * Only adds the blank node identifier if the mapping has not been initialized. + * + * @param blankNode The blank node to be added. + * @throws IllegalArgumentException if the node is not a blank node. + */ + public void associateBlankNodeWithIdentifier(Node blankNode) { + if (!blankNode.isBlank()) { + throw new IllegalArgumentException("Node is not blank"); + } + + String identifier = blankNode.getLabel().replace("_:", ""); + this.blankNodesToIdentifiers.put(blankNode, identifier); + } + + /** + * Retrieves the identifier associated with a given blank node. + * + * @param blankNode The blank node. + * @return The identifier associated with the blank node. + */ + public String getIdentifierForBlankNode(Node blankNode) { + return blankNodesToIdentifiers.get(blankNode); + } + + /** + * Retrieves the mapping of blank nodes to identifiers. + * + * @return The mapping of blank nodes to identifiers. 
+ */ + public Collection getBlankNodeIdentifiers() { + return Collections.unmodifiableCollection(blankNodesToIdentifiers.values()); + } + + ////////////////////////////////////// + // Issued Identifier Map Management // + ////////////////////////////////////// + + /** + * Sets the issued identifier map. + * + * @param issuedIdentifierMap The issued identifier map. + */ + public void setIssuedIdentifierMap(Map issuedIdentifierMap) { + this.issuedIdentifierMap = issuedIdentifierMap; + } + + /** + * Retrieves the issued identifier for a given blank node identifier. + * + * @param blankNodeId The blank node identifier. + * @return The issued identifier, or null if none has been issued. + */ + public String getIssuedIdentifier(String blankNodeId) { + return issuedIdentifierMap.get(blankNodeId); + } + + /** + * Retrieves the issued identifier map. + * + * @return An unmodifiable view of the issued identifier map. + */ + public Map getIssuedIdentifiersMap() { + return Collections.unmodifiableMap(issuedIdentifierMap); + } + + /////////////// + // To String // + /////////////// + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + + sb.append("Dataset: \n"); + sb.append(dataset.size()); + sb.append(" triples\n"); + + sb.append("\n"); + + sb.append("Blank Nodes to Identifiers Mapping: \n"); + blankNodesToIdentifiers.forEach((blankNode, identifier) -> { + sb.append(blankNode); + sb.append(" -> "); + sb.append(identifier); + sb.append("\n"); + }); + + sb.append("\n"); + + sb.append("Issued Identifier Map: \n"); + issuedIdentifierMap.forEach((blankNodeId, issuedIdentifier) -> { + sb.append(blankNodeId); + sb.append(" -> "); + sb.append(issuedIdentifier); + sb.append("\n"); + }); + + return sb.toString(); + } +} diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/HashingUtility.java b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/HashingUtility.java new file mode 100644 index 0000000000..07cb9add02 --- /dev/null +++ b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/HashingUtility.java @@ -0,0 +1,76 @@ +package 
fr.inria.corese.core.print.rdfc10; + +import java.nio.charset.StandardCharsets; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; + +/** + * Utility class for hashing functionalities. + */ +public class HashingUtility { + + /** + * Represents the hash algorithm to use. + */ + public static enum HashAlgorithm { + /** + * Represents the SHA-256 hash algorithm. + */ + SHA_256("SHA-256"), + + /** + * Represents the SHA-384 hash algorithm. + */ + SHA_384("SHA-384"); + + private final String algorithm; + + private HashAlgorithm(String algorithm) { + this.algorithm = algorithm.replace("-", ""); + } + + /** + * Gets the algorithm name. + * + * @return the algorithm name + */ + public String getAlgorithm() { + return algorithm; + } + } + + /** + * Hashes a string using the specified algorithm. + * + * @param input the string to hash + * @param algorithm the algorithm to use + * @return the hash of the input string + */ + public static String hash(String input, HashAlgorithm algorithm) { + try { + MessageDigest digest = MessageDigest.getInstance(algorithm.getAlgorithm()); + byte[] encodedhash = digest.digest(input.getBytes(StandardCharsets.UTF_8)); + return toHexString(encodedhash); + } catch (NoSuchAlgorithmException e) { + throw new RuntimeException(algorithm.getAlgorithm() + " algorithm not found", e); + } + } + + /** + * Converts a byte array to a hexadecimal string. 
+ * + * @param hash the byte array to convert + * @return the hexadecimal string + */ + private static String toHexString(byte[] hash) { + StringBuilder hexString = new StringBuilder(2 * hash.length); + for (byte b : hash) { + String hex = Integer.toHexString(0xff & b); + if (hex.length() == 1) { + hexString.append('0'); + } + hexString.append(hex); + } + return hexString.toString(); + } +} diff --git a/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/ListMap.java b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/ListMap.java new file mode 100644 index 0000000000..57c75391c8 --- /dev/null +++ b/corese-core/src/main/java/fr/inria/corese/core/print/rdfc10/ListMap.java @@ -0,0 +1,106 @@ +package fr.inria.corese.core.print.rdfc10; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.TreeMap; + +/** + * A map structure where each key is associated with a list of values. + * This class facilitates the storage of multiple values per key. + * + * @param the type of keys maintained by this map + * @param the type of mapped values + */ +public class ListMap implements Map> { + private final Map> map = new TreeMap<>(); + + @Override + public void clear() { + map.clear(); + } + + @Override + public boolean containsKey(Object key) { + return map.containsKey(key); + } + + @Override + public boolean containsValue(Object value) { + return map.containsValue(value); + } + + @Override + public Set>> entrySet() { + return map.entrySet(); + } + + @Override + public List get(Object key) { + return map.get(key); + } + + @Override + public boolean isEmpty() { + return map.isEmpty(); + } + + /** + * Returns a set view of the keys contained in this map. + * The set is ordered according to the natural ordering of its elements, + * which is determined by the internal TreeMap used in the implementation. 
+ * + * @return a set view of the keys contained in this map, ordered according to + * the natural ordering of its elements + */ + @Override + public Set keySet() { + return map.keySet(); + } + + @Override + public List put(K key, List value) { + return map.put(key, value); + } + + @Override + public void putAll(Map> m) { + map.putAll(m); + } + + @Override + public List remove(Object key) { + return map.remove(key); + } + + @Override + public int size() { + return map.size(); + } + + @Override + public Collection> values() { + return map.values(); + } + + /** + * Adds a value to the list associated with a specific key. + * + * @param key the key with which the specified value is to be associated + * @param value the value to be associated with the specified key + */ + public void add(K key, V value) { + map.computeIfAbsent(key, k -> new ArrayList<>()).add(value); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder("{\n"); + map.forEach((key, value) -> sb.append("\t").append(key).append(" -> ").append(value).append("\n")); + sb.append("}"); + + return sb.toString(); + } +} diff --git a/corese-core/src/main/java/fr/inria/corese/core/storage/CoreseGraphDataManager.java b/corese-core/src/main/java/fr/inria/corese/core/storage/CoreseGraphDataManager.java index b7e90136bd..e7828b1e0d 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/storage/CoreseGraphDataManager.java +++ b/corese-core/src/main/java/fr/inria/corese/core/storage/CoreseGraphDataManager.java @@ -35,7 +35,7 @@ public class CoreseGraphDataManager implements DataManager { */ protected CoreseGraphDataManager() { setGraph(new Graph()); - init(); + initlocal(); } /** @@ -46,10 +46,10 @@ protected CoreseGraphDataManager() { */ protected CoreseGraphDataManager(Graph g) { setGraph(g); - init(); + initlocal(); } - void init() { + void initlocal() { emptyNodeList = new ArrayList<>(0); emptyEdgeList = new ArrayList<>(0); } diff --git 
a/corese-core/src/main/java/fr/inria/corese/core/storage/DataManagerJava.java b/corese-core/src/main/java/fr/inria/corese/core/storage/DataManagerJava.java index e87a5ef9ed..39e87b69c0 100644 --- a/corese-core/src/main/java/fr/inria/corese/core/storage/DataManagerJava.java +++ b/corese-core/src/main/java/fr/inria/corese/core/storage/DataManagerJava.java @@ -128,7 +128,7 @@ String clean(String str) { return str.replace("%20", " "); } - @Override + //@Override void init() { if (isLdscript()) { initldscript(); @@ -149,9 +149,10 @@ void initgraph() { Load ld = Load.create(getGraph()); ld.setDataManager(this); // temporary authorize xt:read file to read e.g. json document - Level read = Access.setValue(Feature.READ, Level.DEFAULT); - Level readFile = Access.setValue(Feature.READ_FILE, Level.DEFAULT); - + //Level read = Access.setValue(Feature.READ, Level.DEFAULT); + // Level readFile = Access.setValue(Feature.READ_FILE, Level.DEFAULT); + // authorize xt:read() because accept list is empty during this initialization + Access.setDefaultResultWhenEmptyAccept(true); try { if (getLoad()!=null) { for (String name : getLoad()) { @@ -186,8 +187,9 @@ void initgraph() { logger.error(ex.getMessage()); } finally { - Access.set(Feature.READ, read); - Access.set(Feature.READ_FILE, readFile); + //Access.set(Feature.READ, read); + //Access.set(Feature.READ_FILE, readFile); + Access.setDefaultResultWhenEmptyAccept(false); } } diff --git a/corese-core/src/main/java/module-info.java b/corese-core/src/main/java/module-info.java index f2b33b0f90..0da70623eb 100644 --- a/corese-core/src/main/java/module-info.java +++ b/corese-core/src/main/java/module-info.java @@ -15,6 +15,7 @@ requires semargl.rdfa; requires jdk.management; requires org.json; + requires org.apache.commons.lang3; exports fr.inria.corese.core.load; exports fr.inria.corese.core.load.result; @@ -26,6 +27,7 @@ exports fr.inria.corese.core.util; exports fr.inria.corese.core.index; exports fr.inria.corese.core.print; + exports 
fr.inria.corese.core.print.rdfc10; exports fr.inria.corese.core.api; exports fr.inria.corese.core.edge; exports fr.inria.corese.core.logic; diff --git a/corese-core/src/main/resources/data/corese/property.properties b/corese-core/src/main/resources/data/corese/property.properties index 07f9ffa155..a695af2d26 100644 --- a/corese-core/src/main/resources/data/corese/property.properties +++ b/corese-core/src/main/resources/data/corese/property.properties @@ -69,7 +69,7 @@ GRAPH_NODE_AS_DATATYPE = false EXTERNAL_NAMED_GRAPH = true -# load in kg:default or in file path as named graph +# load in kg:default LOAD_IN_DEFAULT_GRAPH = true # skolemize bnode as URI diff --git a/corese-gui/metadata/flatpak/fr.inria.corese.CoreseGui.yml b/corese-gui/metadata/flatpak/fr.inria.corese.CoreseGui.yml index 56a521fbd0..b711212aa9 100644 --- a/corese-gui/metadata/flatpak/fr.inria.corese.CoreseGui.yml +++ b/corese-gui/metadata/flatpak/fr.inria.corese.CoreseGui.yml @@ -32,7 +32,7 @@ modules: sources: - type: file - url: https://github.com/Wimmics/corese/releases/download/release-4.5.0/corese-gui-4.5.0.jar + url: https://github.com/Wimmics/corese/releases/download/release-4.5.1/corese-gui-4.5.1.jar sha256: cb3270d2ceccc9e8194c7d657eeca3c037e957de994f7eca3bbd1cf58fdbea89 x-checker-data: type: json @@ -43,6 +43,6 @@ modules: - type: git dest: git_repo url: https://github.com/Wimmics/corese - tag: metadata-4.5.0 + tag: metadata-4.5.1 - type: file path: run.sh diff --git a/corese-gui/metadata/fr.inria.corese.CoreseGui.appdata.xml b/corese-gui/metadata/fr.inria.corese.CoreseGui.appdata.xml index a0a713a826..6f1cc91d95 100644 --- a/corese-gui/metadata/fr.inria.corese.CoreseGui.appdata.xml +++ b/corese-gui/metadata/fr.inria.corese.CoreseGui.appdata.xml @@ -3,7 +3,7 @@ fr.inria.corese.CoreseGui Corese-Gui - + Unlock the power of the Semantic Web Débloquez la puissance du Web sémantique @@ -26,7 +26,8 @@ the creation, manipulation, analysis, serialization, and querying of RDF data.

    - Moreover, the GUI brings advanced capabilities to your fingertips, incorporating extended + Moreover, the GUI brings advanced capabilities to your fingertips, incorporating + extended functionalities such as STTL SPARQL, SPARQL Rule, and LDScript. This enhances your data processing and reasoning workflows, making Corese-GUI an indispensable tool for both novices and experts in the Semantic Web domain. @@ -124,6 +125,10 @@ + + + +

      @@ -134,7 +139,8 @@
      • Mise à jour du message de chargement dans Corese-GUI.
      • -
      • Correction de la requête fédérée avec des déclarations PREFIX échouant dans certaines conditions.
      • +
      • Correction de la requête fédérée avec des déclarations PREFIX échouant dans + certaines conditions.
      • Mise à jour de la bibliothèque json à 20231013.
      diff --git a/corese-gui/src/main/java/fr/inria/corese/gui/core/MainFrame.java b/corese-gui/src/main/java/fr/inria/corese/gui/core/MainFrame.java index c7076cb4b7..ce152dd780 100755 --- a/corese-gui/src/main/java/fr/inria/corese/gui/core/MainFrame.java +++ b/corese-gui/src/main/java/fr/inria/corese/gui/core/MainFrame.java @@ -46,6 +46,7 @@ import javax.swing.JMenu; import javax.swing.JMenuBar; import javax.swing.JMenuItem; +import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JRadioButton; import javax.swing.JTabbedPane; @@ -71,7 +72,10 @@ import fr.inria.corese.core.load.LoadException; import fr.inria.corese.core.load.QueryLoad; import fr.inria.corese.core.load.result.SPARQLResultParser; +import fr.inria.corese.core.print.CanonicalRdf10Format; import fr.inria.corese.core.print.ResultFormat; +import fr.inria.corese.core.print.rdfc10.CanonicalRdf10.CanonicalizationException; +import fr.inria.corese.core.print.rdfc10.HashingUtility.HashAlgorithm; import fr.inria.corese.core.query.QueryProcess; import fr.inria.corese.core.rule.RuleEngine; import fr.inria.corese.core.transform.TemplatePrinter; @@ -108,7 +112,7 @@ public class MainFrame extends JFrame implements ActionListener { private static MainFrame singleton; private static final long serialVersionUID = 1L; private static final int LOAD = 1; - private static final String TITLE = "Corese 4.5.0 - Inria UCA I3S - 2023-10-14"; + private static final String TITLE = "Corese 4.5.1 - Inria UCA I3S - 2023-10-14"; // On déclare notre conteneur d'onglets protected static JTabbedPane conteneurOnglets; // Compteur pour le nombre d'onglets query créés @@ -133,15 +137,25 @@ public class MainFrame extends JFrame implements ActionListener { private JMenuItem loadRule; private JMenuItem loadStyle; private JMenuItem cpTransform, shex; + private JMenu fileMenuSaveResult; private JMenuItem saveQuery; - private JMenuItem saveResult; + private JMenuItem saveResultXml; + private JMenuItem saveResultJson; + 
private JMenuItem saveResultCsv; + private JMenuItem saveResultTsv; + private JMenuItem saveResultMarkdown; private JMenuItem loadAndRunRule; private JMenuItem refresh; private JMenuItem exportRDF; private JMenuItem exportTurtle; - private JMenuItem exportOwl; - private JMenuItem exportJson; private JMenuItem exportTrig; + private JMenuItem exportJson; + private JMenuItem exportNt; + private JMenuItem exportNq; + private JMenuItem exportOwl; + private JMenu exportCanonic; + private JMenuItem saveRDFC_1_0_sha256; + private JMenuItem saveRDFC_1_1_sha384; private JMenuItem copy; private JMenuItem cut; private JMenuItem paste; @@ -351,7 +365,7 @@ public void stateChanged(ChangeEvent changeEvent) { duplicateFrom.setEnabled(true); comment.setEnabled(true); saveQuery.setEnabled(true); - saveResult.setEnabled(true); + fileMenuSaveResult.setEnabled(true); MyJPanelQuery temp = (MyJPanelQuery) getConteneurOnglets().getComponentAt(selected); @@ -370,7 +384,7 @@ public void stateChanged(ChangeEvent changeEvent) { duplicateFrom.setEnabled(false); comment.setEnabled(false); saveQuery.setEnabled(false); - saveResult.setEnabled(false); + fileMenuSaveResult.setEnabled(false); } // Si l'onglet sélectionné est le "+" on crée un nouvel onglet Query if (c == plus) { @@ -636,17 +650,34 @@ private void initMenu() { exportTurtle.addActionListener(this); exportTurtle.setToolTipText("Export graph in Turtle format"); + exportTrig = new JMenuItem("TriG"); + exportTrig.addActionListener(this); + exportTrig.setToolTipText("Export graph in TriG format"); + + exportJson = new JMenuItem("JsonLD"); + exportJson.addActionListener(this); + exportJson.setToolTipText("Export graph in JSON format"); + + exportNt = new JMenuItem("NTriple"); + exportNt.addActionListener(this); + exportNt.setToolTipText("Export graph in NTriple format"); + + exportNq = new JMenuItem("NQuad"); + exportNq.addActionListener(this); + exportNq.setToolTipText("Export graph in NQuad format"); + exportOwl = new JMenuItem("OWL"); 
exportOwl.addActionListener(this); exportOwl.setToolTipText("Export graph in OWL format"); - exportJson = new JMenuItem("JSON"); - exportJson.addActionListener(this); - exportJson.setToolTipText("Export graph in JSON format"); + exportCanonic = new JMenu("Canonic"); + exportCanonic.addActionListener(this); - exportTrig = new JMenuItem("TriG"); - exportTrig.addActionListener(this); - exportTrig.setToolTipText("Export graph in TriG format"); + saveRDFC_1_0_sha256 = new JMenuItem("RDFC-1.0 (sha256)"); + saveRDFC_1_0_sha256.addActionListener(this); + + saveRDFC_1_1_sha384 = new JMenuItem("RDFC-1.0 (sha384)"); + saveRDFC_1_1_sha384.addActionListener(this); execWorkflow = new JMenuItem("Process Workflow"); execWorkflow.addActionListener(this); @@ -660,8 +691,20 @@ private void initMenu() { saveQuery = new JMenuItem("Save Query"); saveQuery.addActionListener(this); - saveResult = new JMenuItem("Save Result"); - saveResult.addActionListener(this); + saveResultXml = new JMenuItem("XML"); + saveResultXml.addActionListener(this); + + saveResultJson = new JMenuItem("JSON"); + saveResultJson.addActionListener(this); + + saveResultCsv = new JMenuItem("CSV"); + saveResultCsv.addActionListener(this); + + saveResultTsv = new JMenuItem("TSV"); + saveResultTsv.addActionListener(this); + + saveResultMarkdown = new JMenuItem("Markdown"); + saveResultMarkdown.addActionListener(this); itable = new HashMap<>(); @@ -798,6 +841,7 @@ private void initMenu() { JMenu fileMenuLoad = new JMenu("Load"); JMenu fileMenuSaveGraph = new JMenu("Save Graph"); + fileMenuSaveResult = new JMenu("Save Result"); // On ajoute tout au menu fileMenu.add(fileMenuLoad); @@ -823,12 +867,23 @@ private void initMenu() { fileMenu.add(fileMenuSaveGraph); fileMenuSaveGraph.add(exportRDF); fileMenuSaveGraph.add(exportTurtle); - fileMenuSaveGraph.add(exportOwl); - fileMenuSaveGraph.add(exportJson); fileMenuSaveGraph.add(exportTrig); + fileMenuSaveGraph.add(exportJson); + fileMenuSaveGraph.add(exportNt); + 
fileMenuSaveGraph.add(exportNq); + fileMenuSaveGraph.add(exportOwl); + fileMenuSaveGraph.add(exportCanonic); + exportCanonic.add(saveRDFC_1_0_sha256); + exportCanonic.add(saveRDFC_1_1_sha384); fileMenu.add(saveQuery); - fileMenu.add(saveResult); + + fileMenu.add(fileMenuSaveResult); + fileMenuSaveResult.add(saveResultXml); + fileMenuSaveResult.add(saveResultJson); + fileMenuSaveResult.add(saveResultCsv); + fileMenuSaveResult.add(saveResultTsv); + fileMenuSaveResult.add(saveResultMarkdown); queryMenu.add(iselect); queryMenu.add(iconstruct); @@ -1150,7 +1205,7 @@ public void actionPerformed(ActionEvent l_Event) { duplicateFrom.setEnabled(false); comment.setEnabled(false); saveQuery.setEnabled(false); - saveResult.setEnabled(false); + fileMenuSaveResult.setEnabled(false); } } @@ -1308,23 +1363,47 @@ else if (e.getSource() == saveQuery) { String style = loadText(); defaultStylesheet = style; } // Sauvegarde le résultat sous forme XML dans un fichier texte - else if (e.getSource() == saveResult) { - save(current.getTextAreaXMLResult().getText()); + else if (e.getSource() == saveResultXml) { + saveResult(ResultFormat.XML_FORMAT); + } // Sauvegarde le résultat sous forme JSON dans un fichier texte + else if (e.getSource() == saveResultJson) { + saveResult(ResultFormat.JSON_FORMAT); + } // Sauvegarde le résultat sous forme CSV dans un fichier texte + else if (e.getSource() == saveResultCsv) { + saveResult(ResultFormat.CSV_FORMAT); + } // Sauvegarde le résultat sous forme TSV dans un fichier texte + else if (e.getSource() == saveResultTsv) { + saveResult(ResultFormat.TSV_FORMAT); + } // Sauvegarde le résultat sous forme Markdown dans un fichier texte + else if (e.getSource() == saveResultMarkdown) { + saveResult(ResultFormat.MARKDOWN_FORMAT); } // Exporter le graph au format RDF/XML else if (e.getSource() == exportRDF) { saveGraph(Transformer.RDFXML); } // Exporter le graph au format Turle else if (e.getSource() == exportTurtle) { saveGraph(Transformer.TURTLE); - } // 
Exporter le graph au format OWL - else if (e.getSource() == exportOwl) { - saveGraph(Transformer.OWL); - } // Exporter le graph au format Json - else if (e.getSource() == exportJson) { - saveGraph(Transformer.JSON); } // Exporter le graph au format TriG else if (e.getSource() == exportTrig) { saveGraph(Transformer.TRIG); + } // Exporter le graph au format Json + else if (e.getSource() == exportJson) { + saveGraph(Transformer.JSON); + } // Exporter le graph au format NTriple + else if (e.getSource() == exportNt) { + saveGraph(ResultFormat.NTRIPLES_FORMAT); + } // Exporter le graph au format NQuad + else if (e.getSource() == exportNq) { + saveGraph(ResultFormat.NQUADS_FORMAT); + } // Exporter le graph au format OWL + else if (e.getSource() == exportOwl) { + saveGraph(Transformer.OWL); + } // Exporter le graph au format RDFC-1.0 (sha256) + else if (e.getSource() == saveRDFC_1_0_sha256) { + saveGraphCanonic(HashAlgorithm.SHA_256); + } // Exporter le graph au format RDFC-1.0 (sha384) + else if (e.getSource() == saveRDFC_1_1_sha384) { + saveGraphCanonic(HashAlgorithm.SHA_384); } // Charge et exécute une règle directement else if (e.getSource() == loadAndRunRule) { loadRunRule(); @@ -1483,6 +1562,52 @@ void saveGraph(String format) { } } + /** + * Save the graph in canonic format with the specified algorithm + * + * @param format the format in which the graph will be saved + */ + void saveGraphCanonic(HashAlgorithm algo) { + Graph graph = myCorese.getGraph(); + CanonicalRdf10Format transformer = null; + + try { + transformer = new CanonicalRdf10Format(graph, algo); + } catch (CanonicalizationException ex) { + // Create a new alert dialog with the error message and ok button + String errorMessage = "Unable to canonicalize the RDF data. 
" + ex.getMessage(); + JOptionPane.showMessageDialog(this, errorMessage, "Error", JOptionPane.ERROR_MESSAGE); + } + + if (transformer != null) { + save(transformer.toString()); + } + } + + /** + * Save the graph in the specified format + * + * @param format the format in which the graph will be saved + * (See ResultFormat.java for the list of formats) + */ + void saveGraph(int format) { + Graph graph = myCorese.getGraph(); + + ResultFormat ft = ResultFormat.create(graph, format); + save(ft.toString()); + } + + /** + * Save the result of a query in the specified format + * + * @param format the format in which the result will be saved + * (See ResultFormat.java for the list of formats) + */ + void saveResult(int format) { + ResultFormat ft = ResultFormat.create(current.getMappings(), format); + save(ft.toString()); + } + void saveQuery() { // Créer un JFileChooser JFileChooser filechoose = new JFileChooser(getPath()); @@ -1729,6 +1854,14 @@ void load(boolean wf, boolean exec, boolean run, Filter... 
filter) { if (!model.contains(lPath) && !wf) { model.addElement(lPath); } + + if (extension(lPath) == null) { + appendMsg("Error: No extension for file: " + lPath + "\n"); + appendMsg("Please select a file with an extension (e.g: .ttl, .rdf, .trig, .jsonld, .html, ...)\n"); + appendMsg("Load is aborted\n"); + return; + } + appendMsg("Loading " + extension(lPath) + " File from path : " + lPath + "\n"); if (wf) { if (exec) { diff --git a/corese-gui/src/main/java/fr/inria/corese/gui/query/GraphEngine.java b/corese-gui/src/main/java/fr/inria/corese/gui/query/GraphEngine.java index d51a59eec1..09fab0e73d 100644 --- a/corese-gui/src/main/java/fr/inria/corese/gui/query/GraphEngine.java +++ b/corese-gui/src/main/java/fr/inria/corese/gui/query/GraphEngine.java @@ -240,7 +240,7 @@ public Load loader() { public void load(String path) throws EngineException, LoadException { Load ld = loader(); - ld.parse(path); + ld.parse(path, ld.defaultGraph()); // in case of load rule if (ld.getRuleEngine() != null) { setRuleEngine(ld.getRuleEngine()); diff --git a/corese-gui/src/main/java/fr/inria/corese/gui/query/MyJPanelQuery.java b/corese-gui/src/main/java/fr/inria/corese/gui/query/MyJPanelQuery.java index 6e1a4053fc..fb7858156a 100644 --- a/corese-gui/src/main/java/fr/inria/corese/gui/query/MyJPanelQuery.java +++ b/corese-gui/src/main/java/fr/inria/corese/gui/query/MyJPanelQuery.java @@ -597,7 +597,8 @@ String graphToString(Mappings map) { case Property.RDF_XML: return ResultFormat.create(g, ResultFormat.RDF_XML_FORMAT).toString(); case Property.TURTLE: - return ResultFormat.create(g, ResultFormat.TURTLE_FORMAT).toString(); + return ResultFormat.create(g, map.getQuery().getAST().getNSM(), + ResultFormat.TURTLE_FORMAT).toString(); case Property.TRIG: return ResultFormat.create(g, ResultFormat.TRIG_FORMAT).toString(); case Property.JSON: @@ -605,7 +606,8 @@ String graphToString(Mappings map) { } } // default - return ResultFormat.create(g, ResultFormat.TRIG_FORMAT).toString(); + return 
ResultFormat.create(g, + map.getQuery().getAST().getNSM(), ResultFormat.TRIG_FORMAT).toString(); // return turtle(g); } diff --git a/corese-server/build-docker/README.md b/corese-server/build-docker/README.md index 4865bf8221..0950105d8e 100644 --- a/corese-server/build-docker/README.md +++ b/corese-server/build-docker/README.md @@ -17,6 +17,7 @@ Corese also implements the LDScript and STTL SPARQL extensions. The Docker image tag includes the Corese version installed in the image. The following versions are currently available: +- corese:4.5.1 - corese:4.5.0 - corese:4.4.1 - corese:4.4.0 diff --git a/corese-server/build-docker/corese/Dockerfile b/corese-server/build-docker/corese/Dockerfile index 5ce4463ba8..2993c23f83 100644 --- a/corese-server/build-docker/corese/Dockerfile +++ b/corese-server/build-docker/corese/Dockerfile @@ -7,7 +7,7 @@ ENV CORESE="/usr/local/corese" RUN mkdir -p $CORESE WORKDIR $CORESE -RUN wget https://github.com/Wimmics/corese/releases/download/release-4.5.0/corese-server-4.5.0.jar +RUN wget https://github.com/Wimmics/corese/releases/download/release-4.5.1/corese-server-4.5.1.jar COPY log4j2.xml $CORESE/log4j2.xml COPY corese-default-profile.ttl $CORESE/corese-default-profile.ttl diff --git a/corese-server/build-docker/corese/corese-server.sh b/corese-server/build-docker/corese/corese-server.sh index a60a8498dd..38c6e02437 100644 --- a/corese-server/build-docker/corese/corese-server.sh +++ b/corese-server/build-docker/corese/corese-server.sh @@ -1,7 +1,7 @@ #!/bin/bash CORESE=/usr/local/corese -JAR=$CORESE/corese-server-4.5.0.jar +JAR=$CORESE/corese-server-4.5.1.jar PROFILE=$CORESE/config/corese-profile.ttl PROPERTIES=$CORESE/config/corese-properties.properties OPTIONS=${OPTIONS:-} @@ -77,4 +77,4 @@ java \ -lp \ -pp file://$PROFILE \ -init $PROPERTIES \ - "$OPTIONS" + $OPTIONS diff --git a/corese-server/pom.xml b/corese-server/pom.xml index 75dc0bae5f..a680d946c5 100644 --- a/corese-server/pom.xml +++ b/corese-server/pom.xml @@ -9,17 +9,46 
@@ fr.inria.corese corese-parent ${revision} - .. + - - - true - + + fr.inria.corese corese-server - corese-server + + ${project.parent.groupId} + corese-core + + + ${project.parent.groupId} + corese-jena + ${revision} + + + ${project.parent.groupId} + corese-rdf4j + ${revision} + + + + javax.xml.bind + jaxb-api + 2.3.1 + + + com.sun.xml.bind + jaxb-core + 2.3.0.1 + + + com.sun.xml.bind + jaxb-impl + 2.3.2 + + + org.glassfish.jersey.core jersey-client @@ -36,25 +65,28 @@ org.glassfish.jersey.inject jersey-hk2 + + org.glassfish.metro + webservices-rt + + + org.glassfish.jersey.containers + jersey-container-servlet-core + + junit junit 4.13.2 test + ${project.parent.groupId} shex ${project.parent.version} - - ${project.parent.groupId} - corese-core - - - org.glassfish.metro - webservices-rt - + org.apache.logging.log4j log4j-slf4j18-impl @@ -67,12 +99,11 @@ org.apache.logging.log4j log4j-core + commons-lang commons-lang - - commons-cli commons-cli @@ -85,18 +116,7 @@ commons-io commons-io - - org.seleniumhq.selenium - selenium-firefox-driver - 3.13.0 - - - - org.seleniumhq.selenium - selenium-java - 3.13.0 - - + org.jsoup jsoup @@ -110,39 +130,11 @@ org.eclipse.jetty jetty-servlets - - org.glassfish.jersey.containers - jersey-container-servlet-core - org.eclipse.jetty.websocket websocket-jetty-server - - ${project.parent.groupId} - corese-jena - ${revision} - - - ${project.parent.groupId} - corese-rdf4j - ${revision} - - - org.apache.jena - jena-tdb - 4.8.0 - - - org.apache.jena - jena-arq - 4.8.0 - - - org.apache.jena - jena-core - 4.8.0 - + @@ -181,43 +173,6 @@ - - maven-antrun-plugin - 1.8 - - - start-third-parties - pre-integration-test - - - - - - - - - - - run - - - - stop-third-parties - post-integration-test - - - - - - - - run - - - - org.jacoco jacoco-maven-plugin diff --git a/corese-server/src/main/java/fr/inria/corese/server/webservice/EmbeddedJettyServer.java b/corese-server/src/main/java/fr/inria/corese/server/webservice/EmbeddedJettyServer.java index 
c691e9806e..73ac01237d 100644 --- a/corese-server/src/main/java/fr/inria/corese/server/webservice/EmbeddedJettyServer.java +++ b/corese-server/src/main/java/fr/inria/corese/server/webservice/EmbeddedJettyServer.java @@ -188,7 +188,7 @@ public static void main(String args[]) throws Exception { port = Integer.parseInt(cmd.getOptionValue("p")); } if (cmd.hasOption("v")) { - logger.info("version 4.5.0"); + logger.info("version 4.5.1"); System.exit(0); } if (cmd.hasOption("e")) { @@ -424,7 +424,7 @@ public static void main(String args[]) throws Exception { server.join(); } catch (ParseException exp) { - System.err.println("Parsing failed. Reason: " + exp.getMessage()); + logger.error("Parsing failed. Reason: " + exp.getMessage()); } } diff --git a/corese-server/src/main/java/fr/inria/corese/server/webservice/Profile.java b/corese-server/src/main/java/fr/inria/corese/server/webservice/Profile.java index ba2fe585b3..31763a0ff1 100644 --- a/corese-server/src/main/java/fr/inria/corese/server/webservice/Profile.java +++ b/corese-server/src/main/java/fr/inria/corese/server/webservice/Profile.java @@ -20,7 +20,9 @@ import fr.inria.corese.core.transform.ContextBuilder; import fr.inria.corese.core.util.Parameter; import fr.inria.corese.kgram.api.core.Edge; +import fr.inria.corese.sparql.exceptions.SafetyException; import fr.inria.corese.sparql.triple.function.term.TermEval; +import static fr.inria.corese.sparql.triple.function.term.TermEval.READ_MESS; import fr.inria.corese.sparql.triple.parser.Access; import fr.inria.corese.sparql.triple.parser.Access.Feature; import fr.inria.corese.sparql.triple.parser.Access.Level; @@ -385,8 +387,18 @@ String read(String path) throws IOException, LoadException { } String loadQuery(String path) throws IOException, LoadException { - if (isProtected && !path.startsWith(getServer())) { - throw new IOException(path); +// if (isProtected && !path.startsWith(getServer())) { +// throw new IOException(path); +// } + if (path.startsWith(getServer())) { 
+ // OK + } else { + try { + // do not accept (file) path when accept list is empty + Access.check(Feature.READ, Access.getQueryAccessLevel(true), path, READ_MESS, false); + } catch (SafetyException ex) { + throw new IOException(path); + } } return read(path); } diff --git a/corese-server/src/main/java/fr/inria/corese/server/webservice/SPARQLRestAPI.java b/corese-server/src/main/java/fr/inria/corese/server/webservice/SPARQLRestAPI.java index c826c5ec04..3dfc9924fb 100644 --- a/corese-server/src/main/java/fr/inria/corese/server/webservice/SPARQLRestAPI.java +++ b/corese-server/src/main/java/fr/inria/corese/server/webservice/SPARQLRestAPI.java @@ -1,5 +1,6 @@ package fr.inria.corese.server.webservice; +import java.util.ArrayList; import java.util.List; import java.util.UUID; @@ -54,6 +55,7 @@ public class SPARQLRestAPI implements ResultFormatDef, URLParam { static final String SPARQL_RESULTS_JSON = ResultFormat.SPARQL_RESULTS_JSON; static final String SPARQL_RESULTS_CSV = ResultFormat.SPARQL_RESULTS_CSV; static final String SPARQL_RESULTS_TSV = ResultFormat.SPARQL_RESULTS_TSV; + static final String SPARQL_RESULTS_MD = ResultFormat.SPARQL_RESULTS_MD; static final String SPARQL_QUERY = ResultFormat.SPARQL_QUERY; static final String XML = ResultFormat.XML; @@ -65,9 +67,16 @@ public class SPARQLRestAPI implements ResultFormatDef, URLParam { static final String TRIG = ResultFormat.TRIG; static final String TRIG_TEXT = ResultFormat.TRIG_TEXT; static final String NT_TEXT = ResultFormat.NT_TEXT; + static final String N_TRIPLES = ResultFormat.N_TRIPLES; + static final String N_QUADS = ResultFormat.N_QUADS; static final String TEXT = ResultFormat.TEXT; static final String HTML = ResultFormat.HTML; + // Profiles + private final String CN10_SHA = "https://www.w3.org/TR/rdf-canon"; + private final String CN10_SHA256 = "https://www.w3.org/TR/rdf-canon#sha-256"; + private final String CN10_SHA384 = "https://www.w3.org/TR/rdf-canon#sha-384"; + public static final String 
PROFILE_DEFAULT = "profile.ttl"; public static final String DEFAULT = NSManager.STL + "default"; @@ -124,6 +133,7 @@ static TripleStore getTripleStore() { return store; } + // Named sparql endpoint static TripleStore getTripleStore(String name) { if (name == null) { return getTripleStore(); @@ -256,7 +266,7 @@ public Response loadRDF( return Response.status(404).header(headerAccept, "*").entity(output).build(); } - logger.info(output = "Successfully loaded " + remotePath); + logger.info(output + "Successfully loaded " + remotePath); return Response.status(200).header(headerAccept, "*").entity(output).build(); } @@ -373,6 +383,35 @@ String getResult(Mappings map, String format) { return getResultFormat(map, format).toString(); } + /** + * Get the profiles from the Accept header + * + * @param accept The Accept header + * @return The profiles + */ + private ArrayList getProfiles(String accept) { + ArrayList profiles = new ArrayList<>(); + String[] parts = accept.split(";"); + for (String part : parts) { + if (part.contains("profile=")) { + String[] profileParts = part.split("="); + String[] profileUrls = profileParts[1].split(" "); + + for (String profileUrl : profileUrls) { + // Remove the quotes + profileUrl = profileUrl.replace("\"", ""); + + profiles.add(profileUrl); + } + } + } + return profiles; + + // eg: Accept: + // application/n-quads;profile="https://www.w3.org/TR/rdf-canon/#sha-256 + // https://www.w3.org/TR/rdf-canon#sha-384" + } + @GET @Produces({ HTML }) public Response getHTMLForGet(@jakarta.ws.rs.core.Context HttpServletRequest request, @@ -396,8 +435,22 @@ public Response getHTMLForGet(@jakarta.ws.rs.core.Context HttpServletRequest req (mode == null || mode.isEmpty())) { query = "select * where {?s ?p ?o} limit 5"; return new Transformer() - .queryGETHTML(request, fr.inria.corese.core.transform.Transformer.SPARQL, - null, null, null, null, format, access, query, null, null, null, defaut, named); + .queryGETHTML( + request, + oper, + 
fr.inria.corese.core.transform.Transformer.SPARQL, + null, + null, + null, + null, + format, + access, + query, + name, + null, + null, + defaut, + named); } return getResultFormat(request, name, oper, uri, param, mode, query, access, defaut, named, null, HTML_FORMAT, transform); @@ -473,6 +526,23 @@ public Response getTriplesTSVForGet(@jakarta.ws.rs.core.Context HttpServletReque return myGetResult(request, name, oper, uri, param, mode, query, access, defaut, named, TSV_FORMAT); } + @GET + @Produces(SPARQL_RESULTS_MD) + public Response getTriplesMDForGet(@jakarta.ws.rs.core.Context HttpServletRequest request, + @PathParam("name") String name, + @PathParam("oper") String oper, + @QueryParam("query") String query, + @QueryParam("access") String access, + @QueryParam("default-graph-uri") List defaut, + @QueryParam("named-graph-uri") List named, + @QueryParam("param") List param, + @QueryParam("mode") List mode, + @QueryParam("uri") List uri) { + + logger.info("getTriplesMDForGet"); + return myGetResult(request, name, oper, uri, param, mode, query, access, defaut, named, MARKDOWN_FORMAT); + } + // ---------------------------------------------------- // SPARQL QUERY - DESCRIBE and CONSTRUCT with HTTP GET // ---------------------------------------------------- @@ -541,10 +611,60 @@ public Response getRDFGraphJsonLDForGet(@jakarta.ws.rs.core.Context HttpServletR @QueryParam("mode") List mode, @QueryParam("uri") List uri) { - logger.info("getRDFGraphJsonLDForGet"); + System.out.println("getRDFGraphJsonLDForGet"); return myGetResult(request, name, oper, uri, param, mode, query, access, defaut, named, JSONLD_FORMAT); } + @GET + @Produces({ N_TRIPLES }) + public Response getRDFGraphNTriplesForGet(@jakarta.ws.rs.core.Context HttpServletRequest request, + @PathParam("name") String name, + @PathParam("oper") String oper, + @QueryParam("query") String query, + @QueryParam("access") String access, + @QueryParam("default-graph-uri") List defaut, + @QueryParam("named-graph-uri") 
List named, + @QueryParam("param") List param, + @QueryParam("mode") List mode, + @QueryParam("uri") List uri) { + + logger.info("getRDFGraphNTriplesForGet"); + return myGetResult(request, name, oper, uri, param, mode, query, access, defaut, named, NTRIPLES_FORMAT); + } + + @GET + @Produces({ N_QUADS }) + public Response getRDFGraphNQuadsForGet(@jakarta.ws.rs.core.Context HttpServletRequest request, + @PathParam("name") String name, + @PathParam("oper") String oper, + @QueryParam("query") String query, + @QueryParam("access") String access, + @QueryParam("default-graph-uri") List defaut, + @QueryParam("named-graph-uri") List named, + @QueryParam("param") List param, + @QueryParam("mode") List mode, + @QueryParam("uri") List uri) { + + logger.info("getRDFGraphNQuadsForGet"); + + // Get the profiles from the Accept header + ArrayList profiles = getProfiles(request.getHeader("Accept")); + + for (String profile : profiles) { + if (profile.equals(this.CN10_SHA) || profile.equals(this.CN10_SHA256)) { + logger.info("Profile: " + profile); + return myGetResult(request, name, oper, uri, param, mode, query, access, defaut, named, RDFC10_FORMAT); + } + if (profile.equals(this.CN10_SHA384)) { + logger.info("Profile: " + profile); + return myGetResult(request, name, oper, uri, param, mode, query, access, defaut, named, + RDFC10_SHA384_FORMAT); + } + } + + return myGetResult(request, name, oper, uri, param, mode, query, access, defaut, named, NQUADS_FORMAT); + } + // ---------------------------------------------------- // SPARQL QUERY - SELECT and ASK with HTTP POST // ---------------------------------------------------- @@ -601,6 +721,43 @@ public Response getXMLForPostText(@jakarta.ws.rs.core.Context HttpServletRequest return getResultForPost(request, name, oper, uri, param, mode, query, access, defaut, named, TEXT_FORMAT); } + @POST + @Produces(HTML) + @Consumes(MediaType.APPLICATION_FORM_URLENCODED) + public Response getHTMLForPost(@jakarta.ws.rs.core.Context 
HttpServletRequest request, + @PathParam("name") String name, + @PathParam("oper") String oper, + @DefaultValue("") @FormParam("query") String query, + @FormParam("access") String access, + @FormParam("default-graph-uri") List defaut, + @FormParam("named-graph-uri") List named, + @FormParam("param") List param, + @FormParam("mode") List mode, + @FormParam("uri") List uri, + String message) { + + logger.info("getHTMLForPost"); + + query = getQuery(query, message); + + return new Transformer().queryPOSTHTML( + request, + oper, + fr.inria.corese.core.transform.Transformer.SPARQL, + null, + null, + null, + null, + HTML, + access, + query, + name, + null, + null, + defaut, + named); + } + /** * Default POST function (ie when there is no header Accept) * SPARQL service clause executed here by corese @@ -855,7 +1012,62 @@ public Response getRDFGraphJsonLDForPost(@jakarta.ws.rs.core.Context HttpServlet query = getQuery(query, update, message); logger.info("getRDFGraphJsonLDForPost"); return getResultForPost(request, name, oper, uri, param, mode, query, access, defaut, named, JSONLD_FORMAT); + } + + @POST + @Produces({ N_TRIPLES }) + @Consumes(MediaType.APPLICATION_FORM_URLENCODED) + public Response getRDFGraphNTriplesForPost(@jakarta.ws.rs.core.Context HttpServletRequest request, + @PathParam("name") String name, + @PathParam("oper") String oper, + @DefaultValue("") @FormParam("query") String query, + @DefaultValue("") @FormParam("update") String update, + @FormParam("access") String access, + @FormParam("default-graph-uri") List defaut, + @FormParam("named-graph-uri") List named, + @FormParam("param") List param, + @FormParam("mode") List mode, + @FormParam("uri") List uri, + String message) { + query = getQuery(query, update, message); + logger.info("getRDFGraphNTriplesForPost"); + return getResultForPost(request, name, oper, uri, param, mode, query, access, defaut, named, NTRIPLES_FORMAT); + } + + @POST + @Produces({ N_QUADS }) + 
@Consumes(MediaType.APPLICATION_FORM_URLENCODED) + public Response getRDFGraphNQuadsForPost(@jakarta.ws.rs.core.Context HttpServletRequest request, + @PathParam("name") String name, + @PathParam("oper") String oper, + @DefaultValue("") @FormParam("query") String query, + @DefaultValue("") @FormParam("update") String update, + @FormParam("access") String access, + @FormParam("default-graph-uri") List defaut, + @FormParam("named-graph-uri") List named, + @FormParam("param") List param, + @FormParam("mode") List mode, + @FormParam("uri") List uri, + String message) { + query = getQuery(query, update, message); + logger.info("getRDFGraphNQuadsForPost"); + + // Get the profiles from the Accept header + ArrayList profiles = getProfiles(request.getHeader("Accept")); + + for (String profile : profiles) { + if (profile.equals(this.CN10_SHA) || profile.equals(this.CN10_SHA256)) { + logger.info("Profile: " + profile); + return myGetResult(request, name, oper, uri, param, mode, query, access, defaut, named, RDFC10_FORMAT); + } + if (profile.equals(this.CN10_SHA384)) { + logger.info("Profile: " + profile); + return myGetResult(request, name, oper, uri, param, mode, query, access, defaut, named, + RDFC10_SHA384_FORMAT); + } + } + return myGetResult(request, name, oper, uri, param, mode, query, access, defaut, named, NQUADS_FORMAT); } // ---------------------------------------------------- diff --git a/corese-server/src/main/java/fr/inria/corese/server/webservice/SPARQLResult.java b/corese-server/src/main/java/fr/inria/corese/server/webservice/SPARQLResult.java index 7f83ac7904..44e36e6fad 100644 --- a/corese-server/src/main/java/fr/inria/corese/server/webservice/SPARQLResult.java +++ b/corese-server/src/main/java/fr/inria/corese/server/webservice/SPARQLResult.java @@ -1,54 +1,57 @@ package fr.inria.corese.server.webservice; -import fr.inria.corese.server.webservice.message.LinkedResult; +import java.io.UnsupportedEncodingException; +import java.net.URLDecoder; +import 
java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + import fr.inria.corese.compiler.federate.FederateVisitor; import fr.inria.corese.core.print.ResultFormat; +import fr.inria.corese.core.print.rdfc10.CanonicalRdf10.CanonicalizationException; import fr.inria.corese.kgram.core.Mappings; +import fr.inria.corese.server.webservice.message.LinkedResult; import fr.inria.corese.sparql.api.IDatatype; import fr.inria.corese.sparql.api.ResultFormatDef; import fr.inria.corese.sparql.datatype.DatatypeMap; import fr.inria.corese.sparql.exceptions.EngineException; import fr.inria.corese.sparql.triple.function.term.Binding; -import fr.inria.corese.sparql.triple.parser.Dataset; +import fr.inria.corese.sparql.triple.parser.Access; +import fr.inria.corese.sparql.triple.parser.Access.Level; import fr.inria.corese.sparql.triple.parser.Context; +import fr.inria.corese.sparql.triple.parser.Dataset; import fr.inria.corese.sparql.triple.parser.URLParam; -import fr.inria.corese.sparql.triple.parser.Access.Level; -import fr.inria.corese.sparql.triple.parser.Access; -import java.io.UnsupportedEncodingException; -import java.net.URLDecoder; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.List; -import java.util.Optional; -import jakarta.ws.rs.core.Response; import jakarta.servlet.http.HttpServletRequest; +import jakarta.ws.rs.core.Response; import jakarta.ws.rs.core.Response.ResponseBuilder; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; /** * Process sparql query, post process query result, generate query result format */ -public class SPARQLResult implements ResultFormatDef, URLParam { - +public class SPARQLResult implements ResultFormatDef, URLParam { + static private final Logger logger = LogManager.getLogger(SPARQLResult.class); private static final String 
headerAccept = "Access-Control-Allow-Origin"; private static final String ERROR_ENDPOINT = "Error while querying Corese SPARQL endpoint"; - private static final String OPER = "operation"; - private static final String URL = Context.URL; + private static final String OPER = "operation"; + private static final String URL = Context.URL; static final int ERROR = 500; private static SPARQLResult singleton; - + private HttpServletRequest request; QuerySolverVisitorServer visitor; - + static { - setSingleton(new SPARQLResult()); + setSingleton(new SPARQLResult()); + } + + SPARQLResult() { } - - - SPARQLResult(){} - + SPARQLResult(HttpServletRequest request) { setRequest(request); } @@ -56,39 +59,42 @@ public class SPARQLResult implements ResultFormatDef, URLParam { static TripleStore getTripleStore() { return SPARQLRestAPI.getTripleStore(); } - - static TripleStore getTripleStore (String name) { - if (name == null) { - return getTripleStore(); - } - return Manager.getEndpoint(name); + + static TripleStore getTripleStore(String name) { + if (name == null) { + return getTripleStore(); + } + return Manager.getEndpoint(name); } - - + /** * Specific endpoint function where format can be specified by format parameter - * Content-Type is set according to format parameter and what is returned by ResultFormat + * Content-Type is set according to format parameter and what is returned by + * ResultFormat * - * @name is a) the name of a specific triple store, b) undefined for standard sparql endpoint + * @name is a) the name of a specific triple store, b) undefined for standard + * sparql endpoint * @oper is sparql | federate | symbolic name defined in urlprofile.ttl * @uri is optional list of URI. 
use case: URL of shacl shape * @param is optional parameter in format: param=key~val;val - * @mode is such as mode=debug;link;log + * @mode is such as mode=debug;link;log * @access is a key that may give access to protected features - * @defaut and @named are graph name URI - * @format is json|xml to specify return format when there is no http header content - * @type is format specified by content negotiation http header (consider type otherwise format) + * @defaut and @named are graph name URI + * @format is json|xml to specify return format when there is no http header + * content + * @type is format specified by content negotiation http header (consider type + * otherwise format) * @transform is list of transformation such as st:map */ - public Response getResultFormat(String name, String oper, + public Response getResultFormat(String name, String oper, List uri, List param, List mode, - String query, String access, + String query, String access, List defaut, List named, - String format, int type, List transform) { - - try { + String format, int type, List transform) { + + try { logger.info("Endpoint URL: " + getRequest().getRequestURL()); - + query = getQuery(query, mode); if (query == null) { throw new EngineException("Undefined query parameter "); @@ -97,26 +103,33 @@ public Response getResultFormat(String name, String oper, beforeRequest(getRequest(), query); Dataset ds = createDataset(getRequest(), defaut, named, access); - + beforeParameter(ds, oper, uri, param, mode, transform); Mappings map = getTripleStore(name).query(getRequest(), query, ds); complete(map, ds.getContext()); afterParameter(ds, map); - - ResultFormat rf = getFormat(map, ds, format, type, transform); + + ResultFormat rf = getFormat(map, ds, format, type, transform); String res = rf.toString(); - + ResponseBuilder rb = Response.status(Response.Status.OK).header(headerAccept, "*"); - + if (format != null) { // real content type of result, possibly different from @Produces rb = 
rb.header("Content-Type", rf.getContentType()); } Response resp = rb.entity(res).build(); - - afterRequest(getRequest(), resp, query, map, res, ds); - + + afterRequest(getRequest(), resp, query, map, res, ds); + return resp; + } catch (CanonicalizationException ex) { + String errorMessage = "Unable to canonicalize the RDF data. " + ex.getMessage(); + logger.error(errorMessage); + return Response.status(ERROR) + .header(headerAccept, "*") + .entity(errorMessage) + .build(); } catch (EngineException ex) { logger.error("query:"); logger.error(query); @@ -125,14 +138,14 @@ public Response getResultFormat(String name, String oper, return Response.status(ERROR).header(headerAccept, "*").entity(message).build(); } } - + String getQuery(String query, List mode) { if (query == null && mode != null) { query = getContext().getDefaultValue(mode, QUERY); } return query; } - + /** * Post processing */ @@ -144,28 +157,28 @@ void complete(Mappings map, Context c) { } } if (c.hasValue(EXPLAIN)) { - Binding bind = (Binding) map.getBinding(); - if (bind != null && bind.getTrace().length()>0) { + Binding bind = (Binding) map.getBinding(); + if (bind != null && bind.getTrace().length() > 0) { LinkedResult lr = linkedResult(c, "explain"); lr.write(bind.getTrace().toString()); map.addLink(lr.getURL()); } } } - - /** - * Creates a Dataset based on a set of default or named graph URIs. - * For *strong* SPARQL compliance, use dataset.complete() before returning the dataset. + + /** + * Creates a Dataset based on a set of default or named graph URIs. + * For *strong* SPARQL compliance, use dataset.complete() before returning the + * dataset. 
* * @return a dataset - */ + */ Dataset createDataset(HttpServletRequest request, List defaut, List named, String access) { Dataset ds = null; - if (((defaut != null) && (!defaut.isEmpty())) + if (((defaut != null) && (!defaut.isEmpty())) || ((named != null) && (!named.isEmpty()))) { ds = Dataset.instance(defaut, named); - } - else { + } else { ds = new Dataset(); } boolean b = SPARQLRestAPI.hasKey(request, access); @@ -177,24 +190,23 @@ Dataset createDataset(HttpServletRequest request, List defaut, List uri, + Dataset beforeParameter(Dataset ds, String oper, List uri, List param, List mode, List transform) { if (oper != null) { ds.getContext().set(OPER, oper); List federation = new ArrayList<>(); switch (oper) { - + case FEDERATE: // From SPARQLService: var name is bound to d2kab // URL = http://corese.inria.fr/d2kab/federate @@ -202,49 +214,49 @@ Dataset beforeParameter(Dataset ds, String oper, List uri, // From SPARQL endpoint (alternative) mode and uri are bound // http://corese.inria.fr/sparql?mode=federate&uri=http://ns.inria.fr/federation/d2kab mode = leverage(mode); - //uri = leverage(uri); + // uri = leverage(uri); // declare federate mode for TripleStore query() mode.add(FEDERATE); // federation URL defined in /webapp/data/demo/fedprofile.ttl federation.add(ds.getContext().get(URL).getLabel()); defineFederation(ds, federation); - // additional parameters attached to URL in urlparameter.ttl + // additional parameters attached to URL in urlparameter.ttl break; - + case COMPILE: - // /test/compile?uri=http://myendpoint/sparql + // /test/compile?uri=http://myendpoint/sparql mode = leverage(mode); mode.add(FEDERATE); mode.add(COMPILE); federation.addAll(uri); defineFederation(ds, federation); break; - + case SPARQL: // URL = http://corese.inria.fr/id/sparql // when id is a federation: union of query results of endpoint of id federation // otherwise query triple store with name=id String surl = ds.getContext().get(URL).getLabel(); - + if 
(surl.contains("/federate/sparql")) { // federate query with graph index mode = leverage(mode); mode.add(FEDERATE); // authorize service clause -// Level level = Access.getQueryAccessLevel(true, true); -// ds.getCreateContext().setLevel(level); + // Level level = Access.getQueryAccessLevel(true, true); + // ds.getCreateContext().setLevel(level); logger.info("Federate query with graph index"); break; } - + String furl = surl; - + if (FederateVisitor.getFederation(furl) == null) { furl = surl.replace("/sparql", "/federate"); } - + if (FederateVisitor.getFederation(furl) != null) { - // federation is defined + // federation is defined mode = leverage(mode); mode.add(FEDERATE); mode.add(SPARQL); @@ -253,58 +265,58 @@ Dataset beforeParameter(Dataset ds, String oper, List uri, defineFederation(ds, federation); } break; - + // default: // other operations considered as sparql endpoint - // with server name if any + // with server name if any default: - // /map/sparql - + // /map/sparql + } - // get additional parameters attached to URL in urlprofile.ttl - //context(ds.getContext(), getContext()); + // get additional parameters attached to URL in urlprofile.ttl + // context(ds.getContext(), getContext()); } - - // get additional parameters attached to URL in urlprofile.ttl + + // get additional parameters attached to URL in urlprofile.ttl context(ds.getContext(), getContext()); - - // get default parameters attached to joker mode * in urlprofile.ttl - //ds.getContext().context(getContext(), STAR); - - if (uri!=null && !uri.isEmpty()) { - // list of URI given as parameter uri= + + // get default parameters attached to joker mode * in urlprofile.ttl + // ds.getContext().context(getContext(), STAR); + + if (uri != null && !uri.isEmpty()) { + // list of URI given as parameter uri= ds.getContext().set(URI, DatatypeMap.listResource(uri)); } - + if (param != null) { for (String kw : param) { // decode param=key~val;val ds.getContext().mode(getContext(), PARAM, decode(kw)); } } - 
+ if (mode != null) { for (String kw : mode) { // decode mode=map ds.getContext().mode(getContext(), MODE, decode(kw)); } } - + if (!ds.getContext().hasValue(USER)) { // mode=user means skip mode=* - // get default parameters attached to joker mode * in urlprofile.ttl + // get default parameters attached to joker mode * in urlprofile.ttl ds.getContext().context(getContext(), STAR); } - - if (transform != null && ! transform.isEmpty()) { + + if (transform != null && !transform.isEmpty()) { ds.getContext().set(URLParam.TRANSFORM, DatatypeMap.newStringList(transform)); } - + beforeParameter(ds); - + return ds; } - + /** * urlprofile.ttl may predefine parameters for endpoint URL eg /psparql * complete Context accordingly as if it were URL parameters @@ -312,9 +324,6 @@ Dataset beforeParameter(Dataset ds, String oper, List uri, void context(Context c, Context gc) { c.context(gc, c.get(URL).getLabel()); } - - - /** * Server Context build from urlprofile.ttl @@ -323,13 +332,12 @@ void context(Context c, Context gc) { Context getContext() { return Profile.getProfile().getContext(); } - - + void defineFederation(Dataset ds, List federation) { ds.setUriList(federation); - //ds.getContext().set(FEDERATION, DatatypeMap.listResource(federation)); + // ds.getContext().set(FEDERATION, DatatypeMap.listResource(federation)); } - + String decode(String value) { try { return URLDecoder.decode(value, StandardCharsets.UTF_8.toString()); @@ -337,11 +345,11 @@ String decode(String value) { return value; } } - + List leverage(List name) { return (name == null) ? 
new ArrayList<>() : name; } - + /** * Record dataset from named in context for documentation purpose */ @@ -354,8 +362,8 @@ void beforeParameter(Dataset ds) { if (named.size() > 0) { ds.getContext().set(NAMED_GRAPH, named); } - } - + } + void afterParameter(Dataset ds, Mappings map) { if (ds.getContext().hasValue(TRACE)) { System.out.println("SPARQL endpoint"); @@ -368,42 +376,41 @@ void afterParameter(Dataset ds, Mappings map) { System.out.println(ft); } } - + QuerySolverVisitorServer getVisitor() { return visitor; } - + SPARQLResult setVisitor(QuerySolverVisitorServer vis) { visitor = vis; return this; } - + /** - * Visitor call LDScript event @beforeRequest @public function - * profile.ttl must load function definitions, + * Visitor call LDScript event @beforeRequest @public function + * profile.ttl must load function definitions, * e.g. * */ void beforeRequest(HttpServletRequest request, String query) { getVisitor().beforeRequest(request, query); } - + void afterRequest(HttpServletRequest request, String query, Mappings map) { getVisitor().afterRequest(request, query, map); } - + void afterRequest(HttpServletRequest request, Response resp, String query, Mappings map, String res, Dataset ds) { afterRequest(map, ds, res); getVisitor().afterRequest(request, resp, query, map, res); } - + void afterRequest(Mappings map, Dataset ds, String res) { if (ds.getContext().hasValue(TRACE)) { - System.out.println("service result: \n"+res); + System.out.println("service result: \n" + res); } } - - + ResultFormat getFormat(Mappings map, Dataset ds, String format, int type, List transformList) { // predefined parameter associated to URL/mode in urlparameter.ttl transformList = selectTransformation(ds.getContext(), getValue(ds.getContext(), TRANSFORM, transformList)); @@ -413,75 +420,77 @@ ResultFormat getFormat(Mappings map, Dataset ds, String format, int type, List transformList) { - logger.info("Transform: " + transformList); - - boolean link = 
ds.getContext().hasAnyValue(LINK, LINK_REST); - ResultFormat std ; - LinkedResult lr = null; - - if (link) { - lr = linkedResult(ds.getContext(), "std"); - // prepare (and return) std result with link to transform - // map will record link url of transform in function getFormatTransformList - // result format will be generated when returning HTTP result - std = getFormatSimple(map, ds, format, type); - // record url of std result document in case transform generate link to std result (cf mapper) - ds.getContext().add(Context.STL_LINK, DatatypeMap.newResource(lr.getURL())); - } - else { - // return transform result - // record std result in href document in case transform generate link href - int mytype = (type==ResultFormat.HTML_FORMAT) ? ResultFormat.UNDEF_FORMAT : type; - std = getFormatSimple(map, ds, format, mytype); - } - - Optional res = getFormatTransformList(map, ds, format, type, transformList); - if (res.isPresent()) { - // no link: return transformation result - return res.get(); - } - - if (link) { - // do it only now because map has recorded transform link - // generate std result document in case transform manage link (cf mapper) - lr.write(std.toString()); - } - // link: return query result - return std; + logger.info("Transform: " + transformList); + + boolean link = ds.getContext().hasAnyValue(LINK, LINK_REST); + ResultFormat std; + LinkedResult lr = null; + + if (link) { + lr = linkedResult(ds.getContext(), "std"); + // prepare (and return) std result with link to transform + // map will record link url of transform in function getFormatTransformList + // result format will be generated when returning HTTP result + std = getFormatSimple(map, ds, format, type); + // record url of std result document in case transform generate link to std + // result (cf mapper) + ds.getContext().add(Context.STL_LINK, DatatypeMap.newResource(lr.getURL())); + } else { + // return transform result + // record std result in href document in case transform generate link 
href + int mytype = (type == ResultFormat.HTML_FORMAT) ? ResultFormat.UNDEF_FORMAT : type; + std = getFormatSimple(map, ds, format, mytype); + } + + Optional res = getFormatTransformList(map, ds, format, type, transformList); + if (res.isPresent()) { + // no link: return transformation result + return res.get(); + } + + if (link) { + // do it only now because map has recorded transform link + // generate std result document in case transform manage link (cf mapper) + lr.write(std.toString()); + } + // link: return query result + return std; } - + /** * URLs of one request share the same key file name */ LinkedResult linkedResult(Context c, String name) { return new LinkedResult(name, "", c.getCreateKey()); } - + /** * Process transformations * When mode=link, add url of transformation result in map query result link - * and return empty + * and return empty * Otherwise return result of (first) transformation */ - Optional getFormatTransformList(Mappings map, Dataset ds, String format, int type, List transformList) { + Optional getFormatTransformList(Mappings map, Dataset ds, String format, int type, + List transformList) { ResultFormat fst = null; Context c = ds.getContext(); // prepare the list of linked result URL before all // each result may then contain link to these URLs List linkedResult = getLinkedResult(map, c, transformList); int i = 0; - + for (String transform : transformList) { ResultFormat res = getFormatTransform(map, ds, format, type, transform); if (fst == null) { @@ -494,31 +503,33 @@ Optional getFormatTransformList(Mappings map, Dataset ds, String f if (c.hasAnyValue(LINK, LINK_REST)) { // mode=link - // save transformation result in document and record URL of document in map result link + // save transformation result in document and record URL of document in map + // result link LinkedResult lr = linkedResult.get(i++); - lr.write(res.toString()); - logger.info(String.format("Transformation %s result in: %s", + lr.write(res.toString()); + 
logger.info(String.format("Transformation %s result in: %s", c.nsm().toPrefix(transform), lr.getURL())); } else { // no link: return result of first transformation return Optional.of(res); } } - + if (c.hasValue(LINK_REST)) { - // return result of first transformation (it may have generated links to other transformations) + // return result of first transformation (it may have generated links to other + // transformations) return Optional.of(fst); - } - else { + } else { // query result will be returned with link url to transformation result return Optional.empty(); } } - + /** * Prepare LinkedResult place holder list with file name and URL - * Each LinkedResult will be used to store a result in a document accessible by URL - * PRAGMA: map record link url + * Each LinkedResult will be used to store a result in a document accessible by + * URL + * PRAGMA: map record link url * It will be considered by ResultFormat std in * function getFormatTransform above * @@ -526,7 +537,7 @@ Optional getFormatTransformList(Mappings map, Dataset ds, String f List getLinkedResult(Mappings map, Context c, List transformList) { if (c.hasAnyValue(LINK, LINK_REST)) { List list = new ArrayList<>(); - + for (String name : transformList) { LinkedResult lr = linkedResult(c, getName(name)); list.add(lr); @@ -536,7 +547,7 @@ List getLinkedResult(Mappings map, Context c, List transfo } return null; } - + ResultFormat getFormatTransform(Mappings map, Dataset ds, String format, int type, String transform) { ResultFormat ft; if (type == UNDEF_FORMAT) { @@ -544,13 +555,13 @@ ResultFormat getFormatTransform(Mappings map, Dataset ds, String format, int typ } else { ft = ResultFormat.create(map, type, transform).init(ds); } - if (map.getBinding()!=null && ft.getBind()==null) { + if (map.getBinding() != null && ft.getBind() == null) { // share ldscript binding environment with transformer - ft.setBind((Binding)map.getBinding()); + ft.setBind((Binding) map.getBinding()); } return ft; } - + 
ResultFormat getFormatSimple(Mappings map, Dataset ds, String format, int type) { if (type == UNDEF_FORMAT) { return ResultFormat.create(map, format).init(ds); @@ -558,7 +569,7 @@ ResultFormat getFormatSimple(Mappings map, Dataset ds, String format, int type) return ResultFormat.create(map, type).init(ds); } } - + /** * predefined parameter associated to url/mode in urlprofile.ttl */ @@ -568,7 +579,7 @@ List getValue(Context ct, String name, List value) { } return ct.getStringList(name); } - + /** * select authorized transformations */ @@ -581,9 +592,7 @@ List selectTransformation(Context ct, List list) { // check authorized transformation return Access.selectNamespace(Access.Feature.LINKED_TRANSFORMATION, ct.getLevel(), alist); } - - - + /** * trans;trans -> list of trans * st:all -> st:xml st:json @@ -594,29 +603,27 @@ List prepare(List transformList) { System.out.println("server context: " + getContext()); } List list = new ArrayList<>(); - + for (String name : transformList) { if (name.contains(";")) { for (String key : name.split(";")) { getContext().prepare(key, list); } - } - else { + } else { getContext().prepare(name, list); } } return list; } - - + String getName(String transform) { if (transform.contains("#")) { - return transform.substring(1+transform.indexOf("#")); + return transform.substring(1 + transform.indexOf("#")); } - return transform.substring(1+transform.lastIndexOf("/")); + return transform.substring(1 + transform.lastIndexOf("/")); } - + public static SPARQLResult getSingleton() { return singleton; } diff --git a/corese-server/src/main/java/fr/inria/corese/server/webservice/SrvWrapper.java b/corese-server/src/main/java/fr/inria/corese/server/webservice/SrvWrapper.java index cdc0235a75..3c58d47169 100644 --- a/corese-server/src/main/java/fr/inria/corese/server/webservice/SrvWrapper.java +++ b/corese-server/src/main/java/fr/inria/corese/server/webservice/SrvWrapper.java @@ -1,11 +1,20 @@ package fr.inria.corese.server.webservice; import 
static fr.inria.corese.server.webservice.EmbeddedJettyServer.HOME_PAGE; + import java.io.File; import java.io.IOException; import java.util.List; -import jakarta.servlet.http.HttpServletRequest; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.glassfish.jersey.media.multipart.FormDataBodyPart; +import org.glassfish.jersey.media.multipart.FormDataParam; +import org.jsoup.Jsoup; +import org.jsoup.nodes.Document; +import org.jsoup.nodes.Element; + +import jakarta.servlet.http.HttpServletRequest; import jakarta.ws.rs.Consumes; import jakarta.ws.rs.FormParam; import jakarta.ws.rs.GET; @@ -17,13 +26,6 @@ import jakarta.ws.rs.core.Context; import jakarta.ws.rs.core.MediaType; import jakarta.ws.rs.core.Response; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.glassfish.jersey.media.multipart.FormDataBodyPart; -import org.glassfish.jersey.media.multipart.FormDataParam; -import org.jsoup.Jsoup; -import org.jsoup.nodes.Document; -import org.jsoup.nodes.Element; /** * This class enables to assign an URL to services because as services are @@ -48,26 +50,28 @@ public class SrvWrapper { @Path(pathRegex) @Produces("text/html") public Response transformGet( - @Context HttpServletRequest request, - @PathParam("path") String path, - @QueryParam("profile") String profile, // query + transform - @QueryParam("uri") String resource, // URI of resource focus - @QueryParam("mode") String mode, - @QueryParam("param") String param, - @QueryParam("arg") String arg, - @QueryParam("format") String format, - @QueryParam("access") String access, - @QueryParam("query") String query, // SPARQL query - @QueryParam("name") String name, // SPARQL query name (in webapp/query or path or URL) - @QueryParam("value") String value, // values clause that may complement query - @QueryParam("transform") String transform, // Transformation URI to post process result - @QueryParam("default-graph-uri") List 
defaultGraphUris, - @QueryParam("named-graph-uri") List namedGraphUris) { + @Context HttpServletRequest request, + @PathParam("path") String path, + @QueryParam("profile") String profile, // query + transform + @QueryParam("uri") String resource, // URI of resource focus + @QueryParam("mode") String mode, + @QueryParam("param") String param, + @QueryParam("arg") String arg, + @QueryParam("format") String format, + @QueryParam("access") String access, + @QueryParam("query") String query, // SPARQL query + @QueryParam("name") String name, // SPARQL query name (in webapp/query or path or URL) + @QueryParam("value") String value, // values clause that may complement query + @QueryParam("transform") String transform, // Transformation URI to post process result + @QueryParam("default-graph-uri") List defaultGraphUris, + @QueryParam("named-graph-uri") List namedGraphUris) { Response rs; if (path.equalsIgnoreCase("template")) { rs = new Transformer() - .queryGETHTML(request, profile, resource, mode, param, arg, format, access, query, name, value, transform, defaultGraphUris, namedGraphUris); + .queryGETHTML(request, Transformer.getTemplateService(), profile, resource, mode, param, arg, + format, + access, query, name, value, transform, defaultGraphUris, namedGraphUris); } else if (path.equalsIgnoreCase("spin/tospin")) { rs = new SPIN().toSPIN(query); } else if (path.equalsIgnoreCase("spin/tosparql")) { @@ -76,18 +80,19 @@ public Response transformGet( rs = new SDK().sdk(query, name, value); } else if (path.startsWith("tutorial")) { rs = new Tutorial() - .get(request, getService(path), profile, resource, mode, param, arg, format, query, name, value, transform, defaultGraphUris, namedGraphUris); - } else if (path.startsWith("service")) { - logger.info("service get"); - rs = new ServiceOnline() // processList vs get - .processList(request, getService(path), profile, resource, - mode, param, arg, format, access, query, name, - value, transform, defaultGraphUris, 
namedGraphUris); - } - else if (path.startsWith("process")) { + .get(request, getService(path), profile, resource, mode, param, arg, format, query, name, value, + transform, defaultGraphUris, namedGraphUris); + } else if (path.startsWith("service")) { + logger.info("service get"); + rs = new ServiceOnline() // processList vs get + .processList(request, getService(path), profile, resource, + mode, param, arg, format, access, query, name, + value, transform, defaultGraphUris, namedGraphUris); + } else if (path.startsWith("process")) { rs = new Processor().typecheck(resource, "std", transform, query, getService(path)); } else { - rs = Response.status(Response.Status.BAD_REQUEST).header(headerAccept, "*").entity("Can not get right service solver.").build(); + rs = Response.status(Response.Status.BAD_REQUEST).header(headerAccept, "*") + .entity("Can not get right service solver.").build(); } return Response.status(rs.getStatus()).header(headerAccept, "*").entity(wrapper(rs).toString()).build(); @@ -98,26 +103,28 @@ else if (path.startsWith("process")) { @Path(pathRegex) @Produces("text/html") public Response transformPost( - @Context HttpServletRequest request, - @PathParam("path") String path, - @FormParam("profile") String profile, // query + transform - @FormParam("uri") String resource, // URI of resource focus - @FormParam("mode") String mode, - @FormParam("param") String param, - @FormParam("arg") String arg, - @FormParam("format") String format, - @FormParam("access") String access, - @FormParam("query") String query, // SPARQL query - @FormParam("name") String name, // SPARQL query name (in webapp/query or path or URL) - @FormParam("value") String value, // values clause that may complement query - @FormParam("transform") String transform, // Transformation URI to post process result - @FormParam("default-graph-uri") List defaultGraphUris, - @FormParam("named-graph-uri") List namedGraphUris) { + @Context HttpServletRequest request, + @PathParam("path") String 
path, + @FormParam("profile") String profile, // query + transform + @FormParam("uri") String resource, // URI of resource focus + @FormParam("mode") String mode, + @FormParam("param") String param, + @FormParam("arg") String arg, + @FormParam("format") String format, + @FormParam("access") String access, + @FormParam("query") String query, // SPARQL query + @FormParam("name") String name, // SPARQL query name (in webapp/query or path or URL) + @FormParam("value") String value, // values clause that may complement query + @FormParam("transform") String transform, // Transformation URI to post process result + @FormParam("default-graph-uri") List defaultGraphUris, + @FormParam("named-graph-uri") List namedGraphUris) { Response rs; if (path.equalsIgnoreCase("template")) { - rs = new Transformer().queryPOSTHTML(request, profile, resource, mode, param, arg, format, access, query, name, value, transform, defaultGraphUris, namedGraphUris); + rs = new Transformer().queryPOSTHTML(request, Transformer.getTemplateService(), profile, resource, mode, + param, + arg, format, access, query, name, value, transform, defaultGraphUris, namedGraphUris); } else if (path.equalsIgnoreCase("spin/tospin")) { rs = new SPIN().toSPINPOST(query); } else if (path.equalsIgnoreCase("spin/tosparql")) { @@ -125,14 +132,17 @@ public Response transformPost( } else if (path.equalsIgnoreCase("sdk")) { rs = new SDK().sdk(query, name, value); } else if (path.startsWith("tutorial")) { - rs = new Tutorial().post(request, getService(path), profile, resource, mode, param, arg, format, query, name, value, transform, defaultGraphUris, namedGraphUris); + rs = new Tutorial().post(request, getService(path), profile, resource, mode, param, arg, format, query, + name, value, transform, defaultGraphUris, namedGraphUris); } else if (path.startsWith("service")) { rs = new ServiceOnline() - .post(request, getService(path), profile, resource, mode, param, arg, format, access, query, name, value, transform, 
defaultGraphUris, namedGraphUris); + .post(request, getService(path), profile, resource, mode, param, arg, format, access, query, name, + value, transform, defaultGraphUris, namedGraphUris); } else if (path.startsWith("process")) { rs = new Processor().typecheck(resource, "std", transform, query, getService(path)); } else { - rs = Response.status(Response.Status.BAD_REQUEST).header(headerAccept, "*").entity("Can not get right service solver.").build(); + rs = Response.status(Response.Status.BAD_REQUEST).header(headerAccept, "*") + .entity("Can not get right service solver.").build(); } return Response.status(rs.getStatus()).header(headerAccept, "*").entity(wrapper(rs).toString()).build(); @@ -143,26 +153,27 @@ public Response transformPost( @Path(pathRegex) @Produces("text/html") public Response transformPostMD( - @Context HttpServletRequest request, - @PathParam("path") String path, - @FormDataParam("profile") String profile, // query + transform - @FormDataParam("uri") String resource, // URI of resource focus - @FormDataParam("mode") String mode, // URI of resource focus - @FormDataParam("param") String param, // URI of resource focus - @FormDataParam("arg") String arg, - @FormDataParam("format") String format, // URI of resource focus - @FormDataParam("access") String access, - @FormDataParam("query") String query, // SPARQL query - @FormDataParam("name") String name, // SPARQL query name (in webapp/query or path or URL) - @FormDataParam("value") String value, // values clause that may complement query - @FormDataParam("transform") String transform, // Transformation URI to post process result - @FormDataParam("default-graph-uri") List defaultGraphUris, - @FormDataParam("named-graph-uri") List namedGraphUris) { + @Context HttpServletRequest request, + @PathParam("path") String path, + @FormDataParam("profile") String profile, // query + transform + @FormDataParam("uri") String resource, // URI of resource focus + @FormDataParam("mode") String mode, // URI of 
resource focus + @FormDataParam("param") String param, // URI of resource focus + @FormDataParam("arg") String arg, + @FormDataParam("format") String format, // URI of resource focus + @FormDataParam("access") String access, + @FormDataParam("query") String query, // SPARQL query + @FormDataParam("name") String name, // SPARQL query name (in webapp/query or path or URL) + @FormDataParam("value") String value, // values clause that may complement query + @FormDataParam("transform") String transform, // Transformation URI to post process result + @FormDataParam("default-graph-uri") List defaultGraphUris, + @FormDataParam("named-graph-uri") List namedGraphUris) { Response rs; if (path.equalsIgnoreCase("template")) { - rs = new Transformer().queryPOSTHTML_MD(request, profile, resource, mode, param, arg, format, access, query, name, value, transform, defaultGraphUris, namedGraphUris); + rs = new Transformer().queryPOSTHTML_MD(request, profile, resource, mode, param, arg, format, access, query, + name, value, transform, defaultGraphUris, namedGraphUris); } else if (path.equalsIgnoreCase("spin/tospin")) { rs = new SPIN().toSPINPOST_MD(query); } else if (path.equalsIgnoreCase("spin/tosparql")) { @@ -170,26 +181,29 @@ public Response transformPostMD( } else if (path.equalsIgnoreCase("sdk")) { rs = new SDK().sdkPostMD(query, name, value); } else if (path.startsWith("tutorial")) { - rs = new Tutorial().postMD(request, getService(path), profile, resource, mode, param, arg, format, query, name, value, transform, defaultGraphUris, namedGraphUris); + rs = new Tutorial().postMD(request, getService(path), profile, resource, mode, param, arg, format, query, + name, value, transform, defaultGraphUris, namedGraphUris); } else if (path.startsWith("service")) { rs = new ServiceOnline() - .postMD(request, getService(path), profile, resource, mode, param, arg, format, access, query, name, value, transform, defaultGraphUris, namedGraphUris); + .postMD(request, getService(path), profile, 
resource, mode, param, arg, format, access, query, name, + value, transform, defaultGraphUris, namedGraphUris); } else if (path.startsWith("process")) { rs = new Processor().typecheckPost_MD(resource, "std", transform, query, getService(path)); } else { - rs = Response.status(Response.Status.BAD_REQUEST).header(headerAccept, "*").entity("Can not get right service solver.").build(); + rs = Response.status(Response.Status.BAD_REQUEST).header(headerAccept, "*") + .entity("Can not get right service solver.").build(); } return Response.status(rs.getStatus()).header(headerAccept, "*").entity(wrapper(rs).toString()).build(); } - //Put the response text in the #content of home page + // Put the response text in the #content of home page private String wrapper(Response rs) { - //if not using ajax, donot wrap + // if not using ajax, donot wrap if (!SPARQLRestAPI.isAjax) { return rs.getEntity().toString(); } else { - String home = EmbeddedJettyServer.resourceURI.getPath() + "/" + HOME_PAGE;//get file path + String home = EmbeddedJettyServer.resourceURI.getPath() + "/" + HOME_PAGE;// get file path try { Document doc; doc = Jsoup.parse(new File(home), null); @@ -203,7 +217,7 @@ private String wrapper(Response rs) { } } - //get the string after first "/" + // get the string after first "/" private String getService(String s) { return (s == null || s.isEmpty()) ? 
"" : s.substring(s.indexOf("/") + 1); } diff --git a/corese-server/src/main/java/fr/inria/corese/server/webservice/Transformer.java b/corese-server/src/main/java/fr/inria/corese/server/webservice/Transformer.java index 4059f3f01a..5a39613787 100644 --- a/corese-server/src/main/java/fr/inria/corese/server/webservice/Transformer.java +++ b/corese-server/src/main/java/fr/inria/corese/server/webservice/Transformer.java @@ -68,6 +68,14 @@ static TripleStore getTripleStore() { return SPARQLRestAPI.getTripleStore(); } + static TripleStore getTripleStore(String name) { + return SPARQLRestAPI.getTripleStore(name); + } + + static String getTemplateService() { + return TEMPLATE_SERVICE; + } + Profile getProfile() { return Profile.getProfile(); } @@ -78,6 +86,7 @@ Profile getProfile() { @Produces("text/html") public Response queryPOSTHTML( @jakarta.ws.rs.core.Context HttpServletRequest request, + @FormParam("operation") String operation, // operation @FormParam("profile") String profile, // query + transform @FormParam("uri") String resource, // query + transform @FormParam("mode") String mode, @@ -92,7 +101,12 @@ public Response queryPOSTHTML( @FormParam("default-graph-uri") List from, @FormParam("named-graph-uri") List named) { - Param par = new Param(TEMPLATE_SERVICE, profile, transform, resource, name, query); + String service = "/" + name + "/" + operation; + if (name == null || name.isEmpty() || operation == null || operation.isEmpty()) { + service = TEMPLATE_SERVICE; + } + + Param par = new Param(service, profile, transform, resource, name, query); par.setValue(value); par.setMode(mode); par.setParam(param); @@ -101,7 +115,7 @@ public Response queryPOSTHTML( par.setKey(access); par.setDataset(from, named); par.setRequest(request); - return template(getTripleStore(), par); + return template(getTripleStore(name), par); } @POST @@ -139,6 +153,7 @@ public Response queryPOSTHTML_MD( @Produces("text/html") public Response queryGETHTML( @jakarta.ws.rs.core.Context 
HttpServletRequest request, + @QueryParam("operation") String operation, // operation @QueryParam("profile") String profile, // query + transform @QueryParam("uri") String resource, // URI of resource focus @QueryParam("mode") String mode, @@ -153,7 +168,12 @@ public Response queryGETHTML( @QueryParam("default-graph-uri") List defaultGraphUris, @QueryParam("named-graph-uri") List namedGraphUris) { - Param par = new Param(TEMPLATE_SERVICE, profile, transform, resource, name, query); + String service = "/" + name + "/" + operation; + if (name == null || name.isEmpty() || operation == null || operation.isEmpty()) { + service = TEMPLATE_SERVICE; + } + + Param par = new Param(service, profile, transform, resource, name, query); par.setValue(value); par.setMode(mode); par.setParam(param); @@ -162,7 +182,7 @@ public Response queryGETHTML( par.setKey(access); par.setDataset(namedGraphUris, namedGraphUris); par.setRequest(request); - return template(getTripleStore(), par); + return template(getTripleStore(name), par); } public Response template(TripleStore store, Param par) { diff --git a/corese-server/src/test/java/fr/inria/corese/server/webservice/GraphStoreProtocolTest.java b/corese-server/src/test/java/fr/inria/corese/server/webservice/GraphStoreProtocolTest.java new file mode 100644 index 0000000000..b018dbdd9c --- /dev/null +++ b/corese-server/src/test/java/fr/inria/corese/server/webservice/GraphStoreProtocolTest.java @@ -0,0 +1,404 @@ +package fr.inria.corese.server.webservice; + +import static fr.inria.corese.core.print.ResultFormat.TURTLE_TEXT; +import static fr.inria.corese.core.api.Loader.TURTLE_FORMAT; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.io.BufferedReader; +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.net.HttpURLConnection; +import java.util.LinkedList; +import java.util.List; 
+ +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import fr.inria.corese.core.Graph; +import fr.inria.corese.core.load.Load; + +/** + * Test of the behavior of the corese server against graph store protocol requests. + * + * @author Pierre Maillot, P16 Wimmics INRIA I3S, 2024 + * @see https://www.w3.org/TR/2013/REC-sparql11-http-rdf-update-20130321/ + + */ +public class GraphStoreProtocolTest { + + + private static final Logger logger = LogManager.getLogger(GraphStoreProtocolTest.class); + + private static Process server; + + private static final String SERVER_URL = "http://localhost:8080/"; + private static final String GRAPH_STORE_ENDPOINT = SERVER_URL + "rdf-graph-store"; + + /** + * Start the server before running the tests. + * Loads a part of the DBpedia dataset in the server. + */ + @BeforeClass + public static void init() throws InterruptedException, IOException { + File turtleFile = new File("src/test/resources/data.ttl"); + String turtleFileAbsolutePath = turtleFile.getAbsolutePath(); + + File trigFile = new File("src/test/resources/data.trig"); + String trigFileAbsolutePath = trigFile.getAbsolutePath(); + + logger.info("starting in " + System.getProperty("user.dir")); + server = new ProcessBuilder().inheritIO().command( + "java", + "-jar", "./target/corese-server-4.5.1.jar", + "-lh", + "-su", + "-l", trigFileAbsolutePath, + "-l", turtleFileAbsolutePath).start(); + Thread.sleep(5000); + } + + @AfterClass + public static void shutdown() { + server.destroy(); + } + + @Test + public void getGraphStoreProtocolWithGraph() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(TURTLE_TEXT); + headers.add(acceptHeader); + + String urlQuery = GRAPH_STORE_ENDPOINT + "?" 
+ SPARQLTestUtils.generateGraphStoreParameters("http://example.com/A"); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + Graph describeGraph = new Graph(); + Load load = Load.create(describeGraph); + InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes()); + load.parse(inputStream, TURTLE_FORMAT); + + assertEquals(200, status); + assertEquals(1, describeGraph.size()); + } + + @Test + public void getGraphStoreProtocolWithDefault() throws Exception{ + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(TURTLE_TEXT); + headers.add(acceptHeader); + + String urlQuery = GRAPH_STORE_ENDPOINT + "?" + SPARQLTestUtils.generateGraphStoreParameters("default"); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + Graph describeGraph = new Graph(); + Load load = Load.create(describeGraph); + InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes()); + load.parse(inputStream, TURTLE_FORMAT); + + assertEquals(200, status); + assertEquals(171, describeGraph.size()); + } + + @Test + public void getGraphStoreProtocolWithUnknownGraph() throws Exception{ + + boolean absenceTest = ! 
SPARQLTestUtils.sendSPARQLAsk("ASK { GRAPH { ?x ?y ?z } }"); + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(TURTLE_TEXT); + headers.add(acceptHeader); + + String urlQuery = GRAPH_STORE_ENDPOINT + "?" + SPARQLTestUtils.generateGraphStoreParameters("http://example.com/Z"); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + Graph describeGraph = new Graph(); + Load load = Load.create(describeGraph); + InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes()); + load.parse(inputStream, TURTLE_FORMAT); + + assertTrue(absenceTest); + assertEquals(404, status); + assertEquals(0, describeGraph.size()); + } + + @Test + public void putGraphStoreProtocolNewGraph() throws Exception { + + boolean absenceTest = ! SPARQLTestUtils.sendSPARQLAsk("ASK { GRAPH { a } }"); + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Content-Type"); + acceptHeader.add(TURTLE_TEXT); + headers.add(acceptHeader); + + String urlQuery = GRAPH_STORE_ENDPOINT + "?" + SPARQLTestUtils.generateGraphStoreParameters("http://example.com/C"); + + String rdfPayload = "@prefix ex: . 
ex:C a ex:Thing ."; + + HttpURLConnection con = SPARQLTestUtils.putConnection(urlQuery, headers, rdfPayload); + + int status = con.getResponseCode(); + + con.disconnect(); + + boolean presenceTest = SPARQLTestUtils.sendSPARQLAsk("ASK { GRAPH { a } }"); + + assertEquals(201, status); + assertTrue(presenceTest); + assertTrue(absenceTest); + } + + @Test + public void putGraphStoreProtocolExistingGraph() throws Exception { + + boolean absenceTest = ! SPARQLTestUtils.sendSPARQLAsk("ASK { GRAPH { a } }"); + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Content-Type"); + acceptHeader.add(TURTLE_TEXT); + headers.add(acceptHeader); + + String urlQuery = GRAPH_STORE_ENDPOINT + "?" + SPARQLTestUtils.generateGraphStoreParameters("http://example.com/A"); + + String rdfPayload = "@prefix ex: . ex:C a ex:Thing ."; + + HttpURLConnection con = SPARQLTestUtils.putConnection(urlQuery, headers, rdfPayload); + + int status = con.getResponseCode(); + + con.disconnect(); + + boolean presenceTest = SPARQLTestUtils.sendSPARQLAsk("ASK { GRAPH { a } }"); + + assertTrue(status == 200 || status == 204); + assertTrue(presenceTest); + assertTrue(absenceTest); + } + + @Test + public void deleteGraphStoreProtocol() throws Exception { + + boolean presenceTest = SPARQLTestUtils.sendSPARQLAsk("ASK { GRAPH { ?s ?p ?o } }"); + + String urlQuery = GRAPH_STORE_ENDPOINT + "?" + SPARQLTestUtils.generateGraphStoreParameters("http://example.com/B"); + HttpURLConnection deleteCon = SPARQLTestUtils.deleteConnection(urlQuery); + + int status = deleteCon.getResponseCode(); + + deleteCon.disconnect(); + + boolean absenceTest = ! SPARQLTestUtils.sendSPARQLAsk("ASK { GRAPH { ?s ?p ?o } }"); + + assertTrue(status == 200 || status == 204); + assertTrue(presenceTest); + assertTrue(absenceTest); + } + + @Test + public void deleteGraphStoreProtocolWithUnknownGraph() throws Exception { + + boolean presenceTest = ! 
SPARQLTestUtils.sendSPARQLAsk("ASK { GRAPH { ?s ?p ?o } }"); + + String urlQuery = GRAPH_STORE_ENDPOINT + "?" + SPARQLTestUtils.generateGraphStoreParameters("http://example.com/Z"); + HttpURLConnection deleteCon = SPARQLTestUtils.deleteConnection(urlQuery); + + int status = deleteCon.getResponseCode(); + + deleteCon.disconnect(); + + boolean absenceTest = ! SPARQLTestUtils.sendSPARQLAsk("ASK { GRAPH { ?s ?p ?o } }"); + + assertEquals(404, status); + assertTrue(presenceTest); + assertTrue(absenceTest); + } + + @Test + public void postGraphStoreProtocolNewGraph() throws Exception { + + boolean absenceTest = ! SPARQLTestUtils.sendSPARQLAsk("ASK { GRAPH { a } }"); + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Content-Type"); + acceptHeader.add(TURTLE_TEXT); + headers.add(acceptHeader); + + String urlQuery = GRAPH_STORE_ENDPOINT + "?" + SPARQLTestUtils.generateGraphStoreParameters("http://example.com/C"); + + String rdfPayload = "@prefix ex: . ex:C a ex:Thing ."; + + HttpURLConnection con = SPARQLTestUtils.postConnection(urlQuery, headers, rdfPayload); + + int status = con.getResponseCode(); + + con.disconnect(); + + boolean presenceTest = SPARQLTestUtils.sendSPARQLAsk("ASK { GRAPH { a } }"); + + assertEquals(201, status); + assertTrue(presenceTest); + assertTrue(absenceTest); + } + + @Test + public void postGraphStoreProtocolExistingGraph() throws Exception { + + boolean absenceTest = ! SPARQLTestUtils.sendSPARQLAsk("ASK { GRAPH { a } }"); + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Content-Type"); + acceptHeader.add(TURTLE_TEXT); + headers.add(acceptHeader); + + String urlQuery = GRAPH_STORE_ENDPOINT + "?" + SPARQLTestUtils.generateGraphStoreParameters("http://example.com/A"); + + String rdfPayload = "@prefix ex: . 
ex:C a ex:Thing ."; + + HttpURLConnection con = SPARQLTestUtils.postConnection(urlQuery, headers, rdfPayload); + + int status = con.getResponseCode(); + + con.disconnect(); + + boolean presenceTest = SPARQLTestUtils.sendSPARQLAsk("ASK { GRAPH { a } }"); + + assertTrue(status == 200 || status == 204); + assertTrue(presenceTest); + assertTrue(absenceTest); + } + + @Test + public void headGraphStoreProtocolWithDefault() throws Exception{ + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(TURTLE_TEXT); + headers.add(acceptHeader); + + String urlQuery = GRAPH_STORE_ENDPOINT + "?" + SPARQLTestUtils.generateGraphStoreParameters("default"); + HttpURLConnection con = SPARQLTestUtils.headConnection(urlQuery); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(0, content.toString().length()); + } + + @Test + public void headGraphStoreProtocolWithGraph() throws Exception{ + + boolean presenceTest = SPARQLTestUtils.sendSPARQLAsk("ASK { GRAPH { ?x ?y ?z } }"); + + String urlQuery = GRAPH_STORE_ENDPOINT + "?" 
+ SPARQLTestUtils.generateGraphStoreParameters("http://example.com/A"); + HttpURLConnection con = SPARQLTestUtils.headConnection(urlQuery); + + BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertTrue(presenceTest); + assertEquals(200, status); + assertEquals(0, content.toString().length()); + } + + @Test + public void headGraphStoreProtocolWithUnknownGraph() throws Exception{ + + boolean absenceTest = ! SPARQLTestUtils.sendSPARQLAsk("ASK { GRAPH { ?x ?y ?z } }"); + + String urlQuery = GRAPH_STORE_ENDPOINT + "?" + SPARQLTestUtils.generateGraphStoreParameters("http://example.com/Z"); + HttpURLConnection con = SPARQLTestUtils.headConnection(urlQuery); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertTrue(absenceTest); + assertEquals(404, status); + assertEquals(0, content.toString().length()); + } + +} diff --git a/corese-server/src/test/java/fr/inria/corese/server/webservice/HttpIT.java b/corese-server/src/test/java/fr/inria/corese/server/webservice/HttpIT.java deleted file mode 100644 index f27b470819..0000000000 --- a/corese-server/src/test/java/fr/inria/corese/server/webservice/HttpIT.java +++ /dev/null @@ -1,103 +0,0 @@ -package fr.inria.corese.server.webservice; - -import fr.inria.corese.kgram.core.Mapping; -import fr.inria.corese.kgram.core.Mappings; -import fr.inria.corese.sparql.exceptions.EngineException; -import fr.inria.corese.core.load.LoadException; -import fr.inria.corese.core.load.Service; - -import org.junit.BeforeClass; 
-import org.junit.Test; - -import jakarta.ws.rs.client.Client; -import jakarta.ws.rs.client.ClientBuilder; -import jakarta.ws.rs.client.WebTarget; - -import java.io.IOException; - -import static org.junit.Assert.assertEquals; - -/** - * @author Olivier Corby, Wimmics INRIA I3S, 2015 - */ -public class HttpIT { - private boolean isDebug; - private static Process server; - - @BeforeClass - public static void init() throws InterruptedException, IOException - { - System.out.println( "starting in " + System.getProperty( "user.dir" ) ); -// server = new ProcessBuilder().inheritIO().command( -// "/usr/bin/java", -// "-jar", "./target/corese-server-4.0.1-SNAPSHOT-jar-with-dependencies.jar", -// "-lh", -// "-l", "./target/classes/webapp/data/dbpedia/dbpedia.ttl" -// ).start(); - Thread.sleep( 5000 ); - } - -// @AfterClass -// public static void shutdown() -// { -// server.destroy(); -// } - - @Test - public void test() throws LoadException, EngineException - { - Service serv = new Service( "http://localhost:8080/sparql" ); - String q = "select * where {?x ?p ?y} limit 10"; - Mappings map = serv.select( q ); - for (Mapping m: map) { - System.out.println(map); - } - assertEquals( 10, map.size() ); - } - - @Test - public void test2() - { - String service = "http://localhost:8080/template"; - Client client = ClientBuilder.newClient(); - WebTarget target = client.target( service ); - String res = target.queryParam( "profile", "st:dbedit" ).request().get( String.class ); - assertEquals( true, res.length() > 17000 ); - assertEquals( true, res.contains( "Front yougoslave de la Seconde Guerre mondiale" ) ); - System.out.println( res.length() ); - } - - - @Test - public void test3() - { - String service = "http://localhost:8080/template"; - Client client = ClientBuilder.newClient(); - WebTarget target = client.target( service ); - String res = target.queryParam( "profile", "st:dbpedia" ) - .queryParam( "uri", "http://fr.dbpedia.org/resource/Jimmy_Page" ) - .request() - .get( 
String.class ); - assertEquals( true, res.contains( "Led Zeppelin" ) ); - } - - @Test - public void test4() - { - String service = "http://localhost:8080/tutorial/cdn"; - Client client = ClientBuilder.newClient(); - WebTarget target = client.target( service ); - String res = target.request().get( String.class ); - assertEquals( true, res.contains( "Siècle" ) ); - } - - @Test - public void test5() - { - String service = "http://localhost:8080/process/owlrl"; - Client client = ClientBuilder.newClient(); - WebTarget target = client.target( service ); - String res = target.queryParam( "uri", "/data/primer.owl" ).request().get( String.class ); - assertEquals( true, res.contains( "Statement not supported in an Equivalent Class Expression" ) ); - } -} diff --git a/corese-server/src/test/java/fr/inria/corese/server/webservice/HttpServerTest.java b/corese-server/src/test/java/fr/inria/corese/server/webservice/HttpServerTest.java new file mode 100644 index 0000000000..0ae72c6d9c --- /dev/null +++ b/corese-server/src/test/java/fr/inria/corese/server/webservice/HttpServerTest.java @@ -0,0 +1,187 @@ +package fr.inria.corese.server.webservice; + +import fr.inria.corese.kgram.core.Mapping; +import fr.inria.corese.kgram.core.Mappings; +import fr.inria.corese.sparql.exceptions.EngineException; +import fr.inria.corese.core.Graph; +import fr.inria.corese.core.load.Load; +import fr.inria.corese.core.load.LoadException; +import fr.inria.corese.core.load.Service; + +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import jakarta.ws.rs.client.Client; +import jakarta.ws.rs.client.ClientBuilder; +import jakarta.ws.rs.client.WebTarget; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.io.UnsupportedEncodingException; +import java.net.HttpURLConnection; +import java.net.MalformedURLException; +import java.net.ProtocolException; +import java.net.URL; +import java.net.URLEncoder; +import 
java.nio.charset.StandardCharsets; +import java.util.HashMap; +import java.util.Map; +import java.util.stream.Collectors; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +/** + * Test of the behavior of the corese server against HTTP requests. + * + * Tests: + * - Does the server answers to a simple HTTP GET request? + * - Does the server returns HTTP headers with the appropriate values? + * + * @author Pierre Maillot, P16 Wimmics INRIA I3S, 2024 + * @author Olivier Corby, Wimmics INRIA I3S, 2015 + */ +public class HttpServerTest { + + private static final Logger logger = LogManager.getLogger(HttpServerTest.class); + + private static Process server; + + private static final String SERVER_URL = "http://localhost:8080/"; + private static final String SPARQL_ENDPOINT_URL = SERVER_URL + "sparql"; + + /** + * Get a connection to a server. + * + * @param url server URL + * @param headers HTTP headers + * @return + * @throws MalformedURLException + * @throws IOException + * @throws ProtocolException + */ + private HttpURLConnection getConnection(String url, Map headers) + throws MalformedURLException, IOException, ProtocolException { + URL u = new URL(url); + HttpURLConnection con = (HttpURLConnection) u.openConnection(); + con.setRequestMethod("GET"); + con.setConnectTimeout(5000); + con.setReadTimeout(5000); + con.setInstanceFollowRedirects(true); + for (Map.Entry entry : headers.entrySet()) { + con.setRequestProperty(entry.getKey(), entry.getValue()); + } + return con; + } + + private HttpURLConnection postConnection(String url, Map headers, String body) + throws MalformedURLException, IOException, ProtocolException { + URL u = new URL(url); + HttpURLConnection con = (HttpURLConnection) u.openConnection(); + con.setRequestMethod("POST"); + con.setConnectTimeout(5000); + con.setReadTimeout(5000); + con.setInstanceFollowRedirects(true); + 
for (Map.Entry entry : headers.entrySet()) { + con.setRequestProperty(entry.getKey(), entry.getValue()); + } + con.setDoOutput(true); + con.getOutputStream().write(body.getBytes()); + return con; + } + + private String generateSPARQLQueryUrl(String query, Map optionalParameters) { + try { + String result = SPARQL_ENDPOINT_URL + "?query=" + URLEncoder.encode(query, StandardCharsets.UTF_8.toString()); + if (!optionalParameters.isEmpty()) { + result += "&" + optionalParameters.entrySet().stream() + .map(e -> e.getKey() + "=" + e.getValue()).collect(Collectors.joining("&")); + } + return result; + } catch (UnsupportedEncodingException e) { + logger.error(e); + return null; + } + } + + private String generateSPARQLQueryUrl(String query) { + return generateSPARQLQueryUrl(query, new HashMap<>()); + } + + /** + * Start the server before running the tests. + * Loads a part of the DBpedia dataset in the server. + */ + @BeforeClass + public static void init() throws InterruptedException, IOException { + System.out.println("starting in " + System.getProperty("user.dir")); + server = new ProcessBuilder().inheritIO().command( + "java", + "-jar", "./target/corese-server-4.5.1.jar", + "-lh", + "-l", "./target/classes/webapp/data/dbpedia/dbpedia.ttl").start(); + Thread.sleep(5000); + } + + @AfterClass + public static void shutdown() { + server.destroy(); + } + + @Test + public void test() throws LoadException, EngineException { + Service serv = new Service("http://localhost:8080/sparql"); + String q = "select * where {?x ?p ?y} limit 10"; + Mappings map = serv.select(q); + for (Mapping m : map) { + System.out.println(map); + } + assertEquals(10, map.size()); + } + + // @Test + // public void test2() { + // String service = "http://localhost:8080/template"; + // Client client = ClientBuilder.newClient(); + // WebTarget target = client.target(service); + // String res = target.queryParam("profile", "st:dbedit").request().get(String.class); + // assertEquals(true, res.length() > 
17000); + // assertEquals(true, res.contains("Front yougoslave de la Seconde Guerre mondiale")); + // System.out.println(res.length()); + // } + + // @Test + // public void test3() { + // String service = "http://localhost:8080/template"; + // Client client = ClientBuilder.newClient(); + // WebTarget target = client.target(service); + // String res = target.queryParam("profile", "st:dbpedia") + // .queryParam("uri", "http://fr.dbpedia.org/resource/Jimmy_Page") + // .request() + // .get(String.class); + // assertEquals(true, res.contains("Led Zeppelin")); + // } + + // @Test + // public void test4() { + // String service = "http://localhost:8080/tutorial/cdn"; + // Client client = ClientBuilder.newClient(); + // WebTarget target = client.target(service); + // String res = target.request().get(String.class); + // assertEquals(true, res.contains("Siècle")); + // } + + // @Test + // public void test5() { + // String service = "http://localhost:8080/process/owlrl"; + // Client client = ClientBuilder.newClient(); + // WebTarget target = client.target(service); + // String res = target.queryParam("uri", "/data/primer.owl").request().get(String.class); + // assertEquals(true, res.contains("Statement not supported in an Equivalent Class Expression")); + // } +} diff --git a/corese-server/src/test/java/fr/inria/corese/server/webservice/HttpsIT.java b/corese-server/src/test/java/fr/inria/corese/server/webservice/HttpsIT.java deleted file mode 100644 index f19155594f..0000000000 --- a/corese-server/src/test/java/fr/inria/corese/server/webservice/HttpsIT.java +++ /dev/null @@ -1,118 +0,0 @@ -package fr.inria.corese.server.webservice; - -import fr.inria.corese.core.load.Service; -import fr.inria.corese.kgram.core.Mapping; -import fr.inria.corese.kgram.core.Mappings; -import fr.inria.corese.sparql.triple.parser.URLServer; - -import org.junit.BeforeClass; -import org.junit.Test; - -import javax.net.ssl.SSLContext; -import javax.net.ssl.TrustManager; -import 
javax.net.ssl.X509TrustManager; -import jakarta.ws.rs.client.Client; -import jakarta.ws.rs.client.ClientBuilder; -import jakarta.ws.rs.client.WebTarget; -import java.io.IOException; -import java.security.cert.CertificateException; -import java.security.cert.X509Certificate; -import java.util.logging.Logger; - -import static org.junit.Assert.assertEquals; - -/** - * @author Olivier Corby, Wimmics INRIA I3S, 2015 - */ -public class HttpsIT { - private static final Logger logger = Logger.getLogger(HttpsIT.class.getName()); - private boolean isDebug = true; - private static Process server; - private static final String SERVER_URL = "https://localhost:8443/"; - private static final String SPARQL_ENDPOINT_URL = SERVER_URL + "sparql"; - private static final String TEMPLATE_URL = SERVER_URL + "template"; - private static final String CDN_URL = SERVER_URL + "tutorial/cdn"; - private static final String ORLRL_URL = SERVER_URL + "process/owlrl"; - private static ClientBuilder clientBuilder; - @BeforeClass - public static void init() throws InterruptedException, IOException, Exception - { - SSLContext sslcontext = SSLContext.getInstance("TLS"); - - sslcontext.init(null, new TrustManager[]{new X509TrustManager() { - public void checkClientTrusted(X509Certificate[] arg0, String arg1) throws CertificateException {} - public void checkServerTrusted(X509Certificate[] arg0, String arg1) throws CertificateException {} - public X509Certificate[] getAcceptedIssuers() { return new X509Certificate[0];} - }}, new java.security.SecureRandom()); - - clientBuilder = ClientBuilder.newBuilder() - .sslContext(sslcontext) - .hostnameVerifier((s1, s2) -> true); - logger.info( "starting in " + System.getProperty( "user.dir" ) ); - - // The server is launched using a bash script in src/test/resources/launch_ssl_server.sh - // The launch and shutdown operations are done by maven (see corese_server/pom.xml). 
- Thread.sleep( 5000 ); - } - - @Test - public void test() throws Exception - { - Service serv = new Service( new URLServer(SPARQL_ENDPOINT_URL) , clientBuilder ); - - String q = "select * where {?x ?p ?y} limit 10"; - Mappings map = serv.select( q ); - for ( Mapping m : map ) - { - logger.info( m.toString() ); - } - assertEquals( 10, map.size() ); - } - - @Test - public void test2() - { - String service = TEMPLATE_URL; - Client client = clientBuilder.build(); - WebTarget target = client.target( service ); - String res = target.queryParam( "profile", "st:dbedit" ).request().get( String.class ); - assertEquals( true, res.length() > 17000 ); - assertEquals( true, res.contains( "Front yougoslave de la Seconde Guerre mondiale" ) ); - logger.info( "" + res.length() ); - } - - - @Test - public void test3() - { - String service = TEMPLATE_URL; - Client client = clientBuilder.build(); - WebTarget target = client.target( service ); - String res = target.queryParam( "profile", "st:dbpedia" ) - .queryParam( "uri", "http://fr.dbpedia.org/resource/Jimmy_Page" ) - .request() - .get( String.class ); - assertEquals( true, res.contains( "Led Zeppelin" ) ); - } - - @Test - public void test4() - { - String service = CDN_URL; - Client client = clientBuilder.build(); - WebTarget target = client.target( service ); - String res = target.request().get( String.class ); - assertEquals( true, res.contains( "Siècle" ) ); - } - - - @Test - public void test5() - { - String service = ORLRL_URL; - Client client = clientBuilder.build(); - WebTarget target = client.target( service ); - String res = target.queryParam( "uri", "/data/primer.owl" ).request().get( String.class ); - assertEquals( true, res.contains( "Statement not supported in an Equivalent Class Expression" ) ); - } -} diff --git a/corese-server/src/test/java/fr/inria/corese/server/webservice/HttpsServerTest.java b/corese-server/src/test/java/fr/inria/corese/server/webservice/HttpsServerTest.java new file mode 100644 index 
0000000000..30f9124b02 --- /dev/null +++ b/corese-server/src/test/java/fr/inria/corese/server/webservice/HttpsServerTest.java @@ -0,0 +1,127 @@ +package fr.inria.corese.server.webservice; + +import fr.inria.corese.core.load.Service; +import fr.inria.corese.kgram.core.Mapping; +import fr.inria.corese.kgram.core.Mappings; +import fr.inria.corese.sparql.triple.parser.URLServer; + +import org.junit.BeforeClass; +import org.junit.Test; + +import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManager; +import javax.net.ssl.X509TrustManager; +import jakarta.ws.rs.client.Client; +import jakarta.ws.rs.client.ClientBuilder; +import jakarta.ws.rs.client.WebTarget; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.net.HttpURLConnection; +import java.net.MalformedURLException; +import java.net.ProtocolException; +import java.net.URL; +import java.security.cert.CertificateException; +import java.security.cert.X509Certificate; +import java.util.HashMap; +import java.util.Map; +import java.util.logging.Logger; + +import static org.junit.Assert.assertEquals; + +/** + * @author Olivier Corby, Wimmics INRIA I3S, 2015 + */ +public class HttpsServerTest { + private static final Logger logger = Logger.getLogger(HttpsServerTest.class.getName()); + private boolean isDebug = true; + private static Process server; + private static final String SERVER_URL = "https://localhost:8443/"; + private static final String SPARQL_ENDPOINT_URL = SERVER_URL + "sparql"; + private static final String TEMPLATE_URL = SERVER_URL + "template"; + private static final String CDN_URL = SERVER_URL + "tutorial/cdn"; + private static final String ORLRL_URL = SERVER_URL + "process/owlrl"; + private static ClientBuilder clientBuilder; + + @BeforeClass + public static void init() throws InterruptedException, IOException, Exception { + SSLContext sslcontext = SSLContext.getInstance("TLS"); + + sslcontext.init(null, new TrustManager[] { new 
X509TrustManager() { + public void checkClientTrusted(X509Certificate[] arg0, String arg1) throws CertificateException { + } + + public void checkServerTrusted(X509Certificate[] arg0, String arg1) throws CertificateException { + } + + public X509Certificate[] getAcceptedIssuers() { + return new X509Certificate[0]; + } + } }, new java.security.SecureRandom()); + + clientBuilder = ClientBuilder.newBuilder() + .sslContext(sslcontext) + .hostnameVerifier((s1, s2) -> true); + logger.info("starting in " + System.getProperty("user.dir")); + + // The server is launched using a bash script in + // src/test/resources/launch_ssl_server.sh + // The launch and shutdown operations are done by maven (see + // corese_server/pom.xml). + Thread.sleep(5000); + } + + @Test + public void test() throws Exception { + Service serv = new Service(new URLServer(SPARQL_ENDPOINT_URL), clientBuilder); + + String q = "select * where {?x ?p ?y} limit 10"; + Mappings map = serv.select(q); + for (Mapping m : map) { + logger.info(m.toString()); + } + assertEquals(10, map.size()); + } + + @Test + public void test2() { + String service = TEMPLATE_URL; + Client client = clientBuilder.build(); + WebTarget target = client.target(service); + String res = target.queryParam("profile", "st:dbedit").request().get(String.class); + assertEquals(true, res.length() > 17000); + assertEquals(true, res.contains("Front yougoslave de la Seconde Guerre mondiale")); + logger.info("" + res.length()); + } + + @Test + public void test3() { + String service = TEMPLATE_URL; + Client client = clientBuilder.build(); + WebTarget target = client.target(service); + String res = target.queryParam("profile", "st:dbpedia") + .queryParam("uri", "http://fr.dbpedia.org/resource/Jimmy_Page") + .request() + .get(String.class); + assertEquals(true, res.contains("Led Zeppelin")); + } + + @Test + public void test4() { + String service = CDN_URL; + Client client = clientBuilder.build(); + WebTarget target = client.target(service); + String 
res = target.request().get(String.class); + assertEquals(true, res.contains("Siècle")); + } + + @Test + public void test5() { + String service = ORLRL_URL; + Client client = clientBuilder.build(); + WebTarget target = client.target(service); + String res = target.queryParam("uri", "/data/primer.owl").request().get(String.class); + assertEquals(true, res.contains("Statement not supported in an Equivalent Class Expression")); + } +} diff --git a/corese-server/src/test/java/fr/inria/corese/server/webservice/SPARQLEndpointDescriptionTest.java b/corese-server/src/test/java/fr/inria/corese/server/webservice/SPARQLEndpointDescriptionTest.java new file mode 100644 index 0000000000..59a5307791 --- /dev/null +++ b/corese-server/src/test/java/fr/inria/corese/server/webservice/SPARQLEndpointDescriptionTest.java @@ -0,0 +1,248 @@ +package fr.inria.corese.server.webservice; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.io.BufferedReader; +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.net.HttpURLConnection; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import fr.inria.corese.core.Graph; +import fr.inria.corese.core.load.Load; +import static fr.inria.corese.core.print.ResultFormat.RDF_XML; +import static fr.inria.corese.core.print.ResultFormat.TURTLE_TEXT; +import static fr.inria.corese.core.api.Loader.RDFXML_FORMAT; +import static fr.inria.corese.core.api.Loader.TURTLE_FORMAT; + +/** + * Test of the behavior of the corese server against HTTP requests. + * + * Tests: + * - Is there an RDF void description available at /.well-known/void? + * - Is there a SPARQL endpoint available at /sparql? 
+ * - Does the sparql endpoint answers to a simple SPARQL query? + * SPARQL: + * - Are every SPARQL query types supported? + * - Are every features of the SPARQL query language supported? + * - Are the limits of the SPARQL query language respected? + * - Is the timeout of the query respected ? + * + * @see SPARQL 1.1 + * Protocol + * + * @author Pierre Maillot, P16 Wimmics INRIA I3S, 2024 + */ +public class SPARQLEndpointDescriptionTest { + + private static final Logger logger = LogManager.getLogger(SPARQLEndpointDescriptionTest.class); + + private static Process server; + + private static final String SERVER_URL = "http://localhost:8080/"; + private static final String SPARQL_ENDPOINT_URL = SERVER_URL + "sparql"; + + /** + * Start the server before running the tests. + * Loads a part of the DBpedia dataset in the server. + */ + @BeforeClass + public static void init() throws InterruptedException, IOException { + File turtleFile = new File("src/test/resources/data.ttl"); + String turtleFileAbsolutePath = turtleFile.getAbsolutePath(); + + File trigFile = new File("src/test/resources/data.trig"); + String trigFileAbsolutePath = trigFile.getAbsolutePath(); + + System.out.println("starting in " + System.getProperty("user.dir")); + server = new ProcessBuilder().inheritIO().command( + "java", + "-jar", "./target/corese-server-4.5.1.jar", + "-lh", + "-l", turtleFileAbsolutePath, + "-l", trigFileAbsolutePath).start(); + Thread.sleep(5000); + } + + @AfterClass + public static void shutdown() { + server.destroy(); + } + + /** + * Is there an RDF document with a description of the SPARQL endpoint available + * at /.well-known/void? 
+ * + * @throws Exception + */ + @Test + public void sparqlWellKnownVoidXMLRDF() throws Exception { + String sparqlEndpoint = SPARQL_ENDPOINT_URL + "/.well-known/void"; + + List> headers = new LinkedList<>(); + List contentTypeHeader = new LinkedList<>(); + contentTypeHeader.add("Content-Type"); + contentTypeHeader.add(RDF_XML); + headers.add(contentTypeHeader); + + HttpURLConnection con = SPARQLTestUtils.getConnection(sparqlEndpoint, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + Graph voidGraph = new Graph(); + Load load = Load.create(voidGraph); + InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes()); + load.parse(inputStream, RDFXML_FORMAT); + + assertEquals(status, 200); + assertEquals(con.getContentType(), RDF_XML); + assertTrue(voidGraph.size() > 0); + } + + /** + * Is there an RDF document with a description of the SPARQL endpoint available + * at /.well-known/void? 
+ * + * @throws Exception + */ + @Test + public void sparqlWellKnownVoidTurtleRDF() throws Exception { + String sparqlEndpoint = SPARQL_ENDPOINT_URL + "/.well-known/void"; + + List> headers = new LinkedList<>(); + List contentTypeHeader = new LinkedList<>(); + contentTypeHeader.add("Content-Type"); + contentTypeHeader.add(TURTLE_TEXT); + headers.add(contentTypeHeader); + + HttpURLConnection con = SPARQLTestUtils.getConnection(sparqlEndpoint, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + Graph voidGraph = new Graph(); + Load load = Load.create(voidGraph); + InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes()); + load.parse(inputStream, TURTLE_FORMAT); + + assertEquals(status, 200); + assertEquals(con.getContentType(), RDF_XML); + assertTrue(voidGraph.size() > 0); + } + + /** + * Is there an RDF document with a description of the SPARQL endpoint available + * at /.well-known/void? 
+ * + * @throws Exception + */ + @Test + public void wellKnownVoidXMLRDF() throws Exception { + String sparqlEndpoint = SERVER_URL + ".well-known/void"; + + List> headers = new LinkedList<>(); + List contentTypeHeader = new LinkedList<>(); + contentTypeHeader.add("Content-Type"); + contentTypeHeader.add(RDF_XML); + headers.add(contentTypeHeader); + + HttpURLConnection con = SPARQLTestUtils.getConnection(sparqlEndpoint, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + Graph voidGraph = new Graph(); + Load load = Load.create(voidGraph); + InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes()); + load.parse(inputStream, RDFXML_FORMAT); + + assertEquals(status, 200); + assertEquals(con.getContentType(), RDF_XML); + assertTrue(voidGraph.size() > 0); + } + + /** + * Is there an RDF document with a description of the SPARQL endpoint available + * at /.well-known/void? 
+ * + * @throws Exception + */ + @Test + public void wellKnownVoidTurtleRDF() throws Exception { + String sparqlEndpoint = SERVER_URL + ".well-known/void"; + + List> headers = new LinkedList<>(); + List contentTypeHeader = new LinkedList<>(); + contentTypeHeader.add("Content-Type"); + contentTypeHeader.add(TURTLE_TEXT); + headers.add(contentTypeHeader); + + HttpURLConnection con = SPARQLTestUtils.getConnection(sparqlEndpoint, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + Graph voidGraph = new Graph(); + Load load = Load.create(voidGraph); + InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes()); + load.parse(inputStream, TURTLE_FORMAT); + + assertEquals(status, 200); + assertEquals(con.getContentType(), RDF_XML); + assertTrue(voidGraph.size() > 0); + } +} diff --git a/corese-server/src/test/java/fr/inria/corese/server/webservice/SPARQLEndpointQueryTest.java b/corese-server/src/test/java/fr/inria/corese/server/webservice/SPARQLEndpointQueryTest.java new file mode 100644 index 0000000000..e0e87e21d2 --- /dev/null +++ b/corese-server/src/test/java/fr/inria/corese/server/webservice/SPARQLEndpointQueryTest.java @@ -0,0 +1,2546 @@ +package fr.inria.corese.server.webservice; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.io.BufferedReader; +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.net.HttpURLConnection; +import java.util.ArrayList; +import java.util.LinkedList; +import java.util.List; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import 
org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import fr.inria.corese.core.Graph; +import fr.inria.corese.core.load.Load; +import fr.inria.corese.core.load.result.SPARQLJSONResult; +import fr.inria.corese.core.load.result.SPARQLResult; +import fr.inria.corese.kgram.core.Mappings; + +import static fr.inria.corese.core.print.ResultFormat.JSON_LD; +import static fr.inria.corese.core.print.ResultFormat.NT_TEXT; +import static fr.inria.corese.core.print.ResultFormat.N_QUADS; +import static fr.inria.corese.core.print.ResultFormat.N_TRIPLES; +import static fr.inria.corese.core.print.ResultFormat.RDF_XML; +import static fr.inria.corese.core.print.ResultFormat.SPARQL_RESULTS_CSV; +import static fr.inria.corese.core.print.ResultFormat.SPARQL_RESULTS_JSON; +import static fr.inria.corese.core.print.ResultFormat.SPARQL_RESULTS_MD; +import static fr.inria.corese.core.print.ResultFormat.SPARQL_RESULTS_TSV; +import static fr.inria.corese.core.print.ResultFormat.SPARQL_RESULTS_XML; +import static fr.inria.corese.core.print.ResultFormat.TRIG; +import static fr.inria.corese.core.print.ResultFormat.TURTLE; +import static fr.inria.corese.core.print.ResultFormat.TURTLE_TEXT; +import static fr.inria.corese.core.api.Loader.JSONLD_FORMAT; +import static fr.inria.corese.core.api.Loader.NQUADS_FORMAT; +import static fr.inria.corese.core.api.Loader.NT_FORMAT; +import static fr.inria.corese.core.api.Loader.RDFXML_FORMAT; +import static fr.inria.corese.core.api.Loader.TRIG_FORMAT; +import static fr.inria.corese.core.api.Loader.TURTLE_FORMAT; +import static jakarta.ws.rs.core.MediaType.TEXT_HTML; + +/** + * Test of the behavior of the corese server against SPARQL queries. + * + * Tests: + * - Is there an RDF void description available at /.well-known/void? + * - Is there a SPARQL endpoint available at /sparql? + * - Does the sparql endpoint answers to a simple SPARQL query? 
For every output + * format: + * - application/sparql-results+xml + * - application/sparql-results+json + * - text/csv + * - text/tab-separated-values + * - text/markdown + * - text/turtle + * - application/rdf+xml + * - application/trig + * - application/ld+json + * - application/n-triples + * - application/n-quads + * - rdf-canon#sha-256 + * - rdf-canon#sha-384 + * SPARQL: + * - Are every SPARQL query types supported? + * - Are every features of the SPARQL query language supported? + * - Are the limits of the SPARQL query language respected? + * - Is the timeout of the query respected ? + * + * @see SPARQL 1.1 + * Protocol + * + * @author Pierre Maillot, P16 Wimmics INRIA I3S, 2024 + */ +public class SPARQLEndpointQueryTest { + + private static final Logger logger = LogManager.getLogger(SPARQLEndpointQueryTest.class); + + private static Process server; + + private static final String SERVER_URL = "http://localhost:8080/"; + private static final String SPARQL_ENDPOINT_URL = SERVER_URL + "sparql"; + + /** + * Start the server before running the tests. + * Loads a part of the DBpedia dataset in the server. + */ + @BeforeClass + public static void init() throws InterruptedException, IOException { + File turtleFile = new File("src/test/resources/data.ttl"); + String turtleFileAbsolutePath = turtleFile.getAbsolutePath(); + + File trigFile = new File("src/test/resources/data.trig"); + String trigFileAbsolutePath = trigFile.getAbsolutePath(); + + System.out.println("starting in " + System.getProperty("user.dir")); + server = new ProcessBuilder().inheritIO().command( + "java", + "-jar", "./target/corese-server-4.5.1.jar", + "-lh", + "-l", turtleFileAbsolutePath, + "-l", trigFileAbsolutePath).start(); + Thread.sleep(5000); + } + + @AfterClass + public static void shutdown() { + server.destroy(); + } + + /** + * Does the endpoint answer to SELECT a query via GET? 
+ * + * @throws Exception + */ + @Test + public void sparqlSelectResultsXMLEndpointGet() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_XML); + headers.add(acceptHeader); + + String query = "select * where {?x ?p ?y} limit 1"; + String urlQuery = SPARQL_ENDPOINT_URL + "?" + SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_XML, con.getContentType()); + } + + /** + * Does the endpoint answer to a SELECT query via URL-encoded POST? 
+ * + * @throws Exception + */ + @Test + public void sparqlSelectResultsXMLEndpointUrlEncodedPost() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_XML); + headers.add(acceptHeader); + + String query = "select * where {?x ?p ?y} limit 1"; + HttpURLConnection con = SPARQLTestUtils.postUrlencodedConnection(SPARQL_ENDPOINT_URL, headers, + SPARQLTestUtils.generateSPARQLQueryParameters(query)); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_XML, con.getContentType()); + } + + /** + * Does the endpoint answer to a SELECT query via URL-encoded POST? + * + * @throws Exception + */ + @Test + public void sparqlSelectResultsXMLEndpointUnencodedPost() throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_XML); + List contentTypeHeader = new LinkedList<>(); + contentTypeHeader.add("Content-Type"); + contentTypeHeader.add("application/sparql-query"); + headers.add(acceptHeader); + headers.add(contentTypeHeader); + + String query = "select * where {?x ?p ?y} limit 1"; + HttpURLConnection con = SPARQLTestUtils.postConnection(SPARQL_ENDPOINT_URL, headers, query); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_XML, 
con.getContentType()); + } + + /** + * Does the endpoint answer to SELECT a query via GET? + * + * @throws Exception + */ + @Test + public void sparqlSelectCSVEndpointGet() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_CSV); + headers.add(acceptHeader); + + String query = "select * where {?x ?p ?y} limit 1"; + String urlQuery = SPARQL_ENDPOINT_URL + "?" + SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_CSV, con.getContentType()); + } + + /** + * Does the endpoint answer to a SELECT query via URL-encoded POST? 
+ * + * @throws Exception + */ + @Test + public void sparqlSelectCSVEndpointUrlEncodedPost() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_CSV); + headers.add(acceptHeader); + + String query = "select * where {?x ?p ?y} limit 1"; + HttpURLConnection con = SPARQLTestUtils.postUrlencodedConnection(SPARQL_ENDPOINT_URL, headers, + SPARQLTestUtils.generateSPARQLQueryParameters(query)); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_CSV, con.getContentType()); + } + + /** + * Does the endpoint answer to a SELECT query via URL-encoded POST? + * + * @throws Exception + */ + @Test + public void sparqlSelectCSVEndpointUnencodedPost() throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_CSV); + List contentTypeHeader = new LinkedList<>(); + contentTypeHeader.add("Content-Type"); + contentTypeHeader.add("application/sparql-query"); + headers.add(acceptHeader); + headers.add(contentTypeHeader); + + String query = "select * where {?x ?p ?y} limit 1"; + HttpURLConnection con = SPARQLTestUtils.postConnection(SPARQL_ENDPOINT_URL, headers, query); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_CSV, 
con.getContentType()); + } + + /** + * Does the endpoint answer to SELECT a query via GET? + * + * @throws Exception + */ + @Test + public void sparqlSelectTSVEndpointGet() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_TSV); + headers.add(acceptHeader); + + String query = "select * where {?x ?p ?y} limit 1"; + String urlQuery = SPARQL_ENDPOINT_URL + "?" + SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_TSV, con.getContentType()); + } + + /** + * Does the endpoint answer to a SELECT query via URL-encoded POST? 
+ * + * @throws Exception + */ + @Test + public void sparqlSelectTSVEndpointUrlEncodedPost() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_TSV); + headers.add(acceptHeader); + + String query = "select * where {?x ?p ?y} limit 1"; + HttpURLConnection con = SPARQLTestUtils.postUrlencodedConnection(SPARQL_ENDPOINT_URL, headers, + SPARQLTestUtils.generateSPARQLQueryParameters(query)); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_TSV, con.getContentType()); + } + + /** + * Does the endpoint answer to a SELECT query via URL-encoded POST? + * + * @throws Exception + */ + @Test + public void sparqlSelectTSVEndpointUnencodedPost() throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_TSV); + List contentTypeHeader = new LinkedList<>(); + contentTypeHeader.add("Content-Type"); + contentTypeHeader.add("application/sparql-query"); + headers.add(acceptHeader); + headers.add(contentTypeHeader); + + String query = "select * where {?x ?p ?y} limit 1"; + HttpURLConnection con = SPARQLTestUtils.postConnection(SPARQL_ENDPOINT_URL, headers, query); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_TSV, 
con.getContentType()); + } + + /** + * Does the endpoint answer to SELECT a query via GET? + * + * @throws Exception + */ + @Test + public void sparqlSelectMarkdownEndpointGet() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_MD); + headers.add(acceptHeader); + + String query = "select * where {?x ?p ?y} limit 1"; + String urlQuery = SPARQL_ENDPOINT_URL + "?" + SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_MD, con.getContentType()); + } + + /** + * Does the endpoint answer to a SELECT query via URL-encoded POST? 
+ * + * @throws Exception + */ + @Test + public void sparqlSelectMarkdownEndpointUrlEncodedPost() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_MD); + headers.add(acceptHeader); + + String query = "select * where {?x ?p ?y} limit 1"; + HttpURLConnection con = SPARQLTestUtils.postUrlencodedConnection(SPARQL_ENDPOINT_URL, headers, + SPARQLTestUtils.generateSPARQLQueryParameters(query)); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_MD, con.getContentType()); + } + + /** + * Does the endpoint answer to a SELECT query via URL-encoded POST? + * + * @throws Exception + */ + @Test + public void sparqlSelectMarkdownEndpointUnencodedPost() throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_MD); + List contentTypeHeader = new LinkedList<>(); + contentTypeHeader.add("Content-Type"); + contentTypeHeader.add("application/sparql-query"); + headers.add(acceptHeader); + headers.add(contentTypeHeader); + + String query = "select * where {?x ?p ?y} limit 1"; + HttpURLConnection con = SPARQLTestUtils.postConnection(SPARQL_ENDPOINT_URL, headers, query); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_MD, 
con.getContentType()); + } + + /** + * Does the endpoint answer to SELECT a query via GET? + * + * @throws Exception + */ + @Test + public void sparqlSelectJSONEndpointGet() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_JSON); + headers.add(acceptHeader); + + String query = "select * where {?x ?p ?y} limit 1"; + String urlQuery = SPARQL_ENDPOINT_URL + "?" + SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_JSON, con.getContentType()); + } + + /** + * Does the endpoint answer to a SELECT query via URL-encoded POST? 
+ * + * @throws Exception + */ + @Test + public void sparqlSelectJSONEndpointUrlEncodedPost() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_JSON); + headers.add(acceptHeader); + + String query = "select * where {?x ?p ?y} limit 1"; + HttpURLConnection con = SPARQLTestUtils.postUrlencodedConnection(SPARQL_ENDPOINT_URL, headers, + SPARQLTestUtils.generateSPARQLQueryParameters(query)); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_JSON, con.getContentType()); + } + + /** + * Does the endpoint answer to a SELECT query via URL-encoded POST? + * + * @throws Exception + */ + @Test + public void sparqlSelectJSONEndpointUnencodedPost() throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_JSON); + List contentTypeHeader = new LinkedList<>(); + contentTypeHeader.add("Content-Type"); + contentTypeHeader.add("application/sparql-query"); + headers.add(acceptHeader); + headers.add(contentTypeHeader); + + String query = "select * where {?x ?p ?y} limit 1"; + HttpURLConnection con = SPARQLTestUtils.postConnection(SPARQL_ENDPOINT_URL, headers, query); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_JSON, 
con.getContentType()); + } + + /** + * Does the endpoint answer to ASK a query via GET? + * + * @throws Exception + */ + @Test + public void sparqlAskRDFXMLEndpointGet() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_XML); + headers.add(acceptHeader); + + String query = "ASK {?x ?p ?y}"; + String urlQuery = SPARQL_ENDPOINT_URL + "?" + SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_XML, con.getContentType()); + } + + /** + * Does the endpoint answer to a ASK query via URL-encoded POST? 
+ * + * @throws Exception + */ + @Test + public void sparqlAskRDFXMLEndpointUrlEncodedPost() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_XML); + headers.add(acceptHeader); + + String query = "ASK {?x ?p ?y}"; + HttpURLConnection con = SPARQLTestUtils.postUrlencodedConnection(SPARQL_ENDPOINT_URL, headers, + SPARQLTestUtils.generateSPARQLQueryParameters(query)); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_XML, con.getContentType()); + } + + /** + * Does the endpoint answer to a ASK query via URL-encoded POST? + * + * @throws Exception + */ + @Test + public void sparqlAskRDFXMLEndpointUnencodedPost() throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_XML); + List contentTypeHeader = new LinkedList<>(); + contentTypeHeader.add("Content-Type"); + contentTypeHeader.add("application/sparql-query"); + headers.add(acceptHeader); + headers.add(contentTypeHeader); + + String query = "ASK {?x ?p ?y}"; + HttpURLConnection con = SPARQLTestUtils.postConnection(SPARQL_ENDPOINT_URL, headers, query); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_XML, con.getContentType()); + } + + /** + * Does the endpoint 
answer to ASK a query via GET? + * + * @throws Exception + */ + @Test + public void sparqlAskCSVEndpointGet() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_CSV); + headers.add(acceptHeader); + + String query = "ASK {?x ?p ?y}"; + String urlQuery = SPARQL_ENDPOINT_URL + "?" + SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_CSV, con.getContentType()); + } + + /** + * Does the endpoint answer to a ASK query via URL-encoded POST? + * + * @throws Exception + */ + @Test + public void sparqlAskCSVEndpointUrlEncodedPost() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_CSV); + headers.add(acceptHeader); + + String query = "ASK {?x ?p ?y}"; + HttpURLConnection con = SPARQLTestUtils.postUrlencodedConnection(SPARQL_ENDPOINT_URL, headers, + SPARQLTestUtils.generateSPARQLQueryParameters(query)); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_CSV, con.getContentType()); + } + + /** + * Does the endpoint answer to a ASK query via URL-encoded POST? 
+ * + * @throws Exception + */ + @Test + public void sparqlAskCSVEndpointUnencodedPost() throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_CSV); + List contentTypeHeader = new LinkedList<>(); + contentTypeHeader.add("Content-Type"); + contentTypeHeader.add("application/sparql-query"); + headers.add(acceptHeader); + headers.add(contentTypeHeader); + + String query = "ASK {?x ?p ?y}"; + HttpURLConnection con = SPARQLTestUtils.postConnection(SPARQL_ENDPOINT_URL, headers, query); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_CSV, con.getContentType()); + } + + /** + * Does the endpoint answer to ASK a query via GET? + * + * @throws Exception + */ + @Test + public void sparqlAskTSVEndpointGet() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_TSV); + headers.add(acceptHeader); + + String query = "ASK {?x ?p ?y}"; + String urlQuery = SPARQL_ENDPOINT_URL + "?" 
+ SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_TSV, con.getContentType()); + } + + /** + * Does the endpoint answer to a ASK query via URL-encoded POST? + * + * @throws Exception + */ + @Test + public void sparqlAskTSVEndpointUrlEncodedPost() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_TSV); + headers.add(acceptHeader); + + String query = "ASK {?x ?p ?y}"; + HttpURLConnection con = SPARQLTestUtils.postUrlencodedConnection(SPARQL_ENDPOINT_URL, headers, + SPARQLTestUtils.generateSPARQLQueryParameters(query)); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_TSV, con.getContentType()); + } + + /** + * Does the endpoint answer to a ASK query via URL-encoded POST? 
+ * + * @throws Exception + */ + @Test + public void sparqlAskTSVEndpointUnencodedPost() throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_TSV); + List contentTypeHeader = new LinkedList<>(); + contentTypeHeader.add("Content-Type"); + contentTypeHeader.add("application/sparql-query"); + headers.add(acceptHeader); + headers.add(contentTypeHeader); + + String query = "ASK {?x ?p ?y}"; + HttpURLConnection con = SPARQLTestUtils.postConnection(SPARQL_ENDPOINT_URL, headers, query); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_TSV, con.getContentType()); + } + + /** + * Does the endpoint answer to ASK a query via GET? + * + * @throws Exception + */ + @Test + public void sparqlAskMarkdownEndpointGet() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_MD); + headers.add(acceptHeader); + + String query = "ASK {?x ?p ?y}"; + String urlQuery = SPARQL_ENDPOINT_URL + "?" 
+ SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_MD, con.getContentType()); + } + + /** + * Does the endpoint answer to a ASK query via URL-encoded POST? + * + * @throws Exception + */ + @Test + public void sparqlAskMarkdownEndpointUrlEncodedPost() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_MD); + headers.add(acceptHeader); + + String query = "ASK {?x ?p ?y}"; + HttpURLConnection con = SPARQLTestUtils.postUrlencodedConnection(SPARQL_ENDPOINT_URL, headers, + SPARQLTestUtils.generateSPARQLQueryParameters(query)); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_MD, con.getContentType()); + } + + /** + * Does the endpoint answer to a ASK query via URL-encoded POST? 
+ * + * @throws Exception + */ + @Test + public void sparqlAskMarkdownEndpointUnencodedPost() throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_MD); + List contentTypeHeader = new LinkedList<>(); + contentTypeHeader.add("Content-Type"); + contentTypeHeader.add("application/sparql-query"); + headers.add(acceptHeader); + headers.add(contentTypeHeader); + + String query = "ASK {?x ?p ?y}"; + HttpURLConnection con = SPARQLTestUtils.postConnection(SPARQL_ENDPOINT_URL, headers, query); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_MD, con.getContentType()); + } + + /** + * Does the endpoint answer to ASK a query via GET? + * + * @throws Exception + */ + @Test + public void sparqlAskJSONEndpointGet() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_JSON); + headers.add(acceptHeader); + + String query = "ASK {?x ?p ?y}"; + String urlQuery = SPARQL_ENDPOINT_URL + "?" 
+ SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_JSON, con.getContentType()); + } + + /** + * Does the endpoint answer to a ASK query via URL-encoded POST? + * + * @throws Exception + */ + @Test + public void sparqlAskJSONEndpointUrlEncodedPost() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_JSON); + headers.add(acceptHeader); + + String query = "ASK {?x ?p ?y}"; + HttpURLConnection con = SPARQLTestUtils.postUrlencodedConnection(SPARQL_ENDPOINT_URL, headers, + SPARQLTestUtils.generateSPARQLQueryParameters(query)); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_JSON, con.getContentType()); + } + + /** + * Does the endpoint answer to a ASK query via URL-encoded POST? 
+ * + * @throws Exception + */ + @Test + public void sparqlAskJSONEndpointUnencodedPost() throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_JSON); + List contentTypeHeader = new LinkedList<>(); + contentTypeHeader.add("Content-Type"); + contentTypeHeader.add("application/sparql-query"); + headers.add(acceptHeader); + headers.add(contentTypeHeader); + + String query = "ASK {?x ?p ?y}"; + HttpURLConnection con = SPARQLTestUtils.postConnection(SPARQL_ENDPOINT_URL, headers, query); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_JSON, con.getContentType()); + } + + + /** + * Does the endpoint answer to CONSTRUCT a query via GET? + * + * @throws Exception + */ + @Test + public void sparqlConstructTurtleEndpointGet() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(TURTLE_TEXT); + headers.add(acceptHeader); + + String query = "CONSTRUCT { ?x ?p ?y } WHERE { ?x ?p ?y } LIMIT 1"; + String urlQuery = SPARQL_ENDPOINT_URL + "?" 
+ SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + Graph constructGraph = new Graph(); + Load load = Load.create(constructGraph); + InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes()); + load.parse(inputStream, TURTLE_FORMAT); + + assertEquals(200, status); + assertEquals(TURTLE_TEXT, con.getContentType()); + assertEquals(1, constructGraph.size()); + } + + /** + * Does the endpoint answer to a CONSTRUCT query via URL-encoded POST? + * + * @throws Exception + */ + @Test + public void sparqlConstructTurtleEndpointUrlEncodedPost() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(TURTLE_TEXT); + headers.add(acceptHeader); + + String query = "CONSTRUCT { ?x ?p ?y } WHERE { ?x ?p ?y } LIMIT 1"; + HttpURLConnection con = SPARQLTestUtils.postUrlencodedConnection(SPARQL_ENDPOINT_URL, headers, + SPARQLTestUtils.generateSPARQLQueryParameters(query)); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + Graph constructGraph = new Graph(); + Load load = Load.create(constructGraph); + InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes()); + load.parse(inputStream, TURTLE_FORMAT); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(TURTLE_TEXT, con.getContentType()); + 
assertEquals(1, constructGraph.size());
+    }
+
+    /**
+     * Does the endpoint answer to a CONSTRUCT query via unencoded POST?
+     *
+     * @throws Exception
+     */
+    @Test
+    public void sparqlConstructTurtleEndpointUnencodedPost() throws Exception {
+        List<List<String>> headers = new LinkedList<>();
+        List<String> acceptHeader = new LinkedList<>();
+        acceptHeader.add("Accept");
+        acceptHeader.add(TURTLE_TEXT);
+        List<String> contentTypeHeader = new LinkedList<>();
+        contentTypeHeader.add("Content-Type");
+        contentTypeHeader.add("application/sparql-query");
+        headers.add(acceptHeader);
+        headers.add(contentTypeHeader);
+
+        String query = "CONSTRUCT { ?x ?p ?y } WHERE { ?x ?p ?y } LIMIT 1";
+        HttpURLConnection con = SPARQLTestUtils.postConnection(SPARQL_ENDPOINT_URL, headers, query);
+
+        BufferedReader in = new BufferedReader(
+                new InputStreamReader(con.getInputStream()));
+        String inputLine;
+        StringBuffer content = new StringBuffer();
+        while ((inputLine = in.readLine()) != null) {
+            content.append(inputLine);
+        }
+        in.close();
+
+        int status = con.getResponseCode();
+
+        con.disconnect();
+
+        Graph constructGraph = new Graph();
+        Load load = Load.create(constructGraph);
+        InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes());
+        load.parse(inputStream, TURTLE_FORMAT);
+
+        assertEquals(200, status);
+        assertEquals(TURTLE_TEXT, con.getContentType());
+        assertEquals(1, constructGraph.size());
+    }
+
+    /**
+     * Does the endpoint answer to CONSTRUCT a query via GET?
+     *
+     * @throws Exception
+     */
+    @Test
+    public void sparqlConstructRDFXMLEndpointGet() throws Exception {
+
+        List<List<String>> headers = new LinkedList<>();
+        List<String> acceptHeader = new LinkedList<>();
+        acceptHeader.add("Accept");
+        acceptHeader.add(RDF_XML);
+        headers.add(acceptHeader);
+
+        String query = "CONSTRUCT { ?x ?p ?y } WHERE { ?x ?p ?y } LIMIT 1";
+        String urlQuery = SPARQL_ENDPOINT_URL + "?"
+ SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + Graph constructGraph = new Graph(); + Load load = Load.create(constructGraph); + InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes()); + load.parse(inputStream, RDFXML_FORMAT); + + assertEquals(200, status); + assertEquals(RDF_XML, con.getContentType()); + assertEquals(1, constructGraph.size()); + } + + /** + * Does the endpoint answer to a CONSTRUCT query via URL-encoded POST? + * + * @throws Exception + */ + @Test + public void sparqlConstructRDFXMLEndpointUrlEncodedPost() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(RDF_XML); + headers.add(acceptHeader); + + String query = "CONSTRUCT { ?x ?p ?y } WHERE { ?x ?p ?y } LIMIT 1"; + HttpURLConnection con = SPARQLTestUtils.postUrlencodedConnection(SPARQL_ENDPOINT_URL, headers, + SPARQLTestUtils.generateSPARQLQueryParameters(query)); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + Graph constructGraph = new Graph(); + Load load = Load.create(constructGraph); + InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes()); + load.parse(inputStream, RDFXML_FORMAT); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(RDF_XML, con.getContentType()); + assertEquals(1, 
constructGraph.size()); + } + + /** + * Does the endpoint answer to a CONSTRUCT query via URL-encoded POST? + * + * @throws Exception + */ + @Test + public void sparqlConstructRDFXMLEndpointUnencodedPost() throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(RDF_XML); + List contentTypeHeader = new LinkedList<>(); + contentTypeHeader.add("Content-Type"); + contentTypeHeader.add("application/sparql-query"); + headers.add(acceptHeader); + headers.add(contentTypeHeader); + + String query = "CONSTRUCT { ?x ?p ?y } WHERE { ?x ?p ?y } LIMIT 1"; + HttpURLConnection con = SPARQLTestUtils.postConnection(SPARQL_ENDPOINT_URL, headers, query); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + Graph constructGraph = new Graph(); + Load load = Load.create(constructGraph); + InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes()); + load.parse(inputStream, RDFXML_FORMAT); + + assertEquals(200, status); + assertEquals(RDF_XML, con.getContentType()); + assertEquals(1, constructGraph.size()); + } + + /** + * Does the endpoint answer to CONSTRUCT a query via GET? + * + * @throws Exception + */ + @Test + public void sparqlConstructTrigEndpointGet() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(TRIG); + headers.add(acceptHeader); + + String query = "CONSTRUCT { ?x ?p ?y } WHERE { ?x ?p ?y } LIMIT 1"; + String urlQuery = SPARQL_ENDPOINT_URL + "?" 
+ SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + Graph constructGraph = new Graph(); + Load load = Load.create(constructGraph); + InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes()); + load.parse(inputStream, TRIG_FORMAT); + + assertEquals(200, status); + assertEquals(TRIG, con.getContentType()); + assertEquals(1, constructGraph.size()); + } + + /** + * Does the endpoint answer to a CONSTRUCT query via URL-encoded POST? + * + * @throws Exception + */ + @Test + public void sparqlConstructTrigEndpointUrlEncodedPost() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(TRIG); + headers.add(acceptHeader); + + String query = "CONSTRUCT { ?x ?p ?y } WHERE { ?x ?p ?y } LIMIT 1"; + HttpURLConnection con = SPARQLTestUtils.postUrlencodedConnection(SPARQL_ENDPOINT_URL, headers, + SPARQLTestUtils.generateSPARQLQueryParameters(query)); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + Graph constructGraph = new Graph(); + Load load = Load.create(constructGraph); + InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes()); + load.parse(inputStream, TRIG_FORMAT); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(TRIG, con.getContentType()); + assertEquals(1, 
constructGraph.size()); + } + + /** + * Does the endpoint answer to a CONSTRUCT query via URL-encoded POST? + * + * @throws Exception + */ + @Test + public void sparqlConstructTrigEndpointUnencodedPost() throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(TRIG); + List contentTypeHeader = new LinkedList<>(); + contentTypeHeader.add("Content-Type"); + contentTypeHeader.add("application/sparql-query"); + headers.add(acceptHeader); + headers.add(contentTypeHeader); + + String query = "CONSTRUCT { ?x ?p ?y } WHERE { ?x ?p ?y } LIMIT 1"; + HttpURLConnection con = SPARQLTestUtils.postConnection(SPARQL_ENDPOINT_URL, headers, query); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + Graph constructGraph = new Graph(); + Load load = Load.create(constructGraph); + InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes()); + load.parse(inputStream, TRIG_FORMAT); + + assertEquals(200, status); + assertEquals(TRIG, con.getContentType()); + assertEquals(1, constructGraph.size()); + } + + /** + * Does the endpoint answer to CONSTRUCT a query via GET? + * + * @throws Exception + */ + @Test + public void sparqlConstructJSONLDEndpointGet() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(JSON_LD); + headers.add(acceptHeader); + + String query = "CONSTRUCT { ?x ?p ?y } WHERE { ?x ?p ?y } LIMIT 1"; + String urlQuery = SPARQL_ENDPOINT_URL + "?" 
+ SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + Graph constructGraph = new Graph(); + Load load = Load.create(constructGraph); + InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes()); + load.parse(inputStream, JSONLD_FORMAT); + + assertEquals(200, status); + assertEquals(JSON_LD, con.getContentType()); + assertEquals(1, constructGraph.size()); + } + + /** + * Does the endpoint answer to a CONSTRUCT query via URL-encoded POST? + * + * @throws Exception + */ + @Test + public void sparqlConstructJSONLDEndpointUrlEncodedPost() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(JSON_LD); + headers.add(acceptHeader); + + String query = "CONSTRUCT { ?x ?p ?y } WHERE { ?x ?p ?y } LIMIT 1"; + HttpURLConnection con = SPARQLTestUtils.postUrlencodedConnection(SPARQL_ENDPOINT_URL, headers, + SPARQLTestUtils.generateSPARQLQueryParameters(query)); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + Graph constructGraph = new Graph(); + Load load = Load.create(constructGraph); + InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes()); + load.parse(inputStream, JSONLD_FORMAT); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(JSON_LD, con.getContentType()); + assertEquals(1, 
constructGraph.size()); + } + + /** + * Does the endpoint answer to a CONSTRUCT query via URL-encoded POST? + * + * @throws Exception + */ + @Test + public void sparqlConstructJSONLDEndpointUnencodedPost() throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(JSON_LD); + List contentTypeHeader = new LinkedList<>(); + contentTypeHeader.add("Content-Type"); + contentTypeHeader.add("application/sparql-query"); + headers.add(acceptHeader); + headers.add(contentTypeHeader); + + String query = "CONSTRUCT { ?x ?p ?y } WHERE { ?x ?p ?y } LIMIT 1"; + HttpURLConnection con = SPARQLTestUtils.postConnection(SPARQL_ENDPOINT_URL, headers, query); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + Graph constructGraph = new Graph(); + Load load = Load.create(constructGraph); + InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes()); + load.parse(inputStream, JSONLD_FORMAT); + + assertEquals(200, status); + assertEquals(JSON_LD, con.getContentType()); + assertEquals(1, constructGraph.size()); + } + + /** + * Does the endpoint answer to CONSTRUCT a query via GET? + * + * @throws Exception + */ + @Test + public void sparqlConstructNTriplesEndpointGet() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(N_TRIPLES); + headers.add(acceptHeader); + + String query = "CONSTRUCT { ?x ?p ?y } WHERE { ?x ?p ?y } LIMIT 1"; + String urlQuery = SPARQL_ENDPOINT_URL + "?" 
+ SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + Graph constructGraph = new Graph(); + Load load = Load.create(constructGraph); + InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes()); + load.parse(inputStream, TURTLE_FORMAT); + + assertEquals(200, status); + assertEquals(N_TRIPLES, con.getContentType()); + assertEquals(1, constructGraph.size()); + } + + /** + * Does the endpoint answer to a CONSTRUCT query via URL-encoded POST? + * + * @throws Exception + */ + @Test + public void sparqlConstructNTriplesEndpointUrlEncodedPost() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(N_TRIPLES); + headers.add(acceptHeader); + + String query = "CONSTRUCT { ?x ?p ?y } WHERE { ?x ?p ?y } LIMIT 1"; + HttpURLConnection con = SPARQLTestUtils.postUrlencodedConnection(SPARQL_ENDPOINT_URL, headers, + SPARQLTestUtils.generateSPARQLQueryParameters(query)); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + Graph constructGraph = new Graph(); + Load load = Load.create(constructGraph); + InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes()); + load.parse(inputStream, TURTLE_FORMAT); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(N_TRIPLES, con.getContentType()); + 
assertEquals(1, constructGraph.size()); + } + + /** + * Does the endpoint answer to a CONSTRUCT query via URL-encoded POST? + * + * @throws Exception + */ + @Test + public void sparqlConstructNTriplesEndpointUnencodedPost() throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(N_TRIPLES); + List contentTypeHeader = new LinkedList<>(); + contentTypeHeader.add("Content-Type"); + contentTypeHeader.add("application/sparql-query"); + headers.add(acceptHeader); + headers.add(contentTypeHeader); + + String query = "CONSTRUCT { ?x ?p ?y } WHERE { ?x ?p ?y } LIMIT 1"; + HttpURLConnection con = SPARQLTestUtils.postConnection(SPARQL_ENDPOINT_URL, headers, query); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + Graph constructGraph = new Graph(); + Load load = Load.create(constructGraph); + InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes()); + load.parse(inputStream, TURTLE_FORMAT); + + assertEquals(200, status); + assertEquals(N_TRIPLES, con.getContentType()); + assertEquals(1, constructGraph.size()); + } + + /** + * Does the endpoint answer to CONSTRUCT a query via GET? + * + * @throws Exception + */ + @Test + public void sparqlConstructNQuadsEndpointGet() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(N_QUADS); + headers.add(acceptHeader); + + String query = "CONSTRUCT { ?x ?p ?y } WHERE { ?x ?p ?y } LIMIT 1"; + String urlQuery = SPARQL_ENDPOINT_URL + "?" 
+ SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + Graph constructGraph = new Graph(); + Load load = Load.create(constructGraph); + InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes()); + load.parse(inputStream, NQUADS_FORMAT); + + assertEquals(200, status); + assertEquals(N_QUADS, con.getContentType()); + assertEquals(1, constructGraph.size()); + } + + /** + * Does the endpoint answer to a CONSTRUCT query via URL-encoded POST? + * + * @throws Exception + */ + @Test + public void sparqlConstructNQuadsEndpointUrlEncodedPost() throws Exception { + + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(N_QUADS); + headers.add(acceptHeader); + + String query = "CONSTRUCT { ?x ?p ?y } WHERE { ?x ?p ?y } LIMIT 1"; + HttpURLConnection con = SPARQLTestUtils.postUrlencodedConnection(SPARQL_ENDPOINT_URL, headers, + SPARQLTestUtils.generateSPARQLQueryParameters(query)); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + Graph constructGraph = new Graph(); + Load load = Load.create(constructGraph); + InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes()); + load.parse(inputStream, NQUADS_FORMAT); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(N_QUADS, con.getContentType()); + assertEquals(1, 
constructGraph.size()); + } + + /** + * Does the endpoint answer to a CONSTRUCT query via URL-encoded POST? + * + * @throws Exception + */ + @Test + public void sparqlConstructNQuadsEndpointUnencodedPost() throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(N_QUADS); + List contentTypeHeader = new LinkedList<>(); + contentTypeHeader.add("Content-Type"); + contentTypeHeader.add("application/sparql-query"); + headers.add(acceptHeader); + headers.add(contentTypeHeader); + + String query = "CONSTRUCT { ?x ?p ?y } WHERE { ?x ?p ?y } LIMIT 1"; + HttpURLConnection con = SPARQLTestUtils.postConnection(SPARQL_ENDPOINT_URL, headers, query); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + Graph constructGraph = new Graph(); + Load load = Load.create(constructGraph); + InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes()); + load.parse(inputStream, NQUADS_FORMAT); + + assertEquals(200, status); + assertEquals(N_QUADS, con.getContentType()); + assertEquals(1, constructGraph.size()); + } + + /** + * Is there an HTML page available at /sparql? 
+     *
+     * @throws Exception
+     */
+    @Test
+    public void sparqlEndpointHtml() throws Exception {
+        List<List<String>> headers = new LinkedList<>();
+        List<String> contentTypeHeader = new LinkedList<>();
+        contentTypeHeader.add("Content-Type");
+        contentTypeHeader.add(TEXT_HTML);
+        headers.add(contentTypeHeader);
+
+        HttpURLConnection con = SPARQLTestUtils.getConnection(SPARQL_ENDPOINT_URL, headers);
+
+        BufferedReader in = new BufferedReader(
+                new InputStreamReader(con.getInputStream()));
+        String inputLine;
+        StringBuffer content = new StringBuffer();
+        while ((inputLine = in.readLine()) != null) {
+            content.append(inputLine);
+        }
+        in.close();
+
+        int status = con.getResponseCode();
+
+        con.disconnect();
+
+        assertEquals(200, status);
+        assertEquals(TEXT_HTML, con.getContentType());
+    }
+
+    @Test
+    public void sparqlEndpointSelectRDFXML() throws Exception {
+        List<List<String>> headers = new LinkedList<>();
+        List<String> acceptHeader = new LinkedList<>();
+        acceptHeader.add("Accept");
+        acceptHeader.add(SPARQL_RESULTS_XML);
+        headers.add(acceptHeader);
+
+        String query = "select * where {?x ?p ?y} limit 1";
+        String urlQuery = SPARQL_ENDPOINT_URL + "?"
+                + SPARQLTestUtils.generateSPARQLQueryParameters(query);
+        HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers);
+
+        BufferedReader in = new BufferedReader(
+                new InputStreamReader(con.getInputStream()));
+        String inputLine;
+        StringBuffer content = new StringBuffer();
+        while ((inputLine = in.readLine()) != null) {
+            content.append(inputLine);
+        }
+        in.close();
+
+        int status = con.getResponseCode();
+
+        con.disconnect();
+
+        Mappings queryResults = SPARQLResult.create().parseString(content.toString());
+
+        assertEquals(200, status);
+        assertEquals(SPARQL_RESULTS_XML, con.getContentType());
+        assertTrue(queryResults.size() > 0);
+    }
+
+    @Test
+    public void sparqlEndpointSelectJSON() throws Exception {
+        List<List<String>> headers = new LinkedList<>();
+        List<String> acceptHeader = new LinkedList<>();
+        acceptHeader.add("Accept");
+        acceptHeader.add(SPARQL_RESULTS_JSON);
+        headers.add(acceptHeader);
+
+        String query = "select * where {?x ?p ?y} limit 1";
+        String urlQuery = SPARQL_ENDPOINT_URL + "?"
+ SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + Mappings queryResults = SPARQLJSONResult.create().parseString(content.toString()); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_JSON, con.getContentType()); + assertTrue(queryResults.size() > 0); + } + + @Test + public void sparqlEndpointSelectCSV() throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_CSV); + headers.add(acceptHeader); + + String query = "select * where {?x ?p ?y} limit 1"; + String urlQuery = SPARQL_ENDPOINT_URL + "?" + SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer resultString = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + resultString.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_CSV, con.getContentType()); + assertTrue(resultString.toString().contains("x,p,y")); + } + + @Test + public void sparqlEndpointSelectTSV() throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_TSV); + headers.add(acceptHeader); + + String query = "select * where {?x ?p ?y} limit 1"; + String urlQuery = SPARQL_ENDPOINT_URL + "?" 
+ SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer resultString = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + resultString.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_TSV, con.getContentType()); + assertTrue(resultString.toString().contains("?x\t?p\t?y")); + } + + @Test + public void sparqlEndpointAskRDFXML() throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_XML); + headers.add(acceptHeader); + + String query = "ASK {?x ?p ?y}"; + String urlQuery = SPARQL_ENDPOINT_URL + "?" + SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + Mappings queryResults = SPARQLResult.create().parseString(content.toString()); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_XML, con.getContentType()); + assertTrue(queryResults.size() > 0); + } + + @Test + public void sparqlEndpointAskJSON() throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_JSON); + headers.add(acceptHeader); + + String query = "ASK {?x ?p ?y}"; + String urlQuery = SPARQL_ENDPOINT_URL + "?" 
+ SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + SPARQLJSONResult.create().parseString(content.toString()); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_JSON, con.getContentType()); + } + + @Test + public void sparqlEndpointAskCSV() throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_CSV); + headers.add(acceptHeader); + + String query = "ASK {?x ?p ?y}"; + String urlQuery = SPARQL_ENDPOINT_URL + "?" + SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer resultString = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + resultString.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_CSV, con.getContentType()); + assertEquals("true", resultString.toString()); + } + + @Test + public void sparqlEndpointAskTSV() throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_TSV); + headers.add(acceptHeader); + + String query = "ASK {?x ?p ?y}"; + String urlQuery = SPARQL_ENDPOINT_URL + "?" 
+ SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer resultString = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + resultString.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_TSV, con.getContentType()); + assertEquals("true", resultString.toString()); + } + + @Test + public void sparqlEndpointConstructRDFXML() throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(RDF_XML); + headers.add(acceptHeader); + + String query = "construct {?x ?p ?y} where {?x ?p ?y} limit 1"; + String urlQuery = SPARQL_ENDPOINT_URL + "?" + SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + Graph constructGraph = new Graph(); + Load load = Load.create(constructGraph); + InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes()); + load.parse(inputStream, RDFXML_FORMAT); + + assertEquals(200, status); + assertEquals(RDF_XML, con.getContentType()); + assertTrue(constructGraph.size() > 0); + } + + @Test + public void sparqlEndpointDescribeRDFXML() throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(RDF_XML); + headers.add(acceptHeader); + + String query = 
"describe ?x where {?x ?p ?y} limit 1"; + String urlQuery = SPARQL_ENDPOINT_URL + "?" + SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + Graph describeGraph = new Graph(); + Load load = Load.create(describeGraph); + InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes()); + load.parse(inputStream, RDFXML_FORMAT); + + assertEquals(200, status); + assertEquals(RDF_XML, con.getContentType()); + assertTrue(describeGraph.size() > 0); + } + + @Test + public void sparqlEndpointConstructTurtle() throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(TURTLE_TEXT); + headers.add(acceptHeader); + + String query = "construct {?x ?p ?y} where {?x ?p ?y} limit 1"; + String urlQuery = SPARQL_ENDPOINT_URL + "?" 
+ SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + Graph constructGraph = new Graph(); + Load load = Load.create(constructGraph); + InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes()); + load.parse(inputStream, TURTLE_FORMAT); + + assertEquals(200, status); + assertEquals(TURTLE_TEXT, con.getContentType()); + assertTrue(constructGraph.size() > 0); + } + + @Test + public void sparqlEndpointDescribeTurtle() throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(TURTLE_TEXT); + headers.add(acceptHeader); + + String query = "describe ?x where {?x ?p ?y} limit 1"; + String urlQuery = SPARQL_ENDPOINT_URL + "?" + SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + + con.disconnect(); + + Graph describeGraph = new Graph(); + Load load = Load.create(describeGraph); + InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes()); + load.parse(inputStream, TURTLE_FORMAT); + + assertEquals(200, status); + assertEquals(TURTLE_TEXT, con.getContentType()); + assertTrue(describeGraph.size() > 0); + } + + /** + * Default graph in the HTTP protocol taken into account? 
+ * + * @throws Exception + */ + @Test + public void sparqlEndpointOneDefaultGraph() throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_XML); + headers.add(acceptHeader); + + // Should only return 1 result + String query = "select DISTINCT ?x where { ?x ?p ?y } limit 10"; + List> parameters = new ArrayList<>(); + parameters.add(new ArrayList()); + parameters.get(0).add("default-graph-uri"); + parameters.get(0).add("http://example.com/nothing"); + String urlQuery = SPARQL_ENDPOINT_URL + "?" + SPARQLTestUtils.generateSPARQLQueryParameters(query, parameters); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + con.disconnect(); + + Mappings queryResults = SPARQLResult.create().parseString(content.toString()); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_XML, con.getContentType()); + assertEquals(1, queryResults.size()); + assertEquals("http://example.com/nothing", queryResults.get(0).getNode("?x").getLabel()); + } + + /** + * Default graph in the HTTP protocol taken into account? 
+ * + * @throws Exception + */ + @Test + public void sparqlEndpointMultipleDefaultGraphs() throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_XML); + headers.add(acceptHeader); + + // Should only return 1 result + String query = "select DISTINCT ?x where { ?x ?p ?y } ORDER BY ?x limit 10"; + List> parameters = new ArrayList<>(); + parameters.add(new ArrayList()); + parameters.get(0).add("default-graph-uri"); + parameters.get(0).add("http://example.com/nothing"); + parameters.add(new ArrayList()); + parameters.get(1).add("default-graph-uri"); + parameters.get(1).add("http://example.com/A"); + parameters.add(new ArrayList()); + parameters.get(2).add("default-graph-uri"); + parameters.get(2).add("http://example.com/B"); + String urlQuery = SPARQL_ENDPOINT_URL + "?" + SPARQLTestUtils.generateSPARQLQueryParameters(query, parameters); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + con.disconnect(); + + Mappings queryResults = SPARQLResult.create().parseString(content.toString()); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_XML, con.getContentType()); + assertEquals(3, queryResults.size()); + assertEquals("http://example.com/A", queryResults.get(0).getNode("?x").getLabel()); + assertEquals("http://example.com/B", queryResults.get(1).getNode("?x").getLabel()); + assertEquals("http://example.com/nothing", queryResults.get(2).getNode("?x").getLabel()); + } + + /** + * Named graphs in the HTTP protocol taken into account? 
+ * + * @throws Exception + */ + @Test + public void sparqlEndpointOneNamedGraph() throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_XML); + headers.add(acceptHeader); + + // Should only return 2 results and + // + String query = "select DISTINCT ?x where { GRAPH ?g { ?x a ?type } } limit 20"; + List> parameters = new ArrayList<>(); + parameters.add(new ArrayList()); + parameters.get(0).add("default-graph-uri"); + parameters.get(0).add("http://example.com/nothing"); + parameters.add(new ArrayList()); + parameters.get(1).add("named-graph-uri"); + parameters.get(1).add("http://example.com/A"); + String urlQuery = SPARQL_ENDPOINT_URL + "?" + SPARQLTestUtils.generateSPARQLQueryParameters(query, parameters); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + con.disconnect(); + + Mappings queryResults = SPARQLResult.create().parseString(content.toString()); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_XML, con.getContentType()); + assertEquals(1, queryResults.size()); + assertEquals("http://example.com/A", queryResults.get(0).getNode("?x").getLabel()); + } + + /** + * In a conflict between protocol named graphs and query named graphs, the + * protocol named graphs should be taken into account. 
+ * + * @see SPARQL + * Protocol + * + * @throws Exception + */ + @Test + public void sparqlEndpointNamedGraphsAmbiguous() throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_XML); + headers.add(acceptHeader); + + // Should only return ex:A that is the only named graph in the protocol. Should + // not returns ex:B specified in the query. + String query = "select DISTINCT ?x FROM NAMED where { GRAPH ?g { ?x a ?type } } limit 20"; + List> parameters = new ArrayList<>(); + parameters.add(new ArrayList()); + parameters.get(0).add("default-graph-uri"); + parameters.get(0).add("http://example.com/nothing"); + parameters.add(new ArrayList()); + parameters.get(1).add("named-graph-uri"); + parameters.get(1).add("http://example.com/A"); + String urlQuery = SPARQL_ENDPOINT_URL + "?" + SPARQLTestUtils.generateSPARQLQueryParameters(query, parameters); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + int status = con.getResponseCode(); + con.disconnect(); + + Mappings queryResults = SPARQLResult.create().parseString(content.toString()); + + assertEquals(200, status); + assertEquals(SPARQL_RESULTS_XML, con.getContentType()); + assertEquals(1, queryResults.size()); + assertEquals("http://example.com/A", queryResults.get(0).getNode("?x").getLabel()); + } +} diff --git a/corese-server/src/test/java/fr/inria/corese/server/webservice/SPARQLEndpointUpdateTest.java b/corese-server/src/test/java/fr/inria/corese/server/webservice/SPARQLEndpointUpdateTest.java new file mode 100644 index 0000000000..a877272b25 --- /dev/null +++ 
b/corese-server/src/test/java/fr/inria/corese/server/webservice/SPARQLEndpointUpdateTest.java @@ -0,0 +1,145 @@ +package fr.inria.corese.server.webservice; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.io.File; +import java.io.IOException; +import java.net.HttpURLConnection; +import java.util.LinkedList; +import java.util.List; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +/** + * Test of the behavior of the corese server against SPARQL Updates. + * + * @author Pierre Maillot, P16 Wimmics INRIA I3S, 2024 + * @see https://www.w3.org/TR/2013/REC-sparql11-protocol-20130321/#update-operation + + */ +public class SPARQLEndpointUpdateTest { + + + private static final Logger logger = LogManager.getLogger(SPARQLEndpointUpdateTest.class); + + private static Process server; + + private static final String SERVER_URL = "http://localhost:8080/"; + private static final String SPARQL_ENDPOINT_URL = SERVER_URL + "sparql"; + + /** + * Start the server before running the tests. + * Loads a part of the DBpedia dataset in the server. 
+ */ + @BeforeClass + public static void init() throws InterruptedException, IOException { + File turtleFile = new File("src/test/resources/data.ttl"); + String turtleFileAbsolutePath = turtleFile.getAbsolutePath(); + + File trigFile = new File("src/test/resources/data.trig"); + String trigFileAbsolutePath = trigFile.getAbsolutePath(); + + logger.info("starting in " + System.getProperty("user.dir")); + server = new ProcessBuilder().inheritIO().command( + "java", + "-jar", "./target/corese-server-4.5.1.jar", + "-lh", + "-l", turtleFileAbsolutePath, + "-l", trigFileAbsolutePath, + "-su").start(); + Thread.sleep(5000); + } + + @AfterClass + public static void shutdown() { + server.destroy(); + } + + /** + * Test the insertion of a triple in the server using a POST request with a URL-encoded body. + * @throws Exception + */ + @Test + public void postUrlencodedUpdateTest() throws Exception { + String query = "INSERT DATA { }"; + String body = SPARQLTestUtils.generateSPARQLUpdateParameters(query); + List> headers = new LinkedList<>(); + List contentTypeHeader = new LinkedList<>(); + contentTypeHeader.add("Content-Type"); + contentTypeHeader.add("application/x-www-form-urlencoded"); + headers.add(contentTypeHeader); + HttpURLConnection con = SPARQLTestUtils.postConnection(SPARQL_ENDPOINT_URL, headers, body); + int responseCode = con.getResponseCode(); + con.disconnect(); + + assertEquals(200, responseCode); + } + + /** + * Test the insertion of a triple in the server using a POST request with a SPARQL Update body. 
+ * @throws Exception + */ + @Test + public void postUpdateTest() throws Exception { + String query = "INSERT DATA { }"; + List> headers = new LinkedList<>(); + List contentTypeHeader = new LinkedList<>(); + contentTypeHeader.add("Content-Type"); + contentTypeHeader.add("application/sparql-update"); + headers.add(contentTypeHeader); + HttpURLConnection con = SPARQLTestUtils.postConnection(SPARQL_ENDPOINT_URL, headers, query); + int responseCode = con.getResponseCode(); + con.disconnect(); + + // Send a query to check if the instance was inserted + String askQuery = "ASK { }"; + boolean askResult = SPARQLTestUtils.sendSPARQLAsk(askQuery); + + assertTrue(responseCode >= 200 && responseCode < 400); + assertTrue(askResult); + } + + /** + * Test the insertion of a triple in the server using a POST request with a URL-encoded body. + * @throws Exception + */ + @Test + public void usingNamedGraphUpdateTest() throws Exception { + // Insert a new instance in ex:A + String updateQuery = "PREFIX owl: INSERT { a owl:Thing } WHERE { a owl:Thing }"; + List> updateParameters = new LinkedList<>(); + List graphParameter = new LinkedList<>(); + graphParameter.add("using-graph-uri"); + graphParameter.add("http://example.com/A"); + updateParameters.add(graphParameter); + List> updateHeaders = new LinkedList<>(); + List contentTypeFormUrlEncodedHeader = new LinkedList<>(); + contentTypeFormUrlEncodedHeader.add("Content-Type"); + contentTypeFormUrlEncodedHeader.add("application/sparql-update"); + updateHeaders.add(contentTypeFormUrlEncodedHeader); + HttpURLConnection updateCon = SPARQLTestUtils.postConnection(SPARQL_ENDPOINT_URL, updateHeaders, updateQuery); + int updateResponseCode = updateCon.getResponseCode(); + updateCon.disconnect(); + + // Should be present in the dataset as it is loaded + String askQueryABaseline = "PREFIX owl: ASK { GRAPH { a owl:Thing } }"; + boolean askResultABaseline = SPARQLTestUtils.sendSPARQLAsk(askQueryABaseline); + // Should have been created by the 
update + String askQueryA = "PREFIX owl: ASK { GRAPH { a owl:Thing } }"; + boolean askResultA = SPARQLTestUtils.sendSPARQLAsk(askQueryA); + // Should not be present in the dataset + String askQueryB = "PREFIX owl: ASK { GRAPH { a owl:Thing } }"; + boolean askResultB = SPARQLTestUtils.sendSPARQLAsk(askQueryB); + + assertEquals(200, updateResponseCode); + assertTrue(updateResponseCode >= 200 && updateResponseCode < 400); + assertTrue(askResultABaseline); + assertTrue(askResultA); + assertFalse(askResultB); + } +} diff --git a/corese-server/src/test/java/fr/inria/corese/server/webservice/SPARQLTestUtils.java b/corese-server/src/test/java/fr/inria/corese/server/webservice/SPARQLTestUtils.java new file mode 100644 index 0000000000..1ef1e5f4e0 --- /dev/null +++ b/corese-server/src/test/java/fr/inria/corese/server/webservice/SPARQLTestUtils.java @@ -0,0 +1,237 @@ +package fr.inria.corese.server.webservice; + +import static fr.inria.corese.core.api.Loader.RDFXML_FORMAT; +import static fr.inria.corese.core.print.ResultFormat.RDF_XML; +import static fr.inria.corese.core.print.ResultFormat.SPARQL_RESULTS_CSV; +import static fr.inria.corese.core.print.ResultFormat.SPARQL_RESULTS_XML; + +import java.io.BufferedReader; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.UnsupportedEncodingException; +import java.net.HttpURLConnection; +import java.net.MalformedURLException; +import java.net.ProtocolException; +import java.net.URL; +import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import fr.inria.corese.core.Graph; +import fr.inria.corese.core.load.Load; +import fr.inria.corese.core.load.result.SPARQLResult; +import fr.inria.corese.kgram.core.Mappings; + +public 
class SPARQLTestUtils { + + private static final Logger logger = LogManager.getLogger(SPARQLTestUtils.class); + + private static final String SERVER_URL = "http://localhost:8080/"; + private static final String SPARQL_ENDPOINT_URL = SERVER_URL + "sparql"; + + /** + * Get a connection to a server. + * + * @param url server URL + * @param headers HTTP headers + * @return + * @throws MalformedURLException + * @throws IOException + * @throws ProtocolException + */ + + public static HttpURLConnection methodConnection(String method, String url, List> headers) + throws IOException { + URL u = new URL(url); + HttpURLConnection con = (HttpURLConnection) u.openConnection(); + con.setRequestMethod(method); + con.setConnectTimeout(5000); + con.setReadTimeout(5000); + con.setInstanceFollowRedirects(true); + for (List header : headers) { + con.setRequestProperty(header.get(0), header.get(1)); + } + return con; + } + + public static HttpURLConnection getConnection(String url, List> headers) + throws IOException { + return methodConnection("GET", url, headers); + } + + public static HttpURLConnection postConnection(String url, List> headers, String body) + throws IOException { + HttpURLConnection con = methodConnection("POST", url, headers); + con.setDoOutput(true); + con.getOutputStream().write(body.getBytes()); + return con; + } + + public static HttpURLConnection postUrlencodedConnection(String url, List> headers, String body) + throws IOException { + List> newHeaders = new ArrayList<>(headers); + List contentTypeHeader = new ArrayList<>(); + contentTypeHeader.add("Content-Type"); + contentTypeHeader.add("application/x-www-form-urlencoded"); + newHeaders.add(contentTypeHeader); + return postConnection(url, newHeaders, body); + } + + public static HttpURLConnection putConnection(String url, List> headers, String body) + throws IOException { + HttpURLConnection con = methodConnection("PUT", url, headers); + con.setDoOutput(true); + con.getOutputStream().write(body.getBytes()); + 
return con; + } + + public static HttpURLConnection deleteConnection(String url, List> headers) + throws IOException { + return methodConnection("DELETE", url, headers); + } + + public static HttpURLConnection deleteConnection(String url) + throws IOException { + return deleteConnection( url, new ArrayList<>()); + } + + public static HttpURLConnection headConnection(String url, List> headers) + throws IOException { + return methodConnection("HEAD", url, headers); + } + + public static HttpURLConnection headConnection(String url) + throws IOException { + return headConnection(url, new ArrayList<>()); + } + + public static String generateSPARQLQueryParameters(String query, List> optionalParameters) { + return generateSPARQLParameters("query", query, optionalParameters); + } + + public static String generateSPARQLQueryParameters(String query) { + return generateSPARQLQueryParameters(query, new ArrayList<>()); + } + + public static String generateSPARQLUpdateParameters(String query, List> optionalParameters) { + return generateSPARQLParameters("update", query, optionalParameters); + } + + public static String generateSPARQLUpdateParameters(String query) { + return generateSPARQLUpdateParameters(query, new ArrayList<>()); + } + + public static String generateGraphStoreParameters(String query) { + return generateGraphStoreParameters(query, new ArrayList<>()); + } + + public static String generateGraphStoreParameters(String query, List> optionalParameters) { + return generateSPARQLParameters("graph", query, optionalParameters); + } + + private static String generateSPARQLParameters(String firstKeyword, String query, + List> optionalParameters) { + try { + String result = firstKeyword + "=" + URLEncoder.encode(query, StandardCharsets.UTF_8.toString()); + if (optionalParameters.size() > 0) { + for (Iterator> itParam = optionalParameters.iterator(); itParam.hasNext();) { + List p = itParam.next(); + if (p.size() == 2) { + result += "&" + p.get(0) + "=" + 
URLEncoder.encode(p.get(1), StandardCharsets.UTF_8.toString()); + } else if (p.size() == 1) { + result += "&" + p.get(0); + } + } + } + return result; + } catch (UnsupportedEncodingException e) { + logger.error(e); + return null; + } + } + + public static Mappings sendSPARQLSelect(String query) throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_XML); + headers.add(acceptHeader); + + String urlQuery = SPARQL_ENDPOINT_URL + "?" + SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + + con.disconnect(); + + return SPARQLResult.create().parseString(content.toString()); + } + + public static boolean sendSPARQLAsk(String query) throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(SPARQL_RESULTS_CSV); + headers.add(acceptHeader); + + String urlQuery = SPARQL_ENDPOINT_URL + "?" 
+ SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer resultString = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + resultString.append(inputLine); + } + in.close(); + + con.disconnect(); + + return Boolean.parseBoolean(resultString.toString()); + } + + public static Graph sendSPARQLConstructDescribe(String query) throws Exception { + List> headers = new LinkedList<>(); + List acceptHeader = new LinkedList<>(); + acceptHeader.add("Accept"); + acceptHeader.add(RDF_XML); + headers.add(acceptHeader); + + String urlQuery = SPARQL_ENDPOINT_URL + "?" + SPARQLTestUtils.generateSPARQLQueryParameters(query); + HttpURLConnection con = SPARQLTestUtils.getConnection(urlQuery, headers); + + BufferedReader in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuffer content = new StringBuffer(); + while ((inputLine = in.readLine()) != null) { + content.append(inputLine); + } + in.close(); + con.disconnect(); + + Graph constructGraph = new Graph(); + Load load = Load.create(constructGraph); + InputStream inputStream = new ByteArrayInputStream(content.toString().getBytes()); + load.parse(inputStream, RDFXML_FORMAT); + + return constructGraph; + } + +} diff --git a/corese-server/src/test/java/fr/inria/corese/server/webservice/SudokuIT.java b/corese-server/src/test/java/fr/inria/corese/server/webservice/SudokuIT.java deleted file mode 100644 index 86e34cce89..0000000000 --- a/corese-server/src/test/java/fr/inria/corese/server/webservice/SudokuIT.java +++ /dev/null @@ -1,63 +0,0 @@ -package fr.inria.corese.server.webservice; - -import org.junit.AfterClass; -import static org.junit.Assert.assertTrue; -import org.junit.BeforeClass; -import org.junit.Test; -import org.openqa.selenium.By; -import 
org.openqa.selenium.WebDriver; -import org.openqa.selenium.WebElement; -import org.openqa.selenium.firefox.FirefoxDriver; - -public class SudokuIT { - - private WebDriver driver; - - @BeforeClass - public void before() { - WebDriver driver = new FirefoxDriver(); - } - - @AfterClass - public void after() { - driver.quit(); - } - /** - * Check whether the reset button empties the grid. - */ - @Test - public void resetButtonEmptyGrid() { - driver.navigate().to("http://localhost:8080/"); - WebElement miscMenu = driver.findElement(By.xpath("//a[@id='MiscEntry']")); - miscMenu.click(); - WebElement sudokuEntry = driver.findElement(By.id("SudokuEntry")); - sudokuEntry.click(); - WebElement resetButton = driver.findElement(By.id("reset")); - resetButton.click(); - assertTrue("The sudoku grid should be empty after pressing the reset button.", isGridEmpty()); - } - - @Test - public void submitButtonNotEmptyGrid() { - driver.navigate().to("http://localhost:8080/"); - WebElement miscMenu = driver.findElement(By.xpath("//a[@id='MiscEntry']")); - miscMenu.click(); - WebElement sudokuEntry = driver.findElement(By.id("SudokuEntry")); - sudokuEntry.click(); - WebElement resetButton = driver.findElement(By.id("submit")); - resetButton.click(); - assertTrue("The sudoku grid should not be empty after pressing the submit button.", !isGridEmpty()); - } - - private boolean isGridEmpty() { - for (char line = 'a'; line < 'j'; line++) { - for (int col = 1; col < 10; col++) { - WebElement tile = driver.findElement(By.id("" + line + col)); - if (!tile.getAttribute("value").isEmpty()) { - return false; - } - } - } - return true; - } -} diff --git a/corese-server/src/test/resources/data.trig b/corese-server/src/test/resources/data.trig new file mode 100644 index 0000000000..e5d9c6980d --- /dev/null +++ b/corese-server/src/test/resources/data.trig @@ -0,0 +1,19 @@ +@prefix ex: . +@prefix o: . +@prefix owl: . +@prefix p: . +@prefix r: . +@prefix rdfs: . +@prefix xsd: . 
+ +ex:A { + ex:A a owl:Thing . +} + +ex:B { + ex:B a owl:Thing . +} + +ex:nothing { + ex:nothing a owl:Nothing . +} diff --git a/corese-server/src/test/resources/data.ttl b/corese-server/src/test/resources/data.ttl new file mode 100644 index 0000000000..e117b2863d --- /dev/null +++ b/corese-server/src/test/resources/data.ttl @@ -0,0 +1,210 @@ +@prefix p: . +@prefix o: . +@prefix w: . +@prefix r: . +@prefix xsd: . +@prefix rdfs: . + + a ; + ; + ; + "http://fr.wikipedia.org/wiki/Andorre" ; + "42.5"^^xsd:float ; + "1.51667"^^xsd:float ; + rdfs:label "Andorre"@fr . + + a ; + ; + ; + "http://fr.wikipedia.org/wiki/Kaliningrad" ; + "54.7333"^^xsd:double ; + "20.4833"^^xsd:float ; + rdfs:comment "Kaliningrad (en russe : Калининград), anciennement Kœnigsberg (en allemand : Königsberg ; en vieux-prussien : Twangste, Kunnegsgarbs, Knigsberg ; en lituanien : Karaliaučius ; en polonais : Królewiec), est une ville de Russie, située dans une exclave territoriale, l\'oblast de Kaliningrad, totalement isolée du territoire russe (jusqu\'en 1945 « Prusse-Orientale ») au bord de la mer Baltique, entre la Pologne et la Lituanie. Sa population s\'élève à 441 376 habitants en 2013."@fr ; + rdfs:label "Kaliningrad"@fr . + + a ; + ; + ; + "http://fr.wikipedia.org/wiki/Liechtenstein" ; + "47.145"^^xsd:float ; + "9.55389"^^xsd:float ; + rdfs:label "Liechtenstein"@fr . + + a ; + ; + ; + "http://fr.wikipedia.org/wiki/Llívia" ; + "42.4645"^^xsd:double ; + "1.982"^^xsd:float ; + rdfs:comment "Llívia (nom officiel en catalan), est une ville espagnole située en Cerdagne, dans la partie orientale des Pyrénées. Son territoire de 12,83 km2 présente la particularité d\'être enclavé à l’intérieur du département français des Pyrénées-Orientales, à 100 km environ à l’ouest de Perpignan. Elle fait partie de la province de Gérone et de la comarque de Basse-Cerdagne."@fr ; + rdfs:label "Llívia"@fr . 
+ + + + a ; + ; + ; + "http://fr.wikipedia.org/wiki/Monaco" ; + "43.7326"^^xsd:float ; + "7.41822"^^xsd:float ; + rdfs:label "Monaco"@fr . + + a ; + ; + ; + "http://fr.wikipedia.org/wiki/Saint-Marin" ; + "43.9347"^^xsd:float ; + "12.4461"^^xsd:float ; + rdfs:label "Saint-Marin"@fr . + + a ; + ; + ; + "http://fr.wikipedia.org/wiki/Vatican" ; + "41.9022"^^xsd:float ; + "12.4534"^^xsd:float ; + rdfs:label "Vatican"@fr . + + + + a ; + ; + ; + p:latitude 35.888668 ; p:longitude -5.3212738 ; + "http://fr.wikipedia.org/wiki/Ceuta" ; + rdfs:comment "Ceuta (du latin Septem Fratres ; Abyla dans l\'Antiquité ou Sebta سبتة en berbère et en arabe) est une ville autonome espagnole formant une encoche sur la côte nord du Maroc en Afrique."@fr ; + rdfs:label "Ceuta"@fr . + + + a ; + ; + ; + p:latitude 36.13705 ; p:longitude -5.34782 ; + "http://fr.wikipedia.org/wiki/Gibraltar" ; + rdfs:comment "Gibraltar (de l\'arabe « Djebel Tariq » (جبل طارق), « le mont de Tariq » du nom de Tariq ibn Ziyad), est un territoire britannique d\'outre-mer, situé au sud de la péninsule Ibérique, en bordure du détroit de Gibraltar qui relie la Méditerranée à l\'océan Atlantique. Il correspond au rocher de Gibraltar et à ses environs immédiats et est séparé de l\'Espagne par une frontière de 1,2 kilomètres. Gibraltar est possession du Royaume-Uni depuis 1704."@fr ; + rdfs:label "Gibraltar"@fr . + + + a ; + ; + ; + p:latitude 35.28276 ; p:longitude -2.94582 ; + "http://fr.wikipedia.org/wiki/Melilla" ; + rdfs:comment "Melilla (en berbère: Mřič (Mritch) [« la blanche »], ou Mliliya en darija) est une ville autonome espagnole située sur la côte nord-ouest de l\'Afrique, en face de la péninsule Ibérique, appartenant à la région géographique du Rif oriental et formant une enclave dans le territoire marocain. 
Administrée en tant que partie de la province de Malaga avant le 14 mars 1995, elle détient depuis le statut d\'une ville autonome, assez proche de celui d\'une communauté autonome espagnole."@fr ; + rdfs:label "Melilla"@fr . + + a ; + ; + ; + p:latitude 44.382810070345585 ; p:longitude 4.990425095893443 ; + "http://fr.wikipedia.org/wiki/Enclave_des_papes" ; + rdfs:comment "L\'enclave des papes est un espace géographique correspondant au Canton de Valréas, enclavé dans la Drôme, par les cantons de Nyons et de Saint-Paul-Trois-Châteaux et séparé du Vaucluse dont il fait partie.↑ Se note aussi « Enclave des Papes » bien que, s\'il s\'agit d\'un nom propre, il peut prendre une majuscule, mais un nom propre ne s\'accorde pas en nombre, c\'est-à-dire ne doit pas être mis au pluriel."@fr ; + rdfs:label "Enclave des papes"@fr . + + + a ; + ; + ; + "http://fr.wikipedia.org/wiki/Büsingen_am_Hochrhein" ; + "47.6969"^^xsd:double ; + "8.69028"^^xsd:float ; + rdfs:comment "Büsingen (Büsingen am Hochrhein en allemand, c’est-à-dire : « Büsingen sur le Rhin supérieur ») est une commune allemande du Bade-Wurtemberg dépendant administrativement de l\'arrondissement de Constance et enclavée dans le territoire de la Suisse, dont la plupart des 1 449 habitants[réf."@fr ; + rdfs:label "Büsingen am Hochrhein"@fr . + + a ; + ; + ; + "http://fr.wikipedia.org/wiki/Jungholz" ; + "47.5736"^^xsd:double ; + "10.4472"^^xsd:float ; + rdfs:comment "Jungholz est une commune du Tyrol autrichien, dans l\'ouest du pays. Elle a la particularité d\'être presque entièrement enclavée en territoire allemand et de n\'être reliée au reste de l\'Autriche que par un point."@fr ; + rdfs:label "Jungholz"@fr . + + + r:Baarle-Nassau rdfs:label "Baerle-Nassau"@fr . 
+ +r:Baerle-Duc a o:Place ; + o:dbpedia r:Baerle-Duc ; + o:thumbnail ; + o:wikiPageWikiLink r:Baarle-Nassau ; + o:wikiPageWikiLink r:Hoogstraten ; + o:wikiPageWikiLink r:Merksplas ; + o:wikiPageWikiLink r:Turnhout ; + o:wikipedia "http://fr.wikipedia.org/wiki/Baerle-Duc" ; + p:est r:Baarle-Nassau ; + p:latitude "51.4333"^^xsd:double ; + p:longitude "4.91667"^^xsd:double ; + p:nord r:Baarle-Nassau ; + p:ouest r:Baarle-Nassau ; + p:ouest r:Hoogstraten ; + p:sud r:Merksplas ; + p:sudEst r:Turnhout ; + rdfs:comment "Baerle-Duc (en néerlandais : Baarle-Hertog) est une commune néerlandophone de Belgique située en Région flamande dans la province d’Anvers et située partiellement dans la province néerlandaise du Brabant-Septentrional. Elle se caractérise par le fait unique qu’une partie de son territoire est enclavé en territoire néerlandais, enchevêtré en parcelles discontinues dans la commune néerlandaise de Baarle-Nassau. Baerle-Duc et la commune néerlandaise de Baarle-Nassau forment le village de Baarle."@fr ; + rdfs:label "Baerle-Duc"@fr . + +r:Hoogstraten rdfs:label "Hoogstraten"@fr . + + + +r:Merksplas rdfs:label "Merksplas"@fr . + +r:Turnhout rdfs:label "Turnhout"@fr . + + + + +r:Sastavci a o:Place ; + o:dbpedia r:Sastavci ; + o:thumbnail ; + o:wikipedia "http://fr.wikipedia.org/wiki/Sastavci" ; + p:latitude "43.5582"^^xsd:double ; + p:longitude "19.4249"^^xsd:float ; + rdfs:comment "Sastavci (en serbe : Саставци) est un village de Bosnie-Herzégovine. Il est complètement entouré par la Serbie, ce qui en fait une enclave."@fr ; + rdfs:label "Sastavci"@fr . + + + r:Brezovica_Žumberačka a o:Place ; + o:dbpedia r:Brezovica_Žumberačka ; + o:wikipedia "http://fr.wikipedia.org/wiki/Brezovica_Žumberačka" ; + p:latitude "45.41"^^xsd:double ; + p:longitude "15.18"^^xsd:float ; + rdfs:comment "Brezovica Žumberačka est un village situé dans la municipalité d\'Ozalj et le comitat de Karlovac, en Croatie. 
Deux parties du village y sont enclavées en Slovénie."@fr ; + rdfs:label "Brezovica Žumberačka"@fr . + + r:Akrotiri_et_Dhekelia a o:Place ; + o:dbpedia r:Akrotiri_et_Dhekelia ; + o:thumbnail ; + o:wikipedia "http://fr.wikipedia.org/wiki/Akrotiri_et_Dhekelia" ; + p:latitude "34.5833"^^xsd:float ; + p:longitude "32.9833"^^xsd:float ; + rdfs:comment "Akrotiri (Ακρωτήρι en grec ; appelé également Episkopi Garrison) et Dhekelia (Δεκέλεια en grec ; comprenant également le village de Ayios Nikolaos) représentent le dernier exemple de bases militaires souveraines (Sovereign Base Areas) britanniques à Chypre."@fr ; + rdfs:label "Akrotiri et Dhekelia"@fr . + + + a o:Place ; + o:dbpedia ; + o:thumbnail ; + o:wikipedia "http://fr.wikipedia.org/wiki/Campione_d\'Italia" ; + p:latitude "45.95"^^xsd:double ; + p:longitude "8.96667"^^xsd:float ; + rdfs:comment "Campione d\'Italia, appelée à l\'origine Campione, est une commune appartenant à la province de Côme (Lombardie, Italie). Située au bord du lac de Lugano dans le massif montagneux des Alpes, elle présente la particularité d\'être une enclave italienne en territoire suisse.↑ (it) Popolazione residente e bilancio demografico sur le site de l\'ISTAT."@fr ; + rdfs:label "Campione d\'Italia"@fr . + + a o:Place ; + o:dbpedia ; + o:thumbnail ; + o:wikipedia "http://fr.wikipedia.org/wiki/San\'kovo-Medvezh\'e" ; + p:latitude "52.4833"^^xsd:float ; + p:longitude "31.55"^^xsd:float ; + rdfs:comment "Sankovo-Medvezhye (russe : посёлок Саньково и посёлок Медвежье, biélorusse : Санькова-Мядзьвежжа) est une exclave russe dans le territoire Biélorusse d\'une superficie de 4,5 km². Elle est située dans l\'est du raïon de Dobrouch à 5 km du village russe de Dobrodeyevka et seulement à 800 m de la frontière russe.D\'un point de vue administratif, Sankovo-Medvezhye est une partie du district de Zlynkovsky dans l\'oblast de Briansk."@fr ; + rdfs:label "San\'kovo-Medvezh\'e"@fr . 
+ + + r:Vennbahn a o:Place ; + o:dbpedia r:Vennbahn ; + o:thumbnail ; + o:wikipedia "http://fr.wikipedia.org/wiki/Vennbahn" ; + p:latitude "50.424"^^xsd:float ; + p:longitude "6.221"^^xsd:float ; + rdfs:comment "La Vennbahn (littéralement ligne de chemin de fer des Fagnes) est une ligne de chemin de fer belge (Lignes 48 et 49) dont le parcours pénètre par endroits sur le territoire allemand. Toutefois, en vertu du traité de Versailles, la voie ferrée ainsi que ses stations et installations se trouvent entièrement sous la souveraineté de la Belgique et font donc partie de son territoire, créant ainsi plusieurs enclaves de l\'Allemagne du côté ouest de la ligne de chemin de fer,."@fr ; + rdfs:label "Vennbahn"@fr . \ No newline at end of file diff --git a/corese-unit-test/pom.xml b/corese-unit-test/pom.xml index 0acb8d9939..e848ce4f12 100644 --- a/corese-unit-test/pom.xml +++ b/corese-unit-test/pom.xml @@ -1,5 +1,7 @@ - + 4.0.0 @@ -67,6 +69,19 @@ 4.8.0 + + org.apache.logging.log4j + log4j-slf4j18-impl + + + org.apache.logging.log4j + log4j-api + + + org.apache.logging.log4j + log4j-core + + org.apache.jena jena-arq @@ -79,12 +94,14 @@ - + maven-clean-plugin 3.1.0 - + maven-resources-plugin 3.0.2 @@ -113,7 +130,8 @@ maven-deploy-plugin 2.8.2 - + maven-site-plugin 3.7.1 diff --git a/corese-unit-test/src/main/java/fr/inria/corese/engine/QGVisitor.java b/corese-unit-test/src/main/java/fr/inria/corese/engine/QGVisitor.java deleted file mode 100644 index 87ed7ca96c..0000000000 --- a/corese-unit-test/src/main/java/fr/inria/corese/engine/QGVisitor.java +++ /dev/null @@ -1,88 +0,0 @@ -package fr.inria.corese.engine; - -import java.util.HashMap; - -import fr.inria.corese.core.Graph; -import fr.inria.corese.core.api.QueryGraphVisitor; -import fr.inria.corese.core.edge.EdgeImpl; -import fr.inria.corese.sparql.triple.parser.ASTQuery; -import fr.inria.corese.compiler.parser.NodeImpl; -import fr.inria.corese.kgram.api.core.Edge; -import fr.inria.corese.kgram.api.core.Node; -import 
fr.inria.corese.kgram.core.Query; - -/** - * Example of Query Graph Visitor that replace blank nodes by variables - * and select * - * - */ -public class QGVisitor implements QueryGraphVisitor { - - static final String VAR = "?_kg_var_"; - int count = 0; - - Table table; - - class Table extends HashMap { - - } - - QGVisitor(){ - table = new Table(); - } - - - - - public ASTQuery visit(ASTQuery ast) { - ast.setSelectAll(true); - return ast; - } - - - public Edge visit(Edge ent) { - - if (! (ent.getEdge() instanceof EdgeImpl)){ - return ent; - } - - EdgeImpl edge = (EdgeImpl) ent.getEdge(); - - for (int i = 0; iInstructions for + * submitting implementation reports + * + */ +public class EarlRepportGenerator { + + private static final Logger logger = org.apache.logging.log4j.LogManager.getLogger(EarlRepportGenerator.class); + + private final Graph graph; + // eg "2023-01-25T10:18:04-08:00" + private final DateTimeFormatter dtf = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ssXXX"); + + private final String authorUri = "https://team.inria.fr/wimmics"; + private final String authorName = "Wimmics Team"; + + private final String softwareUri = "https://github.com/Wimmics/corese"; + private final String softwareName = "Corese"; + private final String softwareDescription = "Software platform implementing and extending the standards of the Semantic Web."; + private final String softwareLicense = "http://www.cecill.info/licences/Licence_CeCILL-C_V1-en.html"; + private final String softwareHomepage = "https://project.inria.fr/corese/"; + private final String softwareMailingList = "mailto:corese-users@inria.fr"; + private final String softwareDownload = "https://project.inria.fr/corese/download/"; + private final String softwareBugDatabase = "https://github.com/Wimmics/corese/issues"; + private final String softwareBlog = "https://github.com/Wimmics/corese/discussions/"; + private final String softwareProgrammingLanguage = "Java"; + + private final String releaseURI = 
"fc1825918302fec47852dc1f73ad1175c84fd7d1"; + private final String releaseDate = "2024-04-11"; + + private final Path reportDir = Path.of("corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf"); + private final Path inputReportPath = reportDir.resolve("testReport.csv"); + private final Path outputReportPath = reportDir.resolve("earlReport.ttl"); + + /** + * Constructor for the EarlRepportGenerator class. + */ + public EarlRepportGenerator() { + this.graph = Graph.create(); + } + + /** + * Generates the EARL report and writes it to the specified output directory. + * + * @param outputDir the output directory where to write the EARL report + */ + public void generate() { + + // Insert the document description in the graph + execSPARQL(insertQueryDescribeDocument()); + + // Insert the developer description in the graph + execSPARQL(insertQueryDescribeDeveloper()); + + // Insert the software description in the graph + execSPARQL(insertQueryDescribeSoftware()); + + // Insert the release description in the graph + execSPARQL(insertQueryDescribeRelease()); + + // Generate the EARL report in turtle format + TripleFormat format = TripleFormat.create(graph, this.getNSM()); + format.addPrefix = false; + + // Add the test results to the EARL report + try { + // read line by line the test report file + // for each line, add the test result to the EARL report + + for (String line : Files.readAllLines(inputReportPath)) { + String[] values = line.split(","); + String testUri = values[0]; + String testTime = values[1]; + String testResult = values[2]; + + execSPARQL(insertQueryDescribeTestResult(testUri, testTime, testResult)); + } + + } catch (IOException e) { + logger.error("Error while reading test report file: " + inputReportPath.toString(), e); + e.printStackTrace(); + } + + // Write the EARL report to the output directory + try { + format.write(outputReportPath.toString()); + } catch (IOException e) { + logger.error("Error while writing EARL report to file: " + 
outputReportPath.toString(), e); + e.printStackTrace(); + } + + } + + /** + * Returns a NSManager with the prefixes used in the EARL report. + * + * @return a NSManager with the prefixes used in the EARL report + */ + private NSManager getNSM() { + NSManager nsm = NSManager.create(); + nsm.setRecord(true); + nsm.definePrefix("earl", "http://www.w3.org/ns/earl#"); + nsm.definePrefix("dc", "http://purl.org/dc/terms/"); + nsm.definePrefix("foaf", "http://xmlns.com/foaf/0.1/"); + nsm.definePrefix("xsd", "http://www.w3.org/2001/XMLSchema#"); + nsm.definePrefix("doap", "http://usefulinc.com/ns/doap#"); + return nsm; + } + + /** + * Executes a SPARQL query on the graph. + * + * @param query the SPARQL query to execute + */ + private void execSPARQL(String query) { + QueryProcess exec = QueryProcess.create(graph); + try { + exec.query(query); + } catch (EngineException e) { + logger.error("Error while executing SPARQL query: " + query, e); + e.printStackTrace(); + } + } + + /** + * Builds a SPARQL query to insert the document description in the graph. + * + * @return a SPARQL query to insert the document description in the graph + */ + private String insertQueryDescribeDocument() { + + // Calculate the current date and time + String now = this.dtf.format(ZonedDateTime.now()); + + // Build the SPARQL query + StringBuilder sb = new StringBuilder(); + sb.append("PREFIX earl: \n"); + sb.append("PREFIX dc: \n"); + sb.append("PREFIX foaf: \n"); + sb.append("PREFIX xsd: \n"); + sb.append("INSERT DATA {\n"); + sb.append(" <> foaf:primaryTopic <").append(softwareUri).append("> ;\n"); + sb.append(" dc:issued \"").append(now).append("\"^^xsd:dateTime ;\n"); + sb.append(" foaf:maker <").append(authorUri).append("> .\n"); + sb.append("}\n"); + return sb.toString(); + } + + /** + * Builds a SPARQL query to insert the developer description in the graph. 
+ * + * @return a SPARQL query to insert the developer description in the graph + */ + private String insertQueryDescribeDeveloper() { + StringBuilder sb = new StringBuilder(); + sb.append("PREFIX earl: \n"); + sb.append("PREFIX foaf: \n"); + sb.append("INSERT DATA {\n"); + sb.append(" <").append(authorUri).append("> a foaf:Person , earl:Assertor ;\n"); + sb.append(" foaf:name \"").append(authorName).append("\" ;\n"); + + sb.append("}\n"); + return sb.toString(); + } + + /** + * Builds a SPARQL query to insert the software description in the graph. + * + * @return a SPARQL query to insert the software description in the graph + */ + private String insertQueryDescribeSoftware() { + StringBuilder sb = new StringBuilder(); + sb.append("PREFIX earl: \n"); + sb.append("PREFIX doap: \n"); + sb.append("INSERT DATA {\n"); + sb.append(" <").append(softwareUri).append("> a doap:Project, earl:Software, earl:TestSubject ;\n"); + sb.append(" doap:name \"").append(softwareName).append("\" ;\n"); + sb.append(" doap:release <").append(softwareUri).append("/commit/").append(releaseURI).append("> ;\n"); + sb.append(" doap:developer <").append(authorUri).append("> ;\n"); + sb.append(" doap:homepage <").append(softwareHomepage).append("> ;\n"); + sb.append(" doap:description \"").append(softwareDescription).append("\"@en ;\n"); + sb.append(" doap:license <").append(softwareLicense).append("> ;\n"); + sb.append(" doap:download-page <").append(softwareDownload).append("> ;\n"); + sb.append(" doap:bug-database <").append(softwareBugDatabase).append("> ;\n"); + sb.append(" doap:mailing-list <").append(softwareMailingList).append("> ;\n"); + sb.append(" doap:blog <").append(softwareBlog).append("> ;\n"); + sb.append(" doap:programming-language \"").append(softwareProgrammingLanguage).append("\" .\n"); + + sb.append("}\n"); + return sb.toString(); + } + + /** + * Builds a SPARQL query to insert the release description in the graph. 
+ * + * @return a SPARQL query to insert the release description in the graph + */ + private String insertQueryDescribeRelease() { + StringBuilder sb = new StringBuilder(); + sb.append("PREFIX doap: \n"); + sb.append("INSERT DATA {\n"); + sb.append(" <").append(softwareUri).append("/commit/").append(releaseURI).append("> doap:name \"") + .append(softwareName).append(" #").append(releaseURI.substring(0, 7)).append("\" ;\n"); + sb.append(" doap:revision \"#").append(releaseURI.substring(0, 7)).append("\" ;\n"); + sb.append(" doap:created \"").append(releaseDate).append("\"^^xsd:date ;\n"); + sb.append("}\n"); + return sb.toString(); + } + + /** + * Builds a SPARQL query to insert the test result in the graph. + * + * @param testUri the URI of the test + * @param testTime the time when the test was executed + * @param testResult the result of the test + * @return a SPARQL query to insert the test result in the graph + */ + private String insertQueryDescribeTestResult(String testUri, String testTime, String testResult) { + StringBuilder sb = new StringBuilder(); + sb.append("PREFIX earl: \n"); + sb.append("PREFIX dc: \n"); + sb.append("PREFIX xsd: \n"); + sb.append("INSERT DATA {\n"); + sb.append(" [ a earl:Assertion ;\n"); + sb.append(" earl:assertedBy <").append(authorUri).append("> ;\n"); + sb.append(" earl:subject <").append(softwareUri).append("> ;\n"); + sb.append(" earl:test <").append(testUri).append("> ;\n"); + sb.append(" earl:result [ a earl:TestResult ;\n"); + sb.append(" earl:outcome ").append("<").append(testResult).append(">").append(" ;\n"); + sb.append(" dc:date \"").append(testTime).append("\"^^xsd:dateTime\n"); + sb.append(" ] ;\n"); + sb.append(" earl:mode earl:automatic\n"); + sb.append(" ] .\n"); + sb.append("}\n"); + return sb.toString(); + } + + public static void main(String[] args) { + EarlRepportGenerator earlRepportGenerator = new EarlRepportGenerator(); + earlRepportGenerator.generate(); + } + +} \ No newline at end of file diff --git 
a/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/JUnitTestFileGenerator.java b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/JUnitTestFileGenerator.java new file mode 100644 index 0000000000..7d7bb3f621 --- /dev/null +++ b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/JUnitTestFileGenerator.java @@ -0,0 +1,272 @@ +package fr.inria.corese.w3cJunitTestsGenerator; + +import java.io.IOException; +import java.net.URI; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.time.LocalDate; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import fr.inria.corese.w3cJunitTestsGenerator.w3cTests.IW3cTest; + +/** + * Generates a JUnit test file for the W3C test suite. + */ +public class JUnitTestFileGenerator { + + private static final Logger logger = LogManager.getLogger(JUnitTestFileGenerator.class); + + private final URI manifestUri; + private final String testName; + private final List tests; + private final Path exportPath; + + public JUnitTestFileGenerator(String testName, URI manifestUri, Path exportPath, List tests) { + this.testName = testName; + this.manifestUri = manifestUri; + this.exportPath = exportPath.resolve(testName); + this.tests = tests; + } + + /** + * Generates a JUnit test file for the W3C test suite. 
+ */ + public void generate() { + + // Initialize directories + Path testDirectory = this.createDirectory(this.exportPath); + + // Generate file test + String fileName = testName + "Test.java"; + Path testFile = this.generateTestFile(testDirectory, fileName); + + // Write test file + try { + Files.write(testFile, this.generateTestFileContent(testFile.toString(), fileName).getBytes()); + logger.info("Wrote test file: " + testFile); + } catch (IOException e) { + logger.error("Failed to write test file: " + testFile, e); + } + + } + + /** + * Creates a directory at the specified path if it does not already exist. + * + * @param directoryPath The path to the directory to create. + * @param directoryType The type of directory to create. + */ + private Path createDirectory(Path directoryPath) { + if (!Files.exists(directoryPath)) { + try { + Files.createDirectories(directoryPath); + logger.info("Created directory: " + directoryPath); + } catch (IOException e) { + logger.error("Failed to create directory: " + directoryPath, e); + } + } + return directoryPath; + } + + /** + * Generates a test file at the specified path if it does not already exist. + * + * @param testDirectory The directory where the test file should be generated. + * @param fileName The name of the test file. + * @return The path to the test file. + */ + private Path generateTestFile(Path testDirectory, String fileName) { + Path filePath = testDirectory.resolve(fileName); + + if (Files.exists(filePath)) { + return filePath; + } + + try { + Files.createFile(filePath); + logger.info("Created test file: " + filePath); + } catch (IOException e) { + logger.error("Failed to create test file: " + filePath, e); + } + return filePath; + } + + /** + * Generates the content of the test file. + * + * @param path The path of the file. + * @param fileName The name of the file. + * @return The content of the test file. 
+ */ + private String generateTestFileContent(String path, String fileName) { + StringBuilder content = new StringBuilder(); + + // Package + content.append(this.getPackage(path, fileName)); + content.append("\n"); + content.append("\n"); + + // Imports + Set imports = new HashSet<>(); + for (IW3cTest test : tests) { + imports.addAll(test.getImports()); + } + imports.addAll(this.defineImports()); + imports.stream().sorted().forEach(imp -> content.append("import ").append(imp).append(";\n")); + content.append("\n"); + + // Class comment + DateTimeFormatter timeFormatter = DateTimeFormatter.ofPattern("HH:mm:ss"); + ZonedDateTime nowWithZone = ZonedDateTime.now(); + + content.append("/**\n"); + content.append(" * Auto-generated JUnit test file for the W3C test suite: "); + content.append(manifestUri); + content.append("\n"); + content.append(" * This file was automatically generated by JUnitTestFileGenerator.java.\n"); + content.append(" * Generation date: "); + content.append(LocalDate.now()); + content.append(", Time: "); + content.append(nowWithZone.format(timeFormatter)); + content.append(" "); + content.append(nowWithZone.getZone()); + content.append("\n"); + content.append(" */\n"); + + // Class declaration + content.append("public class "); + content.append(fileName.substring(0, fileName.indexOf("."))); + content.append(" {"); + content.append("\n"); + content.append("\n"); + + // Watcher + content.append(this.generateWatcher()); + + // Test methods + for (IW3cTest test : tests) { + content.append(test.generate()); + content.append("\n"); + } + + // End of class + content.append("}"); + + return content.toString(); + } + + /** + * Returns the package declaration based on the given path and file name. + * + * @param path The path of the file. + * @param fileName The name of the file. + * @return The package declaration. 
+ */ + private String getPackage(String path, String fileName) { + String packagePath = path.substring(path.indexOf("java") + 5) + .replace("/", ".") + .replace("." + fileName, ""); + return "package " + packagePath + ";"; + } + + /** + * Generates the watcher for the test file. + * + * @return The watcher for the test file. + */ + private String generateWatcher() { + StringBuilder watcher = new StringBuilder(); + + // Create a file testReport.csv in the directory of the test file + Path relativePathToResultCsv = Paths.get(System.getProperty("user.dir")).relativize(exportPath) + .resolve("testReport.csv"); + // Remove the first directory from the path + relativePathToResultCsv = relativePathToResultCsv.subpath(1, relativePathToResultCsv.getNameCount()); + watcher.append(" private static final String TEST_REPORT_FILE = " + + "Paths.get(System.getProperty(\"user.dir\")).resolve(\"" + relativePathToResultCsv.toString() + + "\").toString();\n"); + watcher.append(" private static final String MANIFEST_URI = \"" + + manifestUri.toString().substring(0, manifestUri.toString().lastIndexOf(".")) + "\";\n"); + watcher.append(" private static final String EARL = \"http://www.w3.org/ns/earl#\";\n"); + watcher.append("\n"); + + // Function to write the test report to the file testReport.csv + // Format: manifestUri#testName, datetime, http://www.w3.org/ns/earl#status + watcher.append(" /**\n"); + watcher.append(" * Writes the test report to the file testReport.csv.\n"); + watcher.append(" *\n"); + watcher.append(" * @param testName The name of the test.\n"); + watcher.append(" * @param success The status of the test.\n"); + watcher.append(" */\n"); + watcher.append(" private void writeTestReport(String testName, String success) {\n"); + watcher.append(" try {\n"); + watcher.append(" Path testReportPath = Paths.get(TEST_REPORT_FILE);\n"); + watcher.append( + " DateTimeFormatter dtf = DateTimeFormatter.ofPattern(\"yyyy-MM-dd'T'HH:mm:ssXXX\");\n"); + watcher.append( + " 
Files.write(testReportPath, (MANIFEST_URI + \"#\" + testName + \",\" + dtf.format(ZonedDateTime.now()) + \",\" + EARL + success + \"\\n\").getBytes(), StandardOpenOption.APPEND);\n"); + watcher.append(" } catch (IOException e) {\n"); + watcher.append(" e.printStackTrace();\n"); + watcher.append(" }\n"); + watcher.append(" }\n"); + watcher.append("\n"); + + watcher.append(" @Rule\n"); + watcher.append(" public TestWatcher watcher = new TestWatcher() {\n"); + watcher.append("\n"); + watcher.append(" @Override\n"); + watcher.append(" protected void failed(Throwable e, Description description) {\n"); + watcher.append(" writeTestReport(description.getMethodName(), \"failed\");\n"); + watcher.append(" }\n"); + watcher.append("\n"); + watcher.append(" @Override\n"); + watcher.append(" protected void succeeded(Description description) {\n"); + watcher.append(" writeTestReport(description.getMethodName(), \"passed\");\n"); + watcher.append(" }\n"); + watcher.append("\n"); + watcher.append(" @Override\n"); + watcher.append(" protected void skipped(AssumptionViolatedException e, Description description) {\n"); + watcher.append(" writeTestReport(description.getMethodName(), \"untested\");\n"); + watcher.append(" }\n"); + watcher.append(" };\n"); + watcher.append("\n"); + watcher.append(" // Create and clear the test report file\n"); + watcher.append(" @BeforeClass\n"); + watcher.append(" public static void createTestReportFile() {\n"); + watcher.append(" try {\n"); + watcher.append(" Path testReportPath = Paths.get(TEST_REPORT_FILE);\n"); + watcher.append(" Files.write(testReportPath, \"\".getBytes());\n"); + watcher.append(" } catch (IOException e) {\n"); + watcher.append(" e.printStackTrace();\n"); + watcher.append(" }\n"); + watcher.append(" }\n"); + watcher.append("\n"); + + return watcher.toString(); + } + + private Set defineImports() { + Set imports = new HashSet<>(); + imports.add("java.nio.file.Path"); + imports.add("java.nio.file.Paths"); + 
imports.add("java.nio.file.Files"); + imports.add("java.nio.file.StandardOpenOption"); + imports.add("org.junit.Rule"); + imports.add("org.junit.rules.TestWatcher"); + imports.add("org.junit.runner.Description"); + imports.add("org.junit.AssumptionViolatedException"); + imports.add("org.junit.BeforeClass"); + imports.add("java.time.format.DateTimeFormatter"); + imports.add("java.time.ZonedDateTime"); + return imports; + } + +} diff --git a/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/Main.java b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/Main.java new file mode 100644 index 0000000000..c2baba4ab6 --- /dev/null +++ b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/Main.java @@ -0,0 +1,43 @@ +package fr.inria.corese.w3cJunitTestsGenerator; + +import java.net.URI; +import java.nio.file.Path; +import java.nio.file.Paths; + +/** + * The Main class serves as the entry point for the application. + * It is responsible for initializing and executing the W3cTestsGenerator based + * on predefined paths. + */ +public class Main { + + // Define base directory using system's current directory + private static final Path BASE_PATH = Paths.get(System.getProperty("user.dir")); + + // Specify paths for tests, resources, and the manifest within the project + // structure + private static final Path TESTS_PATH_DIR = BASE_PATH + .resolve("corese-unit-test/src/test/java/fr/inria/corese/w3c"); + + /** + * Main method to execute the application. + * It creates and runs a W3cTestsGenerator with specified directories and + * manifest file. + * + * @param args Command line arguments (not used) + */ + public static void main(String[] args) { + generateW3cTests("canonicalRdf", "https://w3c.github.io/rdf-canon/tests/manifest.ttl"); + } + + /** + * Initializes and runs the W3cTestsGenerator for generating W3C tests. + * + * @param testName The name of the test suite to generate tests for. 
+ * @param manifestPath The path to the manifest file. + */ + private static void generateW3cTests(String testName, String manifestUri) { + W3cTestsGenerator generator = new W3cTestsGenerator(testName, URI.create(manifestUri), TESTS_PATH_DIR); + generator.generate(); + } +} diff --git a/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/W3cTestsGenerator.java b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/W3cTestsGenerator.java new file mode 100644 index 0000000000..11260ab058 --- /dev/null +++ b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/W3cTestsGenerator.java @@ -0,0 +1,144 @@ +package fr.inria.corese.w3cJunitTestsGenerator; + +import java.net.URI; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import fr.inria.corese.core.Graph; +import fr.inria.corese.core.load.Load; +import fr.inria.corese.core.query.QueryProcess; +import fr.inria.corese.kgram.core.Mapping; +import fr.inria.corese.kgram.core.Mappings; +import fr.inria.corese.w3cJunitTestsGenerator.w3cTests.IW3cTest; +import fr.inria.corese.w3cJunitTestsGenerator.w3cTests.factory.W3cTestFactory; +import fr.inria.corese.w3cJunitTestsGenerator.w3cTests.factory.W3cTestFactory.TestCreationException; + +/** + * Generates JUnit test cases from W3C test manifest files. + */ +public class W3cTestsGenerator { + + private static final Logger logger = LogManager.getLogger(W3cTestsGenerator.class); + + private final URI manifestUri; + private final Path testsPath; + private final String testName; + + /** + * Constructs a new W3cTestsGenerator with the specified test name, manifest + * file path and test directory path. + * + * @param testName The name of the test. + * @param manifestUri The URI of the manifest file. + * @param testsPath The path to tests directory. 
+ */ + public W3cTestsGenerator(String testName, URI manifestUri, Path testsPath) { + this.testName = testName; + this.manifestUri = manifestUri; + this.testsPath = testsPath; + } + + /** + * Generates JUnit test cases from the W3C test manifest file. + */ + public void generate() { + // Load manifest file + Graph graph = loadManifest(); + + // Generate list of test cases + List testCases = getListOfTestCases(graph); + + // Generate JUnit test file + JUnitTestFileGenerator generator = new JUnitTestFileGenerator(testName, manifestUri, testsPath, testCases); + generator.generate(); + } + + //////////////////////// + // Load manifest file // + //////////////////////// + + /** + * Loads the W3C test manifest file into a graph. + * + * @return The graph containing the manifest file. + */ + private Graph loadManifest() { + logger.info("Loading manifest file: " + manifestUri); + Graph graph = Graph.create(); + graph.init(); + Load loader = Load.create(graph); + + try { + loader.parse(manifestUri.toString()); + } catch (Exception e) { + logger.error("Error loading manifest file: " + manifestUri, e); + System.exit(1); + } + + return graph; + } + + //////////////////////////// + // Get list of test cases // + //////////////////////////// + + /** + * Gets the list of test cases from the specified graph. + * + * @param graph The graph containing the test cases. + * @return The list of test cases. 
+ */ + private List getListOfTestCases(Graph graph) { + QueryProcess exec = QueryProcess.create(graph); + String query = buildTestCasesQuery(); + Mappings mappings; + + try { + mappings = exec.query(query); + } catch (Exception e) { + logger.error("Error executing query.", e); + return new ArrayList<>(); + } + + if (mappings == null) { + logger.warn("Query returned null mappings."); + return new ArrayList<>(); + } + + List testCases = new ArrayList<>(); + for (Mapping mapping : mappings) { + String test = mapping.getValue("?test").getLabel(); + String type = mapping.getValue("?type").getLabel(); + try { + testCases.add(W3cTestFactory.createW3cTest(test, type, exec)); + } catch (TestCreationException e) { + logger.error("Error creating test: " + test, e); + System.exit(1); + } + } + + logger.info("Loaded " + testCases.size() + " test cases."); + return testCases; + } + + /** + * Builds a query to retrieve the test cases from the manifest file. + * + * @return The query to retrieve the test cases. + */ + private String buildTestCasesQuery() { + return "PREFIX mf: \n" + + "PREFIX rdf: \n" + + "PREFIX rdfs: \n" + + "\n" + + "SELECT ?type ?test WHERE {\n" + + " ?manifest a mf:Manifest .\n" + + " ?manifest mf:entries/rdf:rest*/rdf:first ?test .\n" + + " ?test rdf:type ?type .\n" + + "} ORDER BY ?test"; + } +} diff --git a/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/IW3cTest.java b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/IW3cTest.java new file mode 100644 index 0000000000..4ca5dc8b7c --- /dev/null +++ b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/IW3cTest.java @@ -0,0 +1,23 @@ +package fr.inria.corese.w3cJunitTestsGenerator.w3cTests; + +import java.util.Set; + +/** + * Interface for W3C tests. + */ +public interface IW3cTest { + + /** + * Returns the set of imports required for the W3C test. 
+ * + * @return the set of imports + */ + public Set getImports(); + + /** + * Generates the junit test for the W3C test. + * + * @return the junit test in string format + */ + public String generate(); +} diff --git a/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/factory/W3cTestFactory.java b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/factory/W3cTestFactory.java new file mode 100644 index 0000000000..222af0443e --- /dev/null +++ b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/factory/W3cTestFactory.java @@ -0,0 +1,148 @@ +package fr.inria.corese.w3cJunitTestsGenerator.w3cTests.factory; + +import java.net.URI; +import java.util.Map; +import java.util.Optional; + +import fr.inria.corese.core.print.rdfc10.HashingUtility.HashAlgorithm; +import fr.inria.corese.core.query.QueryProcess; +import fr.inria.corese.kgram.core.Mappings; +import fr.inria.corese.sparql.exceptions.EngineException; +import fr.inria.corese.w3cJunitTestsGenerator.w3cTests.IW3cTest; +import fr.inria.corese.w3cJunitTestsGenerator.w3cTests.implementations.RDFC10EvalTest; +import fr.inria.corese.w3cJunitTestsGenerator.w3cTests.implementations.RDFC10MapTest; +import fr.inria.corese.w3cJunitTestsGenerator.w3cTests.implementations.RDFC10NegativeEvalTest; + +/** + * Factory for creating W3C tests. + */ +public class W3cTestFactory { + + /** + * Map of test type URIs to test types. + */ + private static final Map typeMap = Map.of( + "https://w3c.github.io/rdf-canon/tests/vocab#RDFC10EvalTest", TestType.RDFC10EvalTest, + "https://w3c.github.io/rdf-canon/tests/vocab#RDFC10MapTest", TestType.RDFC10MapTest, + "https://w3c.github.io/rdf-canon/tests/vocab#RDFC10NegativeEvalTest", TestType.RDFC10NegativeEvalTest); + + /** + * Enumeration of test types. 
+     */
+    public enum TestType {
+        RDFC10EvalTest,
+        RDFC10MapTest,
+        RDFC10NegativeEvalTest
+    }
+
+    /**
+     * Creates a W3C test from the specified test name, type URI, and query process.
+     *
+     * @param test         The name of the test.
+     * @param typeUri      The URI of the test type.
+     * @param queryProcess The query process.
+     * @return The W3C test.
+     * @throws TestCreationException If an error occurs while creating the test.
+     */
+    public static IW3cTest createW3cTest(String test, String typeUri, QueryProcess queryProcess)
+            throws TestCreationException {
+        String query = buildTestDetailQuery(test);
+        Mappings mappings = executeQuery(queryProcess, query)
+                .orElseThrow(() -> new TestCreationException("Failed to retrieve test details for: " + test));
+
+        TestType type = typeMap.get(typeUri);
+        if (type == null) {
+            throw new TestCreationException("Unsupported test type URI: " + typeUri);
+        }
+
+        String name = mappings.getValue("?name").getLabel();
+        String comment = mappings.getValue("?comment") != null ? mappings.getValue("?comment").getLabel() : "";
+
+        HashAlgorithm hashAlgorithm = null;
+
+        if (mappings.getValue("?hashAlgorithm") != null) {
+            switch (mappings.getValue("?hashAlgorithm").getLabel()) {
+                case "SHA256":
+                    hashAlgorithm = HashAlgorithm.SHA_256;
+                    break;
+                case "SHA384":
+                    hashAlgorithm = HashAlgorithm.SHA_384;
+                    break;
+                default:
+                    throw new TestCreationException(
+                            "Unsupported hash algorithm: " + mappings.getValue("?hashAlgorithm").getLabel());
+            }
+        }
+
+        switch (type) {
+            case RDFC10EvalTest:
+                return new RDFC10EvalTest(
+                        test,
+                        name,
+                        comment,
+                        URI.create(mappings.getValue("?action").getLabel()),
+                        URI.create(mappings.getValue("?result").getLabel()),
+                        hashAlgorithm);
+            case RDFC10MapTest:
+                return new RDFC10MapTest(
+                        test,
+                        name,
+                        comment,
+                        URI.create(mappings.getValue("?action").getLabel()),
+                        URI.create(mappings.getValue("?result").getLabel()),
+                        hashAlgorithm);
+            case RDFC10NegativeEvalTest:
+                return new RDFC10NegativeEvalTest(
+                        test,
+                        name,
+                        comment,
+                        URI.create(mappings.getValue("?action").getLabel()));
+            default:
+                throw new TestCreationException("Unsupported test type: " + type);
+        }
+    }
+
+    /**
+     * Builds a query to retrieve the test details from the manifest file.
+     *
+     * @return The query to retrieve the test details.
+     */
+    private static String buildTestDetailQuery(String test) {
+        // NOTE(review): ?hashAlgorithm must be projected here, otherwise
+        // createW3cTest never sees a binding for it (SPARQL only returns
+        // variables listed in the SELECT clause).
+        return "PREFIX mf: <http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#>\n"
+                + "PREFIX rdfc: <https://w3c.github.io/rdf-canon/tests/vocab#>\n"
+                + "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n"
+                + "SELECT ?name ?comment ?action ?result ?hashAlgorithm WHERE {"
+                + "    <" + test + "> mf:name ?name ;"
+                + "        mf:action ?action ."
+                + "    optional { <" + test + "> mf:result ?result } ."
+                + "    optional { <" + test + "> rdfs:comment ?comment } ."
+                + "    optional { <" + test + "> rdfc:hashAlgorithm ?hashAlgorithm } ."
+                + "}";
+    }
+
+    /**
+     * Executes the specified query using the specified query process.
+     *
+     * @param queryProcess The query process.
+     * @param query        The query to execute.
+     * @return The mappings resulting from the query execution, or an empty optional
+     *         if an error occurs.
+     */
+    private static Optional<Mappings> executeQuery(QueryProcess queryProcess, String query) {
+        try {
+            return Optional.ofNullable(queryProcess.query(query));
+        } catch (EngineException e) {
+            e.printStackTrace();
+            return Optional.empty();
+        }
+    }
+
+    /**
+     * Exception thrown when an error occurs while creating a test.
+     */
+    public static class TestCreationException extends Exception {
+        public TestCreationException(String message) {
+            super(message);
+        }
+    }
+}
diff --git a/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/implementations/RDFC10EvalTest.java b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/implementations/RDFC10EvalTest.java
new file mode 100644
index 0000000000..d5f9a34585
--- /dev/null
+++ b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/implementations/RDFC10EvalTest.java
@@ -0,0 +1,94 @@
+package fr.inria.corese.w3cJunitTestsGenerator.w3cTests.implementations;
+
+import java.net.URI;
+import java.util.Set;
+
+import fr.inria.corese.core.print.rdfc10.HashingUtility.HashAlgorithm;
+import fr.inria.corese.w3cJunitTestsGenerator.w3cTests.IW3cTest;
+
+/**
+ * Represents a test for the RDFC10EvalTest type.
+ */
+public class RDFC10EvalTest implements IW3cTest {
+
+    // Local part of the test URI (fragment after '#'); used as the JUnit method name.
+    private String test;
+    private String name;
+    private String comment;
+
+    private URI actionFile;
+
+    private URI resultFile;
+
+    // May be null; null or SHA_256 selects the default canonicalization hash.
+    private HashAlgorithm hashAlgorithm;
+
+    public RDFC10EvalTest(String testUri, String name, String comment, URI actionUri, URI resultUri,
+            HashAlgorithm hashAlgorithm) {
+        this.test = testUri.split("#")[1];
+        this.name = name;
+        this.comment = comment;
+        this.actionFile = actionUri;
+        this.resultFile = resultUri;
+        this.hashAlgorithm = hashAlgorithm;
+    }
+
+    @Override
+    public Set<String> getImports() {
+        return Set.of("fr.inria.corese.core.Graph",
+                "fr.inria.corese.core.load.Load",
+                "fr.inria.corese.core.print.rdfc10.HashingUtility.HashAlgorithm",
+                "fr.inria.corese.core.print.CanonicalRdf10Format",
+                "static org.junit.Assert.assertEquals",
+                "java.io.IOException",
+                "java.net.URISyntaxException",
+                "org.junit.Test",
+                "fr.inria.corese.core.load.LoadException",
+                "java.net.URL",
+                "java.util.Scanner");
+    }
+
+    @Override
+    public String generate() {
+        StringBuilder sb = new StringBuilder();
+
+        // Header of the test
+        sb.append("    // ").append(this.name).append("\n");
+        if (!this.comment.isEmpty()) {
+            sb.append("    // ").append(this.comment).append("\n");
+        }
+        sb.append("    @Test\n");
+        sb.append("    public void ").append(test);
+        sb.append("() throws IOException, LoadException, URISyntaxException {\n");
+
+        // Test body
+        sb.append("        // Create graph and load action file\n");
+        sb.append("        Graph graph = Graph.create();\n");
+        sb.append("        Load ld = Load.create(graph);\n");
+        sb.append("        ld.parse(\"").append(actionFile).append("\");\n");
+        sb.append("\n");
+        sb.append("        // Create canonical RDF 1.0 format and convert graph to string\n");
+        if (hashAlgorithm != null && hashAlgorithm != HashAlgorithm.SHA_256) {
+            sb.append("        CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph, HashAlgorithm.")
+                    .append(hashAlgorithm).append(");\n");
+        } else {
+            sb.append("        CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph);\n");
+        }
+        sb.append("        String result = rdfc10.toString();\n");
+        sb.append("\n");
+        sb.append("        // Load expected result file\n");
+        sb.append("        URL url = new URL(\"").append(resultFile).append("\");\n");
+        sb.append("        Scanner scanner = new Scanner(url.openStream(), \"UTF-8\");\n");
+        sb.append("        scanner.useDelimiter(\"\\\\A\");\n");
+        sb.append("        String expected = scanner.hasNext() ? scanner.next() : \"\";\n");
+        sb.append("        scanner.close();\n");
+        sb.append("\n");
+
+        // Test assertion
+        sb.append("        assertEquals(expected, result);\n");
+
+        // Footer of the test
+        sb.append("    }\n");
+
+        return sb.toString();
+    }
+
+}
diff --git a/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/implementations/RDFC10MapTest.java b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/implementations/RDFC10MapTest.java
new file mode 100644
index 0000000000..2f90d8e03d
--- /dev/null
+++ b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/implementations/RDFC10MapTest.java
@@ -0,0 +1,92 @@
+package fr.inria.corese.w3cJunitTestsGenerator.w3cTests.implementations;
+
+import java.net.URI;
+import java.util.Set;
+
+import fr.inria.corese.core.print.rdfc10.HashingUtility.HashAlgorithm;
+import fr.inria.corese.w3cJunitTestsGenerator.w3cTests.IW3cTest;
+
+/**
+ * Represents a test for the RDFC10MapTest type.
+ */
+public class RDFC10MapTest implements IW3cTest {
+
+    // Local part of the test URI (fragment after '#'); used as the JUnit method name.
+    private String test;
+    private String name;
+    private String comment;
+
+    private URI actionFile;
+
+    private URI resultFile;
+
+    // May be null; null or SHA_256 selects the default canonicalization hash.
+    private HashAlgorithm hashAlgorithm;
+
+    public RDFC10MapTest(String testUri, String name, String comment, URI actionUri, URI resultUri,
+            HashAlgorithm hashAlgorithm) {
+        this.test = testUri.split("#")[1];
+        this.name = name;
+        this.comment = comment;
+        this.actionFile = actionUri;
+        this.resultFile = resultUri;
+        this.hashAlgorithm = hashAlgorithm;
+    }
+
+    @Override
+    public Set<String> getImports() {
+        return Set.of("fr.inria.corese.core.Graph",
+                "fr.inria.corese.core.load.Load",
+                "fr.inria.corese.core.print.CanonicalRdf10Format",
+                "fr.inria.corese.core.print.rdfc10.HashingUtility.HashAlgorithm",
+                "java.net.URL",
+                "com.fasterxml.jackson.core.type.TypeReference",
+                "com.fasterxml.jackson.databind.ObjectMapper",
+                "fr.inria.corese.core.load.LoadException",
+                "java.io.IOException",
+                "org.junit.Test",
+                "static org.junit.Assert.assertEquals",
+                "java.util.Map");
+
+    }
+
+    @Override
+    public String generate() {
+        StringBuilder sb = new StringBuilder();
+
+        // Header of the test
+        sb.append("    // ").append(this.name).append("\n");
+        if (!this.comment.isEmpty()) {
+            sb.append("    // ").append(this.comment).append("\n");
+        }
+        sb.append("    @Test\n");
+        sb.append("    public void ").append(test);
+        sb.append("() throws LoadException, IOException {\n");
+
+        // Test body
+        sb.append("        // Create graph and load action file\n");
+        sb.append("        Graph graph = Graph.create();\n");
+        sb.append("        Load ld = Load.create(graph);\n");
+        sb.append("        ld.setRenameBlankNode(false);\n");
+        sb.append("        ld.parse(\"").append(actionFile).append("\");\n");
+        sb.append("\n");
+        sb.append("        // Create canonical RDF 1.0 format and get map of issued identifiers\n");
+        if (hashAlgorithm != null && hashAlgorithm != HashAlgorithm.SHA_256) {
+            sb.append("        CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph, HashAlgorithm.")
+                    .append(hashAlgorithm).append(");\n");
+        } else {
+            sb.append("        CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph);\n");
+        }
+        sb.append("        Map<String, String> result = rdfc10.getIssuedIdentifiersMap();\n");
+        sb.append("\n");
+        sb.append("        // Load map from result json file\n");
+        sb.append("        URL url = new URL(\"").append(resultFile).append("\");\n");
+        sb.append(
+                "        Map<String, String> expect = new ObjectMapper().readValue(url, new TypeReference<Map<String, String>>(){});\n");
+        sb.append("\n");
+        sb.append("        // Compare the two maps\n");
+        sb.append("        assertEquals(expect, result);\n");
+        sb.append("    }\n");
+
+        return sb.toString();
+    }
+
+}
diff --git a/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/implementations/RDFC10NegativeEvalTest.java b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/implementations/RDFC10NegativeEvalTest.java
new file mode 100644
index 0000000000..ae6e7840c6
--- /dev/null
+++ b/corese-unit-test/src/main/java/fr/inria/corese/w3cJunitTestsGenerator/w3cTests/implementations/RDFC10NegativeEvalTest.java
@@ -0,0 +1,68 @@
+package fr.inria.corese.w3cJunitTestsGenerator.w3cTests.implementations;
+
+import java.net.URI;
+import java.util.Set;
+
+import fr.inria.corese.w3cJunitTestsGenerator.w3cTests.IW3cTest;
+
+/**
+ * Represents a test for the RDFC10NegativeEvalTest type.
+ */
+public class RDFC10NegativeEvalTest implements IW3cTest {
+
+    // Local part of the test URI (fragment after '#'); used as the JUnit method name.
+    private String test;
+    private String name;
+    private String comment;
+
+    private URI actionFile;
+
+    public RDFC10NegativeEvalTest(String testUri, String name, String comment, URI actionUri) {
+        this.test = testUri.split("#")[1];
+        this.name = name;
+        this.comment = comment;
+        this.actionFile = actionUri;
+    }
+
+    @Override
+    public Set<String> getImports() {
+        return Set.of(
+                "fr.inria.corese.core.print.rdfc10.CanonicalRdf10.CanonicalizationException",
+                "java.io.IOException",
+                "fr.inria.corese.core.load.LoadException",
+                "fr.inria.corese.core.Graph",
+                "fr.inria.corese.core.load.Load",
+                "fr.inria.corese.core.print.CanonicalRdf10Format",
+                "org.junit.Test");
+    }
+
+    @Override
+    public String generate() {
+        StringBuilder sb = new StringBuilder();
+
+        // Header of the test
+        sb.append("    // ").append(this.name).append("\n");
+        if (!this.comment.isEmpty()) {
+            sb.append("    // ").append(this.comment).append("\n");
+        }
+        sb.append("    @Test(expected = CanonicalizationException.class)\n");
+        sb.append("    public void ").append(test);
+        sb.append("() throws IOException, LoadException {\n");
+
+        // Test body
+        sb.append("        // Create graph and load action file\n");
+        sb.append("        Graph graph = Graph.create();\n");
+        sb.append("        Load ld = Load.create(graph);\n");
+        sb.append("        ld.parse(\"").append(actionFile).append("\");\n");
+        sb.append("\n");
+        sb.append("        // Attempt to create canonical RDF 1.0 format, expecting a failure\n");
+        sb.append("        CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph);\n");
+        sb.append("        // This line should trigger the CanonicalizationException\n");
+        sb.append("        rdfc10.toString();\n");
+
+        // Footer of the test
+        sb.append("    }\n");
+
+        return sb.toString();
+    }
+
+}
diff --git a/corese-unit-test/src/main/java/module-info.java b/corese-unit-test/src/main/java/module-info.java
index 821b5d80a7..b83259263e 100644
--- a/corese-unit-test/src/main/java/module-info.java
+++ 
b/corese-unit-test/src/main/java/module-info.java @@ -13,6 +13,5 @@ requires transitive org.apache.jena.iri; requires com.google.common; requires org.slf4j; - - opens fr.inria.corese.engine; + requires org.apache.logging.log4j; } \ No newline at end of file diff --git a/corese-unit-test/src/main/resources/log4j2.xml b/corese-unit-test/src/main/resources/log4j2.xml index 7f2a9bcb44..b810acdb3d 100644 --- a/corese-unit-test/src/main/resources/log4j2.xml +++ b/corese-unit-test/src/main/resources/log4j2.xml @@ -1,17 +1,22 @@ - + - + - - + + - - + - + + + + + + + + \ No newline at end of file diff --git a/corese-unit-test/src/test/java/fr/inria/corese/engine/TestQuery1.java b/corese-unit-test/src/test/java/fr/inria/corese/engine/TestQuery1.java index 6c47c504ae..7df5d59918 100644 --- a/corese-unit-test/src/test/java/fr/inria/corese/engine/TestQuery1.java +++ b/corese-unit-test/src/test/java/fr/inria/corese/engine/TestQuery1.java @@ -35,7 +35,6 @@ import fr.inria.corese.core.producer.DataFilter; import fr.inria.corese.core.producer.DataFilterFactory; import fr.inria.corese.core.query.QueryEngine; -import fr.inria.corese.core.query.QueryGraph; import fr.inria.corese.core.query.QueryProcess; import fr.inria.corese.core.transform.Loader; import fr.inria.corese.core.transform.Transformer; @@ -8657,67 +8656,6 @@ public void testRelax() { } - /** - * Create a Query graph from an RDF Graph Execute the query Use case: find - * similar Graphs (cf Corentin) - */ - - public void testQueryGraph() { - - Graph graph = createGraph(); - QueryProcess exec = QueryProcess.create(graph); - - String init = "prefix : " - + "" - + "insert data {" - + ":a :p :b, :c ." - + ":b :q :d " - + ":c :q :d " - + ":d :p :e " - + ":e :q :f " - + "" - + "} "; - - String cons = "prefix : " - + "" - + "construct {?x :p []}" - + "where {?x :p ?y}"; - - String init2 = "prefix : " - + "" - + "insert data {" - + ":a :p [] ." 
- + "}"; - - try { - // create a graph - exec.query(init); - - // create a copy where triple objects (values) are Blank Nodes (aka Variables) - // consider the copy as a Query Graph and execute it - Mappings map = exec.queryGraph(cons); - - assertEquals("Results", 4, map.size()); - - Graph g2 = createGraph(); - QueryProcess exec2 = QueryProcess.create(g2); - exec2.query(init2); - - QueryGraph qg = QueryGraph.create(g2); - new QGVisitor(); - // qg.setVisitor(vis); - qg.setConstruct(true); - map = exec.query(qg); - - Graph res = exec.getGraph(map); - assertEquals("Results", 2, res.size()); - - } catch (EngineException e) { - e.printStackTrace(); - } - - } - @Test public void testOption() { diff --git a/corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf/canonicalRdfTest.java b/corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf/canonicalRdfTest.java new file mode 100644 index 0000000000..f068da9001 --- /dev/null +++ b/corese-unit-test/src/test/java/fr/inria/corese/w3c/canonicalRdf/canonicalRdfTest.java @@ -0,0 +1,1970 @@ +package fr.inria.corese.w3c.canonicalRdf; + +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import fr.inria.corese.core.Graph; +import fr.inria.corese.core.load.Load; +import fr.inria.corese.core.load.LoadException; +import fr.inria.corese.core.print.CanonicalRdf10Format; +import fr.inria.corese.core.print.rdfc10.CanonicalRdf10.CanonicalizationException; +import fr.inria.corese.core.print.rdfc10.HashingUtility.HashAlgorithm; +import java.io.IOException; +import java.net.URISyntaxException; +import java.net.URL; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardOpenOption; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; +import java.util.Map; +import java.util.Scanner; +import org.junit.AssumptionViolatedException; +import org.junit.BeforeClass; +import org.junit.Rule; 
+import org.junit.Test; +import org.junit.rules.TestWatcher; +import org.junit.runner.Description; +import static org.junit.Assert.assertEquals; + +/** + * Auto-generated JUnit test file for the W3C test suite: https://w3c.github.io/rdf-canon/tests/manifest.ttl + * This file was automatically generated by JUnitTestFileGenerator.java. + * Generation date: 2024-07-01, Time: 19:20:37 Europe/Paris + */ +public class canonicalRdfTest { + + private static final String TEST_REPORT_FILE = Paths.get(System.getProperty("user.dir")).resolve("src/test/java/fr/inria/corese/w3c/canonicalRdf/testReport.csv").toString(); + private static final String MANIFEST_URI = "https://w3c.github.io/rdf-canon/tests/manifest"; + private static final String EARL = "http://www.w3.org/ns/earl#"; + + /** + * Writes the test report to the file testReport.csv. + * + * @param testName The name of the test. + * @param success The status of the test. + */ + private void writeTestReport(String testName, String success) { + try { + Path testReportPath = Paths.get(TEST_REPORT_FILE); + DateTimeFormatter dtf = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ssXXX"); + Files.write(testReportPath, (MANIFEST_URI + "#" + testName + "," + dtf.format(ZonedDateTime.now()) + "," + EARL + success + "\n").getBytes(), StandardOpenOption.APPEND); + } catch (IOException e) { + e.printStackTrace(); + } + } + + @Rule + public TestWatcher watcher = new TestWatcher() { + + @Override + protected void failed(Throwable e, Description description) { + writeTestReport(description.getMethodName(), "failed"); + } + + @Override + protected void succeeded(Description description) { + writeTestReport(description.getMethodName(), "passed"); + } + + @Override + protected void skipped(AssumptionViolatedException e, Description description) { + writeTestReport(description.getMethodName(), "untested"); + } + }; + + // Create and clear the test report file + @BeforeClass + public static void createTestReportFile() { + try { + Path 
testReportPath = Paths.get(TEST_REPORT_FILE); + Files.write(testReportPath, "".getBytes()); + } catch (IOException e) { + e.printStackTrace(); + } + } + + // simple id + @Test + public void test001c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test001-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test001-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // duplicate property iri values + @Test + public void test002c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test002-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test002-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // bnode + @Test + public void test003c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test003-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test003-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // bnode (map test) + @Test + public void test003m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test003-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test003-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // bnode plus embed w/subject + @Test + public void test004c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test004-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + 
CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test004-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // bnode plus embed w/subject (map test) + @Test + public void test004m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test004-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test004-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // bnode embed + @Test + public void test005c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test005-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test005-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // bnode embed (map test) + @Test + public void test005m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test005-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test005-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // multiple rdf types + @Test + public void test006c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test006-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test006-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // single subject complex + @Test + public void test008c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test008-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test008-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // multiple subjects - complex + @Test + public void test009c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test009-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test009-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // type + @Test + public void test010c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test010-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test010-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // type-coerced type + @Test + public void test011c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test011-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test011-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // type-coerced type, cycle + @Test + public void test013c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test013-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test013-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // check types + @Test + public void test014c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test014-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test014-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - dual link - embed + @Test + public void test016c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test016-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test016-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - dual link - embed (map test) + @Test + public void test016m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test016-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test016-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // blank node - dual link - non-embed + @Test + public void test017c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test017-in.nq"); + + // Create 
canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test017-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - dual link - non-embed (map test) + @Test + public void test017m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test017-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test017-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // blank node - self link + @Test + public void test018c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test018-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test018-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - self link (map test) + @Test + public void test018m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test018-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test018-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // blank node - disjoint self links + @Test + public void test019c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test019-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test019-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - diamond + @Test + public void test020c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test020-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test020-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - diamond (map test) + @Test + public void test020m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test020-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test020-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // blank node - circle of 2 + @Test + public void test021c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test021-in.nq"); + + // Create canonical RDF 1.0 format and 
convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test021-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - double circle of 2 + @Test + public void test022c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test022-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test022-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - circle of 3 + @Test + public void test023c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test023-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test023-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - double circle of 3 (0-1-2) + @Test + public void test024c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test024-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test024-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - double circle of 3 (0-2-1) + @Test + public void test025c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test025-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test025-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - double circle of 3 (1-0-2) + @Test + public void test026c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test026-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test026-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - double circle of 3 (1-2-0) + @Test + public void test027c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test027-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test027-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - double circle of 3 (2-1-0) + @Test + public void test028c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test028-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test028-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - double circle of 3 (2-0-1) + @Test + public void test029c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test029-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test029-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - point at circle of 3 + @Test + public void test030c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test030-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test030-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - point at circle of 3 (map test) + @Test + public void test030m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test030-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test030-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // disjoint identical subgraphs (1) + @Test + public void test033c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test033-in.nq"); + + // Create 
canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test033-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // disjoint identical subgraphs (2) + @Test + public void test034c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test034-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test034-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // reordered w/strings (1) + @Test + public void test035c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test035-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test035-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // reordered w/strings (2) + @Test + public void test036c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test036-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test036-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // reordered 4 bnodes, reordered 2 properties (1) + @Test + public void test038c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test038-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test038-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // reordered 4 bnodes, reordered 2 properties (2) + @Test + public void test039c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test039-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test039-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // reordered 6 bnodes (1) + @Test + public void test040c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test040-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test040-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // literal with language + @Test + public void test043c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test043-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test043-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // poison – evil (1) + // A poison graph which is computable given defined limits. 
+ @Test + public void test044c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test044-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test044-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // poison – evil (2) + // A poison graph which is computable given defined limits. + @Test + public void test045c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test045-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test045-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // poison – evil (3) + // A poison graph which is computable given defined limits. 
+ @Test + public void test046c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test046-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test046-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // deep diff (1) + @Test + public void test047c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test047-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test047-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // deep diff (1) (map test) + @Test + public void test047m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test047-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test047-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // deep diff (2) + @Test + public void test048c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test048-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test048-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // deep diff (2) (map test) + @Test + public void test048m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test048-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test048-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // @list + // RDF Collections using rdf:first/rest ladders. + @Test + public void test053c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test053-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test053-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // @list (map test) + // RDF Collections using rdf:first/rest ladders. 
+ @Test + public void test053m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test053-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test053-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // t-graph + @Test + public void test054c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test054-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test054-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // simple reorder (1) + @Test + public void test055c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test055-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test055-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // simple reorder (1) (map test) + @Test + public void test055m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test055-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test055-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // simple reorder (2) + @Test + public void test056c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test056-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to 
string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test056-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // simple reorder (2) (map test) + @Test + public void test056m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test056-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test056-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // unnamed graph + @Test + public void test057c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test057-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test057-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // unnamed graph (map test) + @Test + public void test057m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test057-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test057-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // unnamed graph with blank node objects + @Test + public void test058c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test058-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test058-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // n-quads parsing + @Test + public void test059c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test059-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test059-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // n-quads escaping + @Test + public void test060c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test060-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test060-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // n-quads escaping (map test) + @Test + public void test060m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test060-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test060-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // same literal value with multiple languages + @Test + public void test061c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test061-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test061-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // same literal value with multiple datatypes + @Test + public void test062c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test062-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test062-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - diamond (with _:b) + // This duplicates #test020, but uses _:b as a blank node prefix + @Test + public void test063c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test063-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test063-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - diamond (with _:b) (map test) + // This duplicates #test020, but uses _:b as a blank node prefix + @Test + public void test063m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test063-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test063-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // blank node - double circle of 3 (0-1-2, reversed) + @Test + public void test064c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test064-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test064-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - double circle of 3 (0-2-1, reversed) + @Test + public void test065c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test065-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test065-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - double circle of 3 (1-0-2, reversed) + @Test + public void test066c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test066-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test066-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - double circle of 3 (1-2-0, reversed) + @Test + public void test067c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test067-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test067-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - double circle of 3 (2-1-0, reversed) + @Test + public void test068c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test068-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test068-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - double circle of 3 (2-0-1, reversed) + @Test + public void test069c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test069-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test069-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // dataset - isomorphic default and iri named + // Isomorphic graphs in default and IRI named graph + @Test + public void test070c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test070-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test070-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // dataset - isomorphic default and iri named (map test) + // Isomorphic graphs in default and IRI named graph + @Test + public void test070m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test070-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test070-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // dataset - isomorphic default and node named + // Isomorphic graphs in default and blank node named graph + @Test + public void test071c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test071-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test071-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // dataset - isomorphic default and node named (map test) + // Isomorphic graphs in default and blank node named graph + @Test + public void test071m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test071-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test071-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // dataset - shared blank nodes + // Blank nodes shared in default and named graph + @Test + public void test072c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test072-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test072-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // dataset - shared blank nodes (map test) + // Blank nodes shared in default and named graph + @Test + public void test072m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test072-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test072-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // dataset - referencing graph name + // Default graph with blank node shared with graph name + @Test + public void test073c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test073-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test073-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // dataset - referencing graph name (map test) + // Default graph with blank node shared with graph name + @Test + public void test073m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test073-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test073-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // poison - Clique Graph (negative test) + // A 10-node Clique of blank node resources all inter-related. 
+ @Test(expected = CanonicalizationException.class) + public void test074c() throws IOException, LoadException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test074-in.nq"); + + // Attempt to create canonical RDF 1.0 format, expecting a failure + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + // This line should trigger the CanonicalizationException + rdfc10.toString(); + } + + // blank node - diamond (uses SHA-384) + // Same as test020 except for using SHA-384 + @Test + public void test075c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test075-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph, HashAlgorithm.SHA_384); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test075-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // blank node - diamond (uses SHA-384) (map test) + // Same as test020 except for using SHA-384 + @Test + public void test075m() throws LoadException, IOException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.setRenameBlankNode(false); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test075-in.nq"); + + // Create canonical RDF 1.0 format and get map of issued identifiers + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph, HashAlgorithm.SHA_384); + Map result = rdfc10.getIssuedIdentifiersMap(); + + // Load map from result json file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test075-rdfc10map.json"); + Map expect = new ObjectMapper().readValue(url, new TypeReference>(){}); + + // Compare the two maps + assertEquals(expect, result); + } + + // duplicate ground triple in input + // The duplicate triples must be removed + @Test + public void test076c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test076-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test076-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? 
scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + + // duplicate triple with blank node in input + // The duplicate triples must be removed + @Test + public void test077c() throws IOException, LoadException, URISyntaxException { + // Create graph and load action file + Graph graph = Graph.create(); + Load ld = Load.create(graph); + ld.parse("https://w3c.github.io/rdf-canon/tests/rdfc10/test077-in.nq"); + + // Create canonical RDF 1.0 format and convert graph to string + CanonicalRdf10Format rdfc10 = CanonicalRdf10Format.create(graph); + String result = rdfc10.toString(); + + // Load expected result file + URL url = new URL("https://w3c.github.io/rdf-canon/tests/rdfc10/test077-rdfc10.nq"); + Scanner scanner = new Scanner(url.openStream(), "UTF-8"); + scanner.useDelimiter("\\A"); + String expected = scanner.hasNext() ? scanner.next() : ""; + scanner.close(); + + assertEquals(expected, result); + } + +} \ No newline at end of file diff --git a/corese-unit-test/src/test/resources/data/rdf-star-main/Gemfile.lock b/corese-unit-test/src/test/resources/data/rdf-star-main/Gemfile.lock index a08b3b5caf..1bf1106cc2 100644 --- a/corese-unit-test/src/test/resources/data/rdf-star-main/Gemfile.lock +++ b/corese-unit-test/src/test/resources/data/rdf-star-main/Gemfile.lock @@ -44,7 +44,7 @@ GEM multi_json (1.15.0) net-http-persistent (4.0.1) connection_pool (~> 2.2) - rack (2.2.6.4) + rack (2.2.8.1) rake (13.0.6) rdf (3.1.15) hamster (~> 3.0) diff --git a/corese-unit-test/src/test/resources/log4j2.xml b/corese-unit-test/src/test/resources/log4j2.xml index 4cff86b10d..d88ae47d73 100644 --- a/corese-unit-test/src/test/resources/log4j2.xml +++ b/corese-unit-test/src/test/resources/log4j2.xml @@ -1,17 +1,21 @@ - + - + - - + + + + + + - + - + \ No newline at end of file diff --git a/docs/.gitignore b/docs/.gitignore new file mode 100644 index 0000000000..bc08b9612f --- /dev/null +++ b/docs/.gitignore @@ -0,0 +1,17 @@ +# Generated files for docs 
+generated/ +bak/ +build/ + +source/* +!source/**/*.rst +!source/**/*.py +!source/install.md +!source/Doxyfile + + +!source/rest_api/ +!source/cli_ref/ + +!source/_templates/ +!source/_static/ diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000000..50125ba179 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,43 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = source +BUILDDIR = build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile symbolic_links + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +# As a prerequisite create symbolic links to the subdirectories +# of /docs in the /docs/source directory. This is done to allow +# the Makefile to include all the MARKDOWN files in the /docs +# directory in the documentation rendered as HTML by Sphinx. +symbolic_links: + cd $(SOURCEDIR); \ + echo "Creating a symbolic link to sub-dirs"; \ + for dir in "getting started" "rdf4j" "corese-python" "federation" "storage" "advanced"; do \ + if [ ! -d "$$dir" ]; then \ + ln -sfv "../$$dir" "$$dir"; \ + echo "--- $$dir created"; \ + else echo "--- $$dir already exists"; \ + fi; \ + done; \ + \ + echo "Creating a symbolic link to docker-build"; \ + if [ ! 
-d "docker" ]; then \ + ln -sfv "../../corese-server/build-docker" "docker"; \ + echo "--- docker created"; \ + else echo "--- docker already exists"; \ + fi; \ + cd - diff --git a/docs/corese-python/Corese-library with Python.md b/docs/corese-python/Corese-library with Python.md index 5f6e9cf58a..074dbff988 100644 --- a/docs/corese-python/Corese-library with Python.md +++ b/docs/corese-python/Corese-library with Python.md @@ -4,8 +4,8 @@ 1. Install java and python 2. Install python dependencies `pip install --user py4j` -3. Download [corese-library-python-4.5.0.jar](https://github.com/Wimmics/corese/releases/download/release-4.5.0/corese-library-python-4.5.0.jar) -4. Place in the same directory `corese-library-python-4.5.0.jar` and your code `myCode.py` +3. Download [corese-library-python-4.5.1.jar](https://github.com/Wimmics/corese/releases/download/release-4.5.1/corese-library-python-4.5.1.jar) +4. Place in the same directory `corese-library-python-4.5.1.jar` and your code `myCode.py` 5. Run with `python myCode.py` ## 2. 
Template @@ -22,7 +22,7 @@ from py4j.java_gateway import JavaGateway # Start java gateway java_process = subprocess.Popen( - ['java', '-jar', '-Dfile.encoding=UTF-8', 'corese-library-python-4.5.0.jar']) + ['java', '-jar', '-Dfile.encoding=UTF-8', 'corese-library-python-4.5.1.jar']) sleep(1) gateway = JavaGateway() @@ -59,7 +59,7 @@ from py4j.java_gateway import JavaGateway # Start java gateway java_process = subprocess.Popen( - ['java', '-jar', '-Dfile.encoding=UTF-8', 'corese-library-python-4.5.0.jar']) + ['java', '-jar', '-Dfile.encoding=UTF-8', 'corese-library-python-4.5.1.jar']) sleep(1) gateway = JavaGateway() @@ -76,9 +76,14 @@ atexit.register(exit_handler) # Import of class Graph = gateway.jvm.fr.inria.corese.core.Graph Load = gateway.jvm.fr.inria.corese.core.load.Load -Transformer = gateway.jvm.fr.inria.corese.core.transform.Transformer QueryProcess = gateway.jvm.fr.inria.corese.core.query.QueryProcess RDF = gateway.jvm.fr.inria.corese.core.logic.RDF +TripleFormat = gateway.jvm.fr.inria.corese.core.print.TripleFormat +RDFFormat = gateway.jvm.fr.inria.corese.core.print.RDFFormat +JSONLDFormat = gateway.jvm.fr.inria.corese.core.print.JSONLDFormat +NTripleFormat = gateway.jvm.fr.inria.corese.core.print.NTripleFormat +NQuadsFormat = gateway.jvm.fr.inria.corese.core.print.NQuadsFormat +Shacl = gateway.jvm.fr.inria.corese.core.shacl.Shacl ############### # Build Graph # @@ -118,22 +123,58 @@ def sparqlQuery(graph, query): exec = QueryProcess.create(graph) return exec.query(query) +######### +# SHACL # +######### + +def shaclValidation(graph, shacl): + """Run a SHACL validation on a graph + + :param graph: the graph on which the SHACL validation is executed + :param shacl: the SHACL graph + :returns: SHACL validation report + """ + shacl = Shacl(graph, shacl) + result = shacl.eval() + return result ################# # Load / Export # ################# -def exportToFile(graph, format, path): +def serialize(graph, format): """Export a graph to a file :param graph: 
graph to export :param format: format of export - :param path: path of the exported file + :returns: the graph export """ - transformer = Transformer.create(graph, format) - transformer.write(path) - + if format == 'turtle': + content = TripleFormat.create(graph).toString() + elif format == 'rdfxml': + content = RDFFormat.create(graph).toString() + elif format == 'jsonld': + content = JSONLDFormat.create(graph).toString() + elif format == 'n3': + content = NTripleFormat.create(graph).toString() + elif format == 'n4': + content = NQuadsFormat.create(graph).toString() + else: + raise Exception('Format not supported : ' + format) + + return content + +def writeToFile(content, path): + """Write content to a file + + :param content: content to write + :param path: path of the file + :returns: the file write + """ + with open(path, "w") as file: + file.write(content) + return file def load(path): """Load a graph from a local file or a URL @@ -148,7 +189,6 @@ def load(path): return graph - ######## # Main # ######## @@ -172,7 +212,7 @@ graph = BuildGraphCoreseApi() print("Graph build ! (" + str(graph.size()) + " triplets)") print("\nPrint Graph:") -print(graph.display()) +print(serialize(graph, 'n4')) ### @@ -181,12 +221,12 @@ print(graph.display()) printTitle("SPARQL Query") graph = load( - "https://raw.githubusercontent.com/stardog-union/stardog-tutorials/master/music/beatles.ttl") + "https://files.inria.fr/corese/data/unit-test/beatles.ttl") # Uri or path to the graph print("Graph load ! (" + str(graph.size()) + " triplets)") # List of U2 albums query = """ - prefix : + prefix : SELECT ?member WHERE { @@ -198,6 +238,23 @@ map = sparqlQuery(graph, query) print("\nQuery result ! (List of members of bands \"The Beatles\"): ") print(map) +### +# SHACL Validation +### +printTitle("SHACL Validation") + +graph = load( + "https://files.inria.fr/corese/data/unit-test/beatles.ttl") +print("Graph load ! 
(" + str(graph.size()) + " triplets)") + +shacl = load( + "https://files.inria.fr/corese/data/unit-test/beatles-validator.ttl") +print("SHACL load ! (" + str(shacl.size()) + " triplets)") + +result = shaclValidation(graph, shacl) +print("SHACL validation report: ") +print(serialize(result, 'turtle')) + ### # Load / Export @@ -205,18 +262,20 @@ print(map) printTitle("Load / Export") graph = load( - "https://raw.githubusercontent.com/stardog-union/stardog-tutorials/master/music/beatles.ttl") + "https://files.inria.fr/corese/data/unit-test/beatles.ttl") print("Graph load ! (" + str(graph.size()) + " triplets)") path_export_file = "export.rdf" -exportToFile(graph, Transformer.RDFXML, path_export_file) +writeToFile(serialize(graph, 'turtle'), path_export_file) print("Graph Export in file (" + path_export_file + ")") + ``` Results : ```plaintext -Gateway Server Started +Loaded default config +CoresePy4j gateway server started on port 25333 @@ -226,8 +285,7 @@ Gateway Server Started Graph build ! (1 triplets) Print Graph: -predicate rdf:type [1] -00 kg:default rdf:type + . @@ -238,10 +296,27 @@ predicate rdf:type [1] Graph load ! (28 triplets) Query result ! (List of members of bands "The Beatles"): -01 ?member = ; -02 ?member = ; -03 ?member = ; -04 ?member = ; +01 ?member = ; +02 ?member = ; +03 ?member = ; +04 ?member = ; + + + + +====================== +== SHACL Validation == +====================== +Graph load ! (28 triplets) +SHACL load ! (46 triplets) +SHACL validation report: +@prefix xsh: . +@prefix sh: . +@prefix rdf: . + +_:b8 a sh:ValidationReport ; + sh:conforms true . + diff --git a/docs/corese-python/Corese-server with Python.md b/docs/corese-python/Corese-server with Python.md index 652d3fb7b7..1e1115b80e 100644 --- a/docs/corese-python/Corese-server with Python.md +++ b/docs/corese-python/Corese-server with Python.md @@ -65,34 +65,18 @@ Done ### 2.3. 
Execute a select query ```python -import json +from SPARQLWrapper import get_sparql_dataframe -import pandas as pd -from SPARQLWrapper import JSON, SPARQLWrapper - -def sparql_service_to_dataframe(service, query): +def sparql_service_to_dataframe(endpoint, query): """ - Helper function to convert SPARQL results into a Pandas DataFrame. - - Credit to Ted Lawless https://lawlesst.github.io/notebook/sparql-dataframe.html + Query the given endpoint with the given query and return the result as a pandas DataFrame. + :param endpoint: The SPARQL endpoint to query + :param query: The SPARQL query + :return: A pandas DataFrame containing the query result """ - sparql = SPARQLWrapper(service) - sparql.setQuery(query) - sparql.setReturnFormat(JSON) - result = sparql.query() - - processed_results = json.load(result.response) - cols = processed_results['head']['vars'] - - out = [] - for row in processed_results['results']['bindings']: - item = [] - for c in cols: - item.append(row.get(c, {}).get('value')) - out.append(item) - - return pd.DataFrame(out, columns=cols) + df = get_sparql_dataframe(endpoint, query) + return df query = ''' diff --git a/docs/getting started/Getting Started With Corese-command.md b/docs/getting started/Getting Started With Corese-command.md index 039edc3d2a..d7e58c6dab 100644 --- a/docs/getting started/Getting Started With Corese-command.md +++ b/docs/getting started/Getting Started With Corese-command.md @@ -4,57 +4,15 @@ Corese-Command is a command-line interface (CLI) for the Corese Semantic Web Fac Designed to simplify and streamline tasks related to querying, converting, and validating RDF data, Corese-Command is suitable for developers, data scientists, and anyone working with Semantic Web technologies. -## 1. Table of Contents - -1. [Getting Started With Corese-Command](#getting-started-with-corese-command) - 1. [1. Table of Contents](#1-table-of-contents) - 2. [2. Installation](#2-installation) - 3. [3. 
The `sparql` Command](#3-the-sparql-command) - 1. [3.1. Basic Usage](#31-basic-usage) - 2. [3.2. Choose the Result Format](#32-choose-the-result-format) - 3. [3.3. Different Types of Input](#33-different-types-of-input) - 4. [3.4. Different Types of Queries](#34-different-types-of-queries) - 5. [3.5. Multiple Input Files](#35-multiple-input-files) - 6. [3.6. Different Types of Output](#36-different-types-of-output) - 4. [4. The `convert` Command](#4-the-convert-command) - 1. [4.1. Basic Usage](#41-basic-usage) - 2. [4.2. Different Types of Input](#42-different-types-of-input) - 3. [4.3. Different Types of Output](#43-different-types-of-output) - 4. [4.4. Summary of Available Formats](#44-summary-of-available-formats) - 5. [5. The `shacl` Command](#5-the-shacl-command) - 1. [5.1. Basic Usage](#51-basic-usage) - 2. [5.2. Different Types of Input](#52-different-types-of-input) - 3. [5.3. Different Types of Output](#53-different-types-of-output) - 4. [5.4. Multiple Input Files](#54-multiple-input-files) - 6. [6. `remote-sparql` Command](#6-remote-sparql-command) - 1. [6.1. Basic Usage](#61-basic-usage) - 2. [6.2. Choose the Result Format](#62-choose-the-result-format) - 3. [6.3. Different Types of Queries](#63-different-types-of-queries) - 4. [6.4. Different Types of Output](#64-different-types-of-output) - 5. [6.5. Different Types of Methods](#65-different-types-of-methods) - 6. [6.6. Specifying Graphs](#66-specifying-graphs) - 1. [6.6.1. Default Graph](#661-default-graph) - 2. [6.6.2. Named Graph](#662-named-graph) - 7. [6.7. Additional Request Configurations](#67-additional-request-configurations) - 1. [6.7.1. Custom HTTP Headers](#671-custom-http-headers) - 2. [6.7.2. Redirection Limit](#672-redirection-limit) - 3. [6.7.3. Query Validation](#673-query-validation) - 7. [7. General Options](#7-general-options) - 1. [7.1. Configuration file](#71-configuration-file) - 2. [7.2. Verbose](#72-verbose) - 3. [7.3. Version](#73-version) - 4. [7.4. 
Get Help](#74-get-help) - 5. [7.5. Disabling OWL Auto Import](#75-disabling-owl-auto-import) - -## 2. Installation +## 1. Installation Installations instructions are available on the [Corese-Command GitHub repository](https://github.com/Wimmics/corese). -## 3. The `sparql` Command +## 2. The `sparql` Command The `sparql` command allows you to run SPARQL queries on RDF datasets. -### 3.1. Basic Usage +### 2.1. Basic Usage Let's start with a simple example, executing a query on a local file: @@ -71,7 +29,7 @@ corese-command sparql -q 'SELECT * WHERE {?s ?p ?o}' -i myData.ttl In this example, the query is provided directly on the command line with the `-q` flag, and the input file is specified with the `-i` flag. The result is printed to the standard output with the default format, which is `markdown`. -### 3.2. Choose the Result Format +### 2.2. Choose the Result Format Let's try the same query as before, but this time with the `json` format as output: @@ -153,7 +111,7 @@ Here is a table of available formats according to the type of request: | tsv | ✅ | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | | markdown | ✅ | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | -### 3.3. Different Types of Input +### 2.3. Different Types of Input The input can be provided in different ways: @@ -187,7 +145,7 @@ cat myData.ttl | corese-command sparql -q 'SELECT * WHERE {?s ?p ?o}' -if turtle > - `nquads`, `nq`, or `application/n-quads` > - `rdfa`, `html` or `application/xhtml+xml` -### 3.4. Different Types of Queries +### 2.4. Different Types of Queries The query can be provided in different ways: @@ -209,7 +167,7 @@ corese-command sparql -q myQuery.rq -i myData.ttl corese-command sparql -q 'http://example.org/myQuery.rq' -i myData.ttl ``` -### 3.5. Multiple Input Files +### 2.5. 
Multiple Input Files - **Multiple Input:** It's possible to provide multiple input files by repeating the `-i` flag: @@ -239,7 +197,7 @@ corese-command sparql -q myQuery.rq -i ./myDirectory/ corese-command sparql -q myQuery.rq -i ./myDirectory/ -R ``` -### 3.6. Different Types of Output +### 2.6. Different Types of Output If you want to save the result to a file, you can do so with the `-o` flag: @@ -253,11 +211,11 @@ If no `-o` flag is provided, the result is printed to the standard output. corese-command sparql -q 'SELECT * WHERE {?s ?p ?o}' -i myData.ttl -r json | jq […] ``` -## 4. The `convert` Command +## 3. The `convert` Command The `convert` command allows you to convert an RDF file from one serialization format to another. -### 4.1. Basic Usage +### 3.1. Basic Usage ```shell corese-command convert -i myFile.ttl -r jsonld @@ -265,7 +223,7 @@ corese-command convert -i myFile.ttl -r jsonld This example converts `myFile.ttl` from `turtle` to `jsonld`. The `-i` flag specifies the input file, and the `-r` flag specifies the output format. -### 4.2. Different Types of Input +### 3.2. Different Types of Input The input can be provided in different ways: @@ -299,7 +257,7 @@ cat myData.ttl | corese-command convert -r turtle -if turtle > - `nquads`, `nq`, or `application/n-quads` > - `rdfa`, `html` or `application/xhtml+xml` -### 4.3. Different Types of Output +### 3.3. Different Types of Output The output can be provided in different ways: @@ -324,7 +282,7 @@ corese-command convert -i myData.ttl -r jsonld | jq […] > - NTRIPLES: `ntriples`, `nt` or `application/n-triples` > - NQUADS: `nquads`, `nq`, or `application/n-quads` -### 4.4. Summary of Available Formats +### 3.4. Summary of Available Formats The `convert` command supports the following formats for input and output: @@ -338,11 +296,43 @@ The `convert` command supports the following formats for input and output: | NQUADS | ✅ | ✅ | | RDFA | ✅ | ❌ | -## 5. The `shacl` Command +### 3.5. 
Multiple Input Files + +- **Multiple Input:** It's possible to provide multiple input files by repeating the `-i` flag: + +```shell +corese-command convert -i myData1.ttl -i myData2.ttl -r jsonld +``` + +- **Shell Globbing:** It's also possible to use shell globbing to provide multiple input files: + +```shell +corese-command convert -i rdf/*.ttl -r jsonld +``` + +```shell +corese-command convert -i myData?.ttl -r jsonld +``` + +- **Directory Input:** If you want to use a whole directory as input, you can do so. + +```shell +corese-command convert -i ./myDirectory/ -r jsonld +``` + +- **Directory Input Recursive:** If you want to use a whole directory as input, you can do so. The `-R` flag allows you to use the directory recursively. + +```shell +corese-command convert -i ./myDirectory/ -r jsonld -R +``` + +> The command integrates all specified input files into a single dataset for processing. During conversion, these files are collectively transformed into the designated output format, effectively merging all data into one coherent file. + +## 4. The `shacl` Command The `shacl` command allows you to validate RDF data against SHACL shapes. -### 5.1. Basic Usage +### 4.1. Basic Usage ```shell corese-command shacl -i myData.ttl -s myShapes.ttl @@ -350,7 +340,7 @@ corese-command shacl -i myData.ttl -s myShapes.ttl This example validates `myData.ttl` against `myShapes.ttl`. The `-i` flag specifies the input file, and the `-s` flag specifies the shapes file. -### 5.2. Different Types of Input +### 4.2. Different Types of Input The input can be provided in different ways: @@ -384,7 +374,7 @@ cat myData.ttl | corese-command shacl -s myShapes.ttl -if turtle > - `nquads`, `nq`, or `application/n-quads` > - `rdfa`, `html` or `application/xhtml+xml` -### 5.3. Different Types of Output +### 4.3. 
Different Types of Output The output report can be provided in different ways (the default format is `turtle`): @@ -409,7 +399,7 @@ corese-command shacl -i myData.ttl -s myShapes.ttl | other-command > - NTRIPLES: `ntriples`, `nt` or `application/n-triples` > - NQUADS: `nquads`, `nq`, or `application/n-quads` -### 5.4. Multiple Input Files +### 4.4. Multiple Input Files - **Multiple Input:** It's possible to provide multiple input files by repeating the `-i` and `-s` flags: @@ -441,11 +431,11 @@ corese-command shacl -i ./myDirectory/ -s ./myShapes/ -R > All input files are loaded into the same dataset, and all shapes files are loaded into the same shapes graph. The dataset is validated against all shapes graphs. -## 6. `remote-sparql` Command +## 5. The `remote-sparql` Command The `remote-sparql` command allows you to run SPARQL queries on a remote SPARQL endpoint. -### 6.1. Basic Usage +### 5.1. Basic Usage ```shell corese-command remote-sparql -q 'SELECT * WHERE {?s ?p ?o}' -e "http://example.org/sparql" @@ -453,7 +443,7 @@ corese-command remote-sparql -q 'SELECT * WHERE {?s ?p ?o}' -e "http://example.o This example executes a query on the remote SPARQL endpoint `http://example.org/sparql`. The `-q` flag specifies the query, and the `-e` flag specifies the endpoint. -### 6.2. Choose the Result Format +### 5.2. Choose the Result Format Let's try the same query as before, but this time with the `json` format as output: @@ -470,7 +460,7 @@ The format of the result can be specified by using one of the following flags: ` > If no `-a`, `-of`, or `--accept` flag is provided, the program uses 'text/csv' as the default format. -### 6.3. Different Types of Queries +### 5.3. Different Types of Queries The query can be provided in different ways: @@ -498,7 +488,7 @@ corese-command remote-sparql -q 'http://example.org/myQuery.rq' -e "http://examp cat myQuery.rq | corese-command remote-sparql -e "http://example.org/sparql" ``` -### 6.4. Different Types of Output +### 5.4. 
Different Types of Output The output can be provided in different ways: @@ -514,7 +504,7 @@ corese-command remote-sparql -q 'SELECT * WHERE {?s ?p ?o}' -e "http://example.o corese-command remote-sparql -q 'SELECT * WHERE {?s ?p ?o}' -e "http://example.org/sparql" -a "application/sparql-results+json" | jq […] ``` -### 6.5. Different Types of Methods +### 5.5. Different Types of Methods In SPARQL 1.1, three different methods are defined for sending a SPARQL query to a SPARQL endpoint: @@ -526,11 +516,11 @@ In SPARQL 1.1, three different methods are defined for sending a SPARQL query to In the command line interface, the `-m` or `--request-method` flags are used to specify the HTTP request method to use. The default value is `GET`. The available options are `GET`, `POST-Encoded`, and `POST-Direct`, corresponding to the GET, POST-URLENCODED, and POST-Direct methods respectively. -### 6.6. Specifying Graphs +### 5.6. Specifying Graphs In SPARQL, the dataset to be queried can be specified using the `FROM` and `FROM NAMED` clauses in the query itself. However, you can also specify the default and named graphs using command line arguments when invoking the SPARQL processor. This can be particularly useful when you want to query multiple graphs without having to specify them within the query text. -#### 6.6.1. Default Graph +#### 5.6.1. Default Graph The default graph can be specified using the `-d` or `--default-graph` option. Each occurrence of this option represents a URI of a default graph. Multiple URIs can be specified by repeating this option. @@ -538,7 +528,7 @@ The default graph can be specified using the `-d` or `--default-graph` option. E corese-command remote-sparql -q 'SELECT * WHERE {?s ?p ?o}' -e "http://example.org/sparql" -d http://example.org/graph1 -d http://example.org/graph2 ``` -#### 6.6.2. Named Graph +#### 5.6.2. Named Graph The named graph can be specified using the `-n` or `--named-graph` option. 
Each occurrence of this option represents a URI of a named graph. Multiple URIs can be specified by repeating this option. @@ -546,11 +536,11 @@ The named graph can be specified using the `-n` or `--named-graph` option. Each corese-command remote-sparql -q 'SELECT * WHERE {?s ?p ?o}' -e "http://example.org/sparql" -n http://example.org/graph1 -n http://example.org/graph2 ``` -### 6.7. Additional Request Configurations +### 5.7. Additional Request Configurations The following options provide additional configurations for the HTTP request sent to the SPARQL endpoint. These configurations include setting custom headers, controlling redirections, and toggling query validation. -#### 6.7.1. Custom HTTP Headers +#### 5.7.1. Custom HTTP Headers Custom HTTP headers can be added to the request using the `-H` or `--header` option. Each occurrence of this option represents a single header, with the header name and value separated by a colon `:`. @@ -560,7 +550,7 @@ corese-command remote-sparql -q 'SELECT * WHERE {?s ?p ?o}' -e "http://example.o > When the `--accept` option is used alongside the `--header "Accept: …"` option, the request sent to the server will contain a list of MIME types in the `Accept` header. The MIME type specified by the `--accept` option will be placed first in this list, followed by the MIME types specified with the `--header "Accept: …"` option. -#### 6.7.2. Redirection Limit +#### 5.7.2. Redirection Limit The maximum number of HTTP redirections to follow can be specified using the `-r` or `--max-redirection` option. The default value is 5. @@ -568,7 +558,7 @@ The maximum number of HTTP redirections to follow can be specified using the `-r corese-command remote-sparql -q 'SELECT * WHERE {?s ?p ?o}' -e "http://example.org/sparql" --max-redirection 10 ``` -#### 6.7.3. Query Validation +#### 5.7.3. Query Validation By default, the query is validated before being sent to the SPARQL endpoint. 
This can be disabled using the `-i` or `--ignore-query-validation` option. @@ -578,6 +568,114 @@ corese-command remote-sparql -q 'SELECT * WHERE {?s ?p ?o}' -e "http://example.o This option is useful when you want to send a query that is not valid according to the SPARQL grammar, but is still accepted by the SPARQL endpoint. +## 6. The `canonicalize` Command + +The `canonicalize` command allows you to apply a specific canonicalization algorithm to RDF files. + +### 6.1. Basic Usage + +Use the following syntax to canonicalize an RDF file using the SHA-256 algorithm under the RDFC 1.0 specification: + +```shell +corese-command canonicalize -i myData.ttl -r rdfc-1.0-sha256 +``` + +This example canonicalizes `myData.ttl` to the `rdfc-1.0-sha256` (See [RDFC1.0](https://www.w3.org/TR/rdf-canon/)) canonical algorithm. The `-i` flag specifies the input file, and the `-r` flag specifies the canonical algorithm. + +### 6.2. Different Types of Input + +The input can be provided in different ways: + +- **File Input:** The input file can be specified with the `-i` flag: + +```shell +corese-command canonicalize -i myData.ttl -r rdfc-1.0-sha256 +``` + +- **URL Input:** URLs can be specified with the `-i` flag: + +```shell +corese-command canonicalize -i 'http://example.org/myData.ttl' -r rdfc-1.0-sha256 +``` + +- **Standard Input:** If no input file is specified with `-i`, the program uses the standard input: + +```shell +cat myData.ttl | corese-command canonicalize -r rdfc-1.0-sha256 -if turtle +``` + +> The input file format is automatically detected for file and URL inputs. If +> the input is provided on the standard input or you want to force the input +> format, you can use the `-f` or `-if` flag. 
Possible values are: +> +> - `rdfxml`, `rdf` or `application/rdf+xml` +> - `turtle`, `ttl` or `text/turtle` +> - `trig`, `application/trig` +> - `jsonld`, `application/ld+json` +> - `ntriples`, `nt` or `application/n-triples` +> - `nquads`, `nq`, or `application/n-quads` +> - `rdfa`, `html` or `application/xhtml+xml` + +### 6.3. Different Types of Output + +The output can be provided in different ways: + +- **File Output:** The output file can be specified with the `-o` flag: + +```shell +corese-command canonicalize -i myData.ttl -r rdfc-1.0-sha256 -o myResult.ttl +``` + +- **Standard Output:** If no output file is specified with `-o`, the program uses the standard output: + +```shell +corese-command canonicalize -i myData.ttl -r rdfc-1.0-sha256 | other-command +``` + +### 6.4. Canonicalization Algorithms + +The following canonicalization algorithms are available: + +- [RDFC 1.0](https://www.w3.org/TR/rdf-canon/) with SHA-256. +- [RDFC 1.0](https://www.w3.org/TR/rdf-canon/) with SHA-384. + +> The output file format can be specified with the `-r` flag. Possible values are: +> +> - `rdfc-1.0` or `rdfc-1.0-sha256` for [RDFC 1.0](https://www.w3.org/TR/rdf-canon/) with SHA-256 +> - `rdfc-1.0-sha384` for [RDFC 1.0](https://www.w3.org/TR/rdf-canon/) with SHA-384 + +### 6.5. Multiple Input Files + +- **Multiple Input:** It's possible to provide multiple input files by repeating the `-i` flag: + +```shell +corese-command canonicalize -i myData1.ttl -i myData2.ttl -r rdfc-1.0-sha256 +``` + +- **Shell Globbing:** It's also possible to use shell globbing to provide multiple input files: + +```shell +corese-command canonicalize -i rdf/*.ttl -r rdfc-1.0-sha256 +``` + +```shell +corese-command canonicalize -i myData?.ttl -r rdfc-1.0-sha256 +``` + +- **Directory Input:** If you want to use a whole directory as input, you can do so. 
+ +```shell +corese-command canonicalize -i ./myDirectory/ -r rdfc-1.0-sha256 +``` + +- **Directory Input Recursive:** If you want to use a whole directory as input, you can do so. The `-R` flag allows you to use the directory recursively. + +```shell +corese-command canonicalize -i ./myDirectory/ -r rdfc-1.0-sha256 -R +``` + +> All input files are loaded into the same dataset. Canonicalization algorithms are applied to the entire dataset. + ## 7. General Options General options are available for all commands. diff --git a/docs/getting started/Getting Started With Corese-library.md b/docs/getting started/Getting Started With Corese-library.md index 6267ef2e9e..9ba39c073c 100644 --- a/docs/getting started/Getting Started With Corese-library.md +++ b/docs/getting started/Getting Started With Corese-library.md @@ -10,28 +10,6 @@ The fourth part shows how to transforme a graph with the extension language [SPA The fifth part details how to apply a set of rules on a graph using the [SPARQL Rule](https://files.inria.fr/corese/doc/rule.html) extension language. Finally, the sixth part describes how to define and use functions with the [LDScript](https://files.inria.fr/corese/doc/ldscript.html) extension language. -1. [Getting Started With Corese-library](#getting-started-with-corese-library) - 1. [1. Installation](#1-installation) - 2. [2. Graph](#2-graph) - 1. [2.1. Build a Graph by program](#21-build-a-graph-by-program) - 2. [2.2. Load Graph from file](#22-load-graph-from-file) - 3. [2.3. Export Graph to file](#23-export-graph-to-file) - 3. [3. SPARQL Protocol and RDF Query Language](#3-sparql-protocol-and-rdf-query-language) - 1. [3.1. SPARQL Select query](#31-sparql-select-query) - 2. [3.2. SPARQL Ask query](#32-sparql-ask-query) - 3. [3.3. SPARQL Construct query](#33-sparql-construct-query) - 4. [3.4. SPARQL Update query](#34-sparql-update-query) - 4. [4. Shapes Constraint Language (SHACL)](#4-shapes-constraint-language-shacl) - 5. [5. 
SPARQL Template Transformation Language (STTL)](#5-sparql-template-transformation-language-sttl) - 1. [5.1. Transform a graph in a visual HTML format](#51-transform-a-graph-in-a-visual-html-format) - 6. [6. SPARQL Rule](#6-sparql-rule) - 1. [6.1. Load rules from a file](#61-load-rules-from-a-file) - 2. [6.2. OWL Rules](#62-owl-rules) - 7. [7. LDScript](#7-ldscript) - 1. [7.1. Fibonacci function call from Java](#71-fibonacci-function-call-from-java) - 2. [7.2. LDScript in SPARQL](#72-ldscript-in-sparql) - 3. [7.3. Advanced example](#73-advanced-example) - ## 1. Installation Installations instructions are available on the [Corese-Command GitHub repository](https://github.com/Wimmics/corese). @@ -652,6 +630,7 @@ prefix fun: select ?name ?area where { ?city rdf:type ex:city ; + ex:name ?name ; ex:area ?area . filter(?area > fun:toSquareKm(40)) } diff --git a/docs/getting started/Getting Started With Corese-server.md b/docs/getting started/Getting Started With Corese-server.md index 22ae69021c..f3804bb56d 100644 --- a/docs/getting started/Getting Started With Corese-server.md +++ b/docs/getting started/Getting Started With Corese-server.md @@ -2,19 +2,9 @@ This tutorial shows how to use the basic features of the Corese-server framework. -1. [Getting Started With Corese-server](#getting-started-with-corese-server) - 1. [1. Installation](#1-installation) - 2. [2. Load data](#2-load-data) - 1. [2.1. Command line](#21-command-line) - 2. [2.2. Profile file](#22-profile-file) - 3. [3. Create multiple endpoints](#3-create-multiple-endpoints) - 1. [3.1. Multiple endpoints with different data](#31-multiple-endpoints-with-different-data) - 4. [4. Restrict access to external endpoints](#4-restrict-access-to-external-endpoints) - 5. [5. To go deeper](#5-to-go-deeper) - ## 1. Installation -Installations instructions are available on the [Corese-Command GitHub repository](https://github.com/Wimmics/corese). 
+Installation instructions are available on the [Corese-Command GitHub repository](https://github.com/Wimmics/corese). ## 2. Load data @@ -63,7 +53,7 @@ st:user a st:Server; ``` The keyword `st:user` designates the default endpoint available in . -In this example, we add on the default endpoint the workflow named `<#loadBeatles>` which loads the file "beatles.tll". +In this example, we add on the default endpoint the workflow named `<#loadBeatles>` which loads the file "beatles.ttl". There can be several load in a workflow body. To load Corese-server with a profile, use the options `-lp -pp "profileFile"`. @@ -157,46 +147,58 @@ An example of properties file is available on the [Corese-Command GitHub reposit Here we list only some of the most commonly used properties. ### 4.1. Blank node format + ```properties -BLANK_NODE = _:b +BLANK_NODE = _:b ``` + `BLANK_NODE` specifies the format of blank nodes. The default value is `_:b`. ### 4.2. Loading in the default graph + ```properties -LOAD_IN_DEFAULT_GRAPH = true +LOAD_IN_DEFAULT_GRAPH = true ``` + By default, the data is loaded into the default graph. If `LOAD_IN_DEFAULT_GRAPH` is set to `false`, the data is loaded into a named graph whose name is the path of the file. Note that internally, the default graph of the Corese server is named `http://ns.inria.fr/corese/kgram/default`, or `kg:default`. #### 4.3. RDF* (RDF Star) + ```properties -RDF_STAR = false +RDF_STAR = false ``` -Corese implements a prototype extension for the RDF* specification. `RDF_STAR` enables this extension. + +Corese implements a prototype extension for the RDF* specification. RDF_STAR enables this extension. ### 4.4. OWL utilities ```properties DISABLE_OWL_AUTO_IMPORT = true ``` + By default, when a triple with the predicate `owl:imports` is loaded, the Corese-server automatically loads the ontology specified in the object of the triple. 
If `DISABLE_OWL_AUTO_IMPORT` is set to `true`, the Corese-server does not load the ontology specified in the object of the triple. ### 4.5. SPARQL engine behavior + ```properties -SPARQL_COMPLIANT = false +SPARQL_COMPLIANT = false ``` + `SPARQL_COMPLIANT` specifies the behavior of the SPARQL engine. If `SPARQL_COMPLIANT` is set to `true`, the SPARQL engine is compliant with the W3C test cases. In practice, this means that the SPARQL engine will consider that two literals are different if they have the same value but different types (E.g: `1` and `"1"^^xsd:integer`). ```properties -REENTRANT_QUERY = false +REENTRANT_QUERY = false ``` -`REENRANT_QUERY` enables the update during a query. This option was implemented in cooperation with the [SPARQL micro-service project](https://github.com/frmichel/sparql-micro-service). + +`REENRANT_QUERY` enables the update during a query. This option was implemented in cooperation with the [SPARQL micro-service project](https://github.com/frmichel/sparql-micro-service). It is equivalent to using `-re` argument. ### 4.6. SPARQL federation behavior + ```properties -SERVICE_BINDING = values +SERVICE_BINDING = values ``` + When binding values between clauses from different endpoints, the Corese-server uses the `SERVICE_BINDING` property to specify the method to use. The default value is `values`. The other possible value is `filter`. For example, with the following data in the local endpoint: @@ -206,6 +208,7 @@ For example, with the following data in the local endpoint: ex:John :name "John" . ``` + if the following query is executed: ```sparql @@ -230,13 +233,18 @@ SELECT * { } ``` +This is equivalent to add `@binding values` in the query. If `SERVICE_BINDING` is defined in the properties file and `@binding` is also defined in the query, then the value of `@binding` in the query is used. + ```properties -SERVICE_SLICE = 20 +SERVICE_SLICE = 20 ``` + `SERVICE_SLICE` specifies the number of bindings to send to a remote endpoint. 
The default value is `20`. +This is equivalent to add `@slice 20` in the query. If `SERVICE_SLICE` is defined in the properties file and `@slice` is also defined in the query, then the value of `@slice` in the query is used. + ```properties -SERVICE_LIMIT = 1000 +SERVICE_LIMIT = 1000 ``` `SERVICE_LIMIT` specifies the maximum number of results to return from a remote endpoint. The default value is `1000`. In the previous example, the query sent to the remote endpoint should actually be: @@ -250,35 +258,44 @@ SELECT * { LIMIT 1000 } ``` + +This is equivalent to add `@limit 1000` in the query. If `SERVICE_LIMIT` is defined in the properties file and `@limit` is also defined in the query, then the value of `@limit` in the query is used. + Corese will try to obtain the next 1000 results by sending the same query with the `OFFSET` clause. ```properties SERVICE_TIMEOUT = 2000 ``` + `SERVICE_TIMEOUT` specifies the timeout in milliseconds for a remote endpoint. The default value is `10000`. +This is equivalent to add `@timeout 2000` in the query. If `SERVICE_TIMEOUT` is defined in the properties file and `@timeout` is also defined in the query, then the value of `@timeout` in the query is used. + ### 4.7. SPARQL LOAD parameters + ```properties LOAD_LIMIT = 10 ``` + `LOAD_LIMIT` specifies the maximum number of triples to load from a file. This feature is not enabled by default. ```properties LOAD_WITH_PARAMETER = true ``` + `LOAD_WITH_PARAMETER` enables the use of the `LOAD` clause with a parameter. This feature is not enabled by default. ```properties LOAD_FORMAT = text/turtle;q=1.0, application/rdf+xml;q=0.9, application/ld+json;q=0.7; application/json;q=0.6 ``` + ```properties LOAD_FORMAT = application/rdf+xml ``` + If `LOAD_WITH_PARAMETER` is enabled, `LOAD_FORMAT` can be used to specify which mime type should be resquest as format for the loaded data. -## 6. To go deeper +## 5. 
To go deeper - [Technical documentation](https://files.inria.fr/corese/doc/server.html) - [Storage](https://github.com/Wimmics/corese/blob/master/docs/storage/Configuring%20and%20Connecting%20to%20Different%20Storage%20Systems%20in%20Corese.md#configuring-and-connecting-to-different-storage-systems-in-corese) - - diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 0000000000..f4f9b4538a --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,68 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=source +set BUILDDIR=build + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.https://www.sphinx-doc.org/ + exit /b 1 +) + +if "%1" == "" goto help + +if "%1" == "link" goto link + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +REM Link the README.md file to user_guide.md +:link +pushd %SOURCEDIR% + +REM Link the docs/source/sub-directories to source directories to the docs/sub-directories +REM This is necessary to accomodate the sphinx build system + +set "dirs=getting started;rdf4j;corese-python;federation;storage;advanced" + +echo Linking docs directories to source directories + +for %%i in ("%dirs:;=";"%") do ( + echo %%~i + + if not exist "%%~i" ( + mklink /D "%%~i" "..\%%~i" + ) +) + +REM the markdown file for docker is now outside of the docs directory so we need to link it +REM Link the corese-server/build-docker directory to the docker directory +REM TODO: consider moving the docker directory to the docs directory + +echo 
docker +if not exist "docker" ( + mklink /D "docker" "..\..\corese-server\build-docker" +) + +popd +goto end + +:end +popd diff --git a/docs/rdf4j/RDF4J API in Corese.md b/docs/rdf4j/RDF4J API in Corese.md index 225129583c..f882d2d4cc 100644 --- a/docs/rdf4j/RDF4J API in Corese.md +++ b/docs/rdf4j/RDF4J API in Corese.md @@ -4,18 +4,6 @@ This document details the RDF4J APIs implemented in Corese and how to use and connect the Corese framework with RDF4J. -1. [Corese-library and RDF4J](#corese-library-and-rdf4j) - 1. [1. Implementation of the RDF4J model API in Corese](#1-implementation-of-the-rdf4j-model-api-in-corese) - 1. [1.1. Build a Corese Model by program](#11-build-a-corese-model-by-program) - 2. [1.2. Manipulate a Corese Model](#12-manipulate-a-corese-model) - 3. [1.3. Get a Corese Model from a Corese Graph](#13-get-a-corese-model-from-a-corese-graph) - 4. [1.4. Get a Corese Graph from a Corese Model](#14-get-a-corese-graph-from-a-corese-model) - 5. [1.5. Get a Corese model from any object that implements the RDF4J model API](#15-get-a-corese-model-from-any-object-that-implements-the-rdf4j-model-api) - 6. [1.6. Use external tools compatible with the RDF4J model API](#16-use-external-tools-compatible-with-the-rdf4j-model-api) - 1. [1.6.1. RIO](#161-rio) - 2. [1.6.2. RDF4J SPARQL Engine with a Corese model](#162-rdf4j-sparql-engine-with-a-corese-model) - 2. [2. Implementation of an RDF4J data manager in Corese](#2-implementation-of-an-rdf4j-data-manager-in-corese) - ## 1. Implementation of the RDF4J model API in Corese [RDF4J model API](https://rdf4j.org/documentation/programming/model/) provides the basic building blocks for manipulating RDF data. 
diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 0000000000..15d8f62b6b --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,12 @@ +sphinx>=7.1.2 +pydata_sphinx_theme>=0.14.4 +babel>=2.9 +packaging>=21.0 +docutils<0.21,>=0.18.1 +Jinja2>=3.0 +sphinx-design==0.5.0 +myst-parser==2.0.0 +sphinxcontrib-mermaid==0.9.2 +breathe==4.35.0 +exhale==0.3.7 +sphinx-copybutton>=0.5.0 \ No newline at end of file diff --git a/docs/source/Doxyfile b/docs/source/Doxyfile new file mode 100644 index 0000000000..d462ddb29e --- /dev/null +++ b/docs/source/Doxyfile @@ -0,0 +1,2851 @@ +# Doxyfile 1.10.0 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project. +# +# All text after a double hash (##) is considered a comment and is placed in +# front of the TAG it is preceding. +# +# All text after a single hash (#) is considered a comment and will be ignored. +# The format is: +# TAG = value [value, ...] +# For lists, items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (\" \"). +# +# Note: +# +# Use doxygen to compare the used configuration file with the template +# configuration file: +# doxygen -x [configFile] +# Use doxygen to compare the used configuration file with the template +# configuration file without replacing the environment variables or CMake type +# replacement variables: +# doxygen -x_noenv [configFile] + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the configuration +# file that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. 
See +# https://www.gnu.org/software/libiconv/ for the list of possible encodings. +# The default value is: UTF-8. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by +# double-quotes, unless you are using Doxywizard) that should identify the +# project for which the documentation is generated. This name is used in the +# title of most generated pages and in a few other places. +# The default value is: My Project. + +PROJECT_NAME = "corese" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. This +# could be handy for archiving the generated documentation or if some version +# control system is used. + +PROJECT_NUMBER = + +# Using the PROJECT_BRIEF tag one can provide an optional one line description +# for a project that appears at the top of each page and should give viewer a +# quick idea about the purpose of the project. Keep the description short. + +PROJECT_BRIEF = + +# With the PROJECT_LOGO tag one can specify a logo or an icon that is included +# in the documentation. The maximum height of the logo should not exceed 55 +# pixels and the maximum width should not exceed 200 pixels. Doxygen will copy +# the logo to the output directory. + +PROJECT_LOGO = + +# With the PROJECT_ICON tag one can specify an icon that is included in the tabs +# when the HTML document is shown. Doxygen will copy the logo to the output +# directory. + +PROJECT_ICON = + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path +# into which the generated documentation will be written. If a relative path is +# entered, it will be relative to the location where doxygen was started. If +# left blank the current directory will be used. 
+ +#OUTPUT_DIRECTORY = "./_doxygen" +OUTPUT_DIRECTORY = ../build + +# If the CREATE_SUBDIRS tag is set to YES then doxygen will create up to 4096 +# sub-directories (in 2 levels) under the output directory of each output format +# and will distribute the generated files over these directories. Enabling this +# option can be useful when feeding doxygen a huge amount of source files, where +# putting all generated files in the same directory would otherwise causes +# performance problems for the file system. Adapt CREATE_SUBDIRS_LEVEL to +# control the number of sub-directories. +# The default value is: NO. + +CREATE_SUBDIRS = NO + +# Controls the number of sub-directories that will be created when +# CREATE_SUBDIRS tag is set to YES. Level 0 represents 16 directories, and every +# level increment doubles the number of directories, resulting in 4096 +# directories at level 8 which is the default and also the maximum value. The +# sub-directories are organized in 2 levels, the first level always has a fixed +# number of 16 directories. +# Minimum value: 0, maximum value: 8, default value: 8. +# This tag requires that the tag CREATE_SUBDIRS is set to YES. + +CREATE_SUBDIRS_LEVEL = 8 + +# If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII +# characters to appear in the names of generated files. If set to NO, non-ASCII +# characters will be escaped, for example _xE3_x81_x84 will be used for Unicode +# U+3044. +# The default value is: NO. + +ALLOW_UNICODE_NAMES = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. 
+# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Bulgarian, +# Catalan, Chinese, Chinese-Traditional, Croatian, Czech, Danish, Dutch, English +# (United States), Esperanto, Farsi (Persian), Finnish, French, German, Greek, +# Hindi, Hungarian, Indonesian, Italian, Japanese, Japanese-en (Japanese with +# English messages), Korean, Korean-en (Korean with English messages), Latvian, +# Lithuanian, Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, +# Romanian, Russian, Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, +# Swedish, Turkish, Ukrainian and Vietnamese. +# The default value is: English. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member +# descriptions after the members that are listed in the file and class +# documentation (similar to Javadoc). Set to NO to disable this. +# The default value is: YES. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief +# description of a member or function before the detailed description +# +# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. +# The default value is: YES. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator that is +# used to form the text in various listings. Each string in this list, if found +# as the leading text of the brief description, will be stripped from the text +# and the result, after processing the whole list, is used as the annotated +# text. Otherwise, the brief description is used as-is. If left blank, the +# following values are used ($name is automatically replaced with the name of +# the entity):The $name class, The $name widget, The $name file, is, provides, +# specifies, contains, represents, a, an and the. 
+ +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# doxygen will generate a detailed section even if there is only a brief +# description. +# The default value is: NO. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. +# The default value is: NO. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path +# before files name in the file list and in the header files. If set to NO the +# shortest path that makes the file name unique will be used +# The default value is: YES. + +FULL_PATH_NAMES = YES + +# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path. +# Stripping is only done if one of the specified strings matches the left-hand +# part of the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the path to +# strip. +# +# Note that you can specify absolute paths here, but also relative paths, which +# will be relative from the directory where doxygen is started. +# This tag requires that the tag FULL_PATH_NAMES is set to YES. + +STRIP_FROM_PATH = ".." + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the +# path mentioned in the documentation of a class, which tells the reader which +# header file to include in order to use a class. If left blank only the name of +# the header file containing the class definition is used. 
Otherwise one should +# specify the list of include paths that are normally passed to the compiler +# using the -I flag. + +STRIP_FROM_INC_PATH = + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but +# less readable) file names. This can be useful is your file systems doesn't +# support long names like on DOS, Mac, or CD-ROM. +# The default value is: NO. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the +# first line (until the first dot) of a Javadoc-style comment as the brief +# description. If set to NO, the Javadoc-style will behave just like regular Qt- +# style comments (thus requiring an explicit @brief command for a brief +# description.) +# The default value is: NO. + +JAVADOC_AUTOBRIEF = YES + +# If the JAVADOC_BANNER tag is set to YES then doxygen will interpret a line +# such as +# /*************** +# as being the beginning of a Javadoc-style comment "banner". If set to NO, the +# Javadoc-style will behave just like regular comments and it will not be +# interpreted by doxygen. +# The default value is: NO. + +JAVADOC_BANNER = NO + +# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first +# line (until the first dot) of a Qt-style comment as the brief description. If +# set to NO, the Qt-style will behave just like regular Qt-style comments (thus +# requiring an explicit \brief command for a brief description.) +# The default value is: NO. + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a +# multi-line C++ special comment block (i.e. a block of //! or /// comments) as +# a brief description. This used to be the default behavior. The new default is +# to treat a multi-line C++ comment block as a detailed description. Set this +# tag to YES if you prefer the old behavior instead. +# +# Note that setting this tag to YES also means that rational rose comments are +# not recognized any more. 
+# The default value is: NO. + +MULTILINE_CPP_IS_BRIEF = NO + +# By default Python docstrings are displayed as preformatted text and doxygen's +# special commands cannot be used. By setting PYTHON_DOCSTRING to NO the +# doxygen's special commands can be used and the contents of the docstring +# documentation blocks is shown as doxygen documentation. +# The default value is: YES. + +PYTHON_DOCSTRING = NO + +# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the +# documentation from any documented member that it re-implements. +# The default value is: YES. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new +# page for each member. If set to NO, the documentation of a member will be part +# of the file/class/namespace that contains it. +# The default value is: NO. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen +# uses this value to replace tabs by spaces in code fragments. +# Minimum value: 1, maximum value: 16, default value: 4. + +TAB_SIZE = 4 + +# This tag can be used to specify a number of aliases that act as commands in +# the documentation. An alias has the form: +# name=value +# For example adding +# "sideeffect=@par Side Effects:^^" +# will allow you to put the command \sideeffect (or @sideeffect) in the +# documentation, which will result in a user-defined paragraph with heading +# "Side Effects:". Note that you cannot put \n's in the value part of an alias +# to insert newlines (in the resulting output). You can put ^^ in the value part +# of an alias to insert a newline as if a physical newline was in the original +# file. 
When you need a literal { or } or , in the value part of an alias you +# have to escape them by means of a backslash (\), this can lead to conflicts +# with the commands \{ and \} for these it is advised to use the version @{ and +# @} or use a double escape (\\{ and \\}) + +ALIASES = + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources +# only. Doxygen will then generate output that is more tailored for C. For +# instance, some of the names that are used will be different. The list of all +# members will be omitted, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or +# Python sources only. Doxygen will then generate output that is more tailored +# for that language. For instance, namespaces will be presented as packages, +# qualified scopes will look different, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_JAVA = YES + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources. Doxygen will then generate output that is tailored for Fortran. +# The default value is: NO. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for VHDL. +# The default value is: NO. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Set the OPTIMIZE_OUTPUT_SLICE tag to YES if your project consists of Slice +# sources only. Doxygen will then generate output that is more tailored for that +# language. For instance, namespaces will be presented as modules, types will be +# separated into more groups, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_SLICE = NO + +# Doxygen selects the parser to use depending on the extension of the files it +# parses. With this tag you can assign which parser to use for a given +# extension. Doxygen has a built-in mapping, but you can override or extend it +# using this tag. 
The format is ext=language, where ext is a file extension, and +# language is one of the parsers supported by doxygen: IDL, Java, JavaScript, +# Csharp (C#), C, C++, Lex, D, PHP, md (Markdown), Objective-C, Python, Slice, +# VHDL, Fortran (fixed format Fortran: FortranFixed, free formatted Fortran: +# FortranFree, unknown formatted Fortran: Fortran. In the later case the parser +# tries to guess whether the code is fixed or free formatted code, this is the +# default for Fortran type files). For instance to make doxygen treat .inc files +# as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. +# +# Note: For files without extension you can use no_extension as a placeholder. +# +# Note that for custom extensions you also need to set FILE_PATTERNS otherwise +# the files are not read by doxygen. When specifying no_extension you should add +# * to the FILE_PATTERNS. +# +# Note see also the list of default file extension mappings. + +EXTENSION_MAPPING = + +# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments +# according to the Markdown format, which allows for more readable +# documentation. See https://daringfireball.net/projects/markdown/ for details. +# The output of markdown processing is further processed by doxygen, so you can +# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in +# case of backward compatibilities issues. +# The default value is: YES. + +MARKDOWN_SUPPORT = YES + +# When the TOC_INCLUDE_HEADINGS tag is set to a non-zero value, all headings up +# to that level are automatically included in the table of contents, even if +# they do not have an id attribute. +# Note: This feature currently applies only to Markdown headings. +# Minimum value: 0, maximum value: 99, default value: 5. +# This tag requires that the tag MARKDOWN_SUPPORT is set to YES. 
+ +TOC_INCLUDE_HEADINGS = 5 + +# The MARKDOWN_ID_STYLE tag can be used to specify the algorithm used to +# generate identifiers for the Markdown headings. Note: Every identifier is +# unique. +# Possible values are: DOXYGEN use a fixed 'autotoc_md' string followed by a +# sequence number starting at 0 and GITHUB use the lower case version of title +# with any whitespace replaced by '-' and punctuation characters removed. +# The default value is: DOXYGEN. +# This tag requires that the tag MARKDOWN_SUPPORT is set to YES. + +MARKDOWN_ID_STYLE = DOXYGEN + +# When enabled doxygen tries to link words that correspond to documented +# classes, or namespaces to their corresponding documentation. Such a link can +# be prevented in individual cases by putting a % sign in front of the word or +# globally by setting AUTOLINK_SUPPORT to NO. +# The default value is: YES. + +AUTOLINK_SUPPORT = YES + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should set this +# tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); +# versus func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. +# The default value is: NO. + +BUILTIN_STL_SUPPORT = NO + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. +# The default value is: NO. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip (see: +# https://www.riverbankcomputing.com/software/sip/intro) sources only. Doxygen +# will parse them like normal C++ but will assume all classes use public instead +# of private inheritance when no explicit protection keyword is present. +# The default value is: NO. 
+ +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate +# getter and setter methods for a property. Setting this option to YES will make +# doxygen to replace the get and set methods by a property in the documentation. +# This will only work if the methods are indeed getting or setting a simple +# type. If this is not the case, or you want to show the methods anyway, you +# should set this option to NO. +# The default value is: YES. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. +# The default value is: NO. + +DISTRIBUTE_GROUP_DOC = NO + +# If one adds a struct or class to a group and this option is enabled, then also +# any nested class or struct is added to the same group. By default this option +# is disabled and one has to add nested compounds explicitly via \ingroup. +# The default value is: NO. + +GROUP_NESTED_COMPOUNDS = NO + +# Set the SUBGROUPING tag to YES to allow class member groups of the same type +# (for instance a group of public functions) to be put as a subgroup of that +# type (e.g. under the Public Functions section). Set it to NO to prevent +# subgrouping. Alternatively, this can be done per class using the +# \nosubgrouping command. +# The default value is: YES. + +SUBGROUPING = YES + +# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions +# are shown inside the group in which they are included (e.g. using \ingroup) +# instead of on a separate page (for HTML and Man pages) or section (for LaTeX +# and RTF). +# +# Note that this feature does not work in combination with +# SEPARATE_MEMBER_PAGES. +# The default value is: NO. 
+ +INLINE_GROUPED_CLASSES = NO + +# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions +# with only public data fields or simple typedef fields will be shown inline in +# the documentation of the scope in which they are defined (i.e. file, +# namespace, or group documentation), provided this scope is documented. If set +# to NO, structs, classes, and unions are shown on a separate page (for HTML and +# Man pages) or section (for LaTeX and RTF). +# The default value is: NO. + +INLINE_SIMPLE_STRUCTS = NO + +# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or +# enum is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically be +# useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. +# The default value is: NO. + +TYPEDEF_HIDES_STRUCT = NO + +# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This +# cache is used to resolve symbols given their name and scope. Since this can be +# an expensive process and often the same symbol appears multiple times in the +# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small +# doxygen will become slower. If the cache is too large, memory is wasted. The +# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range +# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536 +# symbols. At the end of a run doxygen will report the cache usage and suggest +# the optimal cache size from a speed point of view. +# Minimum value: 0, maximum value: 9, default value: 0. 
+ +LOOKUP_CACHE_SIZE = 0 + +# The NUM_PROC_THREADS specifies the number of threads doxygen is allowed to use +# during processing. When set to 0 doxygen will based this on the number of +# cores available in the system. You can set it explicitly to a value larger +# than 0 to get more control over the balance between CPU load and processing +# speed. At this moment only the input processing can be done using multiple +# threads. Since this is still an experimental feature the default is set to 1, +# which effectively disables parallel processing. Please report any issues you +# encounter. Generating dot graphs in parallel is controlled by the +# DOT_NUM_THREADS setting. +# Minimum value: 0, maximum value: 32, default value: 1. + +NUM_PROC_THREADS = 1 + +# If the TIMESTAMP tag is set different from NO then each generated page will +# contain the date or date and time when the page was generated. Setting this to +# NO can help when comparing the output of multiple runs. +# Possible values are: YES, NO, DATETIME and DATE. +# The default value is: NO. + +TIMESTAMP = NO + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in +# documentation are documented, even if no documentation was available. Private +# class members and static file members will be hidden unless the +# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES. +# Note: This will also disable the warnings about undocumented members that are +# normally produced when WARNINGS is set to YES. +# The default value is: NO. + +EXTRACT_ALL = NO + +# If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will +# be included in the documentation. +# The default value is: NO. 
+ +EXTRACT_PRIVATE = NO + +# If the EXTRACT_PRIV_VIRTUAL tag is set to YES, documented private virtual +# methods of a class will be included in the documentation. +# The default value is: NO. + +EXTRACT_PRIV_VIRTUAL = NO + +# If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal +# scope will be included in the documentation. +# The default value is: NO. + +EXTRACT_PACKAGE = NO + +# If the EXTRACT_STATIC tag is set to YES, all static members of a file will be +# included in the documentation. +# The default value is: NO. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined +# locally in source files will be included in the documentation. If set to NO, +# only classes defined in header files are included. Does not have any effect +# for Java sources. +# The default value is: YES. + +EXTRACT_LOCAL_CLASSES = YES + +# This flag is only useful for Objective-C code. If set to YES, local methods, +# which are defined in the implementation section but not in the interface are +# included in the documentation. If set to NO, only methods in the interface are +# included. +# The default value is: NO. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base name of +# the file that contains the anonymous namespace. By default anonymous namespace +# are hidden. +# The default value is: NO. + +EXTRACT_ANON_NSPACES = NO + +# If this flag is set to YES, the name of an unnamed parameter in a declaration +# will be determined by the corresponding definition. By default unnamed +# parameters remain unnamed in the output. +# The default value is: YES. + +RESOLVE_UNNAMED_PARAMS = YES + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all +# undocumented members inside documented classes or files. 
If set to NO these +# members will be included in the various overviews, but no documentation +# section is generated. This option has no effect if EXTRACT_ALL is enabled. +# The default value is: NO. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. If set +# to NO, these classes will be included in the various overviews. This option +# will also hide undocumented C++ concepts if enabled. This option has no effect +# if EXTRACT_ALL is enabled. +# The default value is: NO. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend +# declarations. If set to NO, these declarations will be included in the +# documentation. +# The default value is: NO. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any +# documentation blocks found inside the body of a function. If set to NO, these +# blocks will be appended to the function's detailed documentation block. +# The default value is: NO. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation that is typed after a +# \internal command is included. If the tag is set to NO then the documentation +# will be excluded. Set it to YES to include the internal documentation. +# The default value is: NO. + +INTERNAL_DOCS = NO + +# With the correct setting of option CASE_SENSE_NAMES doxygen will better be +# able to match the capabilities of the underlying filesystem. In case the +# filesystem is case sensitive (i.e. it supports files in the same directory +# whose names only differ in casing), the option must be set to YES to properly +# deal with such files in case they appear in the input. 
For filesystems that +# are not case sensitive the option should be set to NO to properly deal with +# output files written for symbols that only differ in casing, such as for two +# classes, one named CLASS and the other named Class, and to also support +# references to files without having to specify the exact matching casing. On +# Windows (including Cygwin) and MacOS, users should typically set this option +# to NO, whereas on Linux or other Unix flavors it should typically be set to +# YES. +# Possible values are: SYSTEM, NO and YES. +# The default value is: SYSTEM. + +CASE_SENSE_NAMES = SYSTEM + +# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with +# their full class and namespace scopes in the documentation. If set to YES, the +# scope will be hidden. +# The default value is: NO. + +HIDE_SCOPE_NAMES = YES + +# If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will +# append additional text to a page's title, such as Class Reference. If set to +# YES the compound reference will be hidden. +# The default value is: NO. + +HIDE_COMPOUND_REFERENCE= NO + +# If the SHOW_HEADERFILE tag is set to YES then the documentation for a class +# will show which file needs to be included to use the class. +# The default value is: YES. + +SHOW_HEADERFILE = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of +# the files that are included by a file in the documentation of that file. +# The default value is: YES. + +SHOW_INCLUDE_FILES = NO + +# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each +# grouped member an include statement to the documentation, telling the reader +# which file to include in order to use the member. +# The default value is: NO. + +SHOW_GROUPED_MEMB_INC = NO + +# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include +# files with double quotes in the documentation rather than with sharp brackets. +# The default value is: NO. 
+ +FORCE_LOCAL_INCLUDES = NO + +# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the +# documentation for inline members. +# The default value is: YES. + +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the +# (detailed) documentation of file and class members alphabetically by member +# name. If set to NO, the members will appear in declaration order. +# The default value is: YES. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief +# descriptions of file, namespace and class members alphabetically by member +# name. If set to NO, the members will appear in declaration order. Note that +# this will also influence the order of the classes in the class list. +# The default value is: NO. + +SORT_BRIEF_DOCS = NO + +# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the +# (brief and detailed) documentation of class members so that constructors and +# destructors are listed first. If set to NO the constructors will appear in the +# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS. +# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief +# member documentation. +# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting +# detailed member documentation. +# The default value is: NO. + +SORT_MEMBERS_CTORS_1ST = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy +# of group names into alphabetical order. If set to NO the group names will +# appear in their defined order. +# The default value is: NO. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by +# fully-qualified names, including namespaces. If set to NO, the class list will +# be sorted only by class name, not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. 
+# Note: This option applies only to the class list, not to the alphabetical +# list. +# The default value is: NO. + +SORT_BY_SCOPE_NAME = NO + +# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper +# type resolution of all parameters of a function it will reject a match between +# the prototype and the implementation of a member function even if there is +# only one candidate or it is obvious which candidate to choose by doing a +# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still +# accept a match between prototype and implementation in such cases. +# The default value is: NO. + +STRICT_PROTO_MATCHING = NO + +# The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo +# list. This list is created by putting \todo commands in the documentation. +# The default value is: YES. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test +# list. This list is created by putting \test commands in the documentation. +# The default value is: YES. + +GENERATE_TESTLIST = YES + +# The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug +# list. This list is created by putting \bug commands in the documentation. +# The default value is: YES. + +GENERATE_BUGLIST = YES + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO) +# the deprecated list. This list is created by putting \deprecated commands in +# the documentation. +# The default value is: YES. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional documentation +# sections, marked by \if ... \endif and \cond +# ... \endcond blocks. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the +# initial value of a variable or macro / define can have for it to appear in the +# documentation. 
If the initializer consists of more lines than specified here +# it will be hidden. Use a value of 0 to hide initializers completely. The +# appearance of the value of individual variables and macros / defines can be +# controlled using \showinitializer or \hideinitializer command in the +# documentation regardless of this setting. +# Minimum value: 0, maximum value: 10000, default value: 30. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at +# the bottom of the documentation of classes and structs. If set to YES, the +# list will mention the files that were used to generate the documentation. +# The default value is: YES. + +SHOW_USED_FILES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This +# will remove the Files entry from the Quick Index and from the Folder Tree View +# (if specified). +# The default value is: YES. + +SHOW_FILES = NO + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces +# page. This will remove the Namespaces entry from the Quick Index and from the +# Folder Tree View (if specified). +# The default value is: YES. + +SHOW_NAMESPACES = NO + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command command input-file, where command is the value of the +# FILE_VERSION_FILTER tag, and input-file is the name of an input file provided +# by doxygen. Whatever the program writes to standard output is used as the file +# version. For an example see the documentation. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed +# by doxygen. The layout file controls the global structure of the generated +# output files in an output format independent way. 
To create the layout file +# that represents doxygen's defaults, run doxygen with the -l option. You can +# optionally specify a file name after the option, if omitted DoxygenLayout.xml +# will be used as the name of the layout file. See also section "Changing the +# layout of pages" for information. +# +# Note that if you run doxygen from a directory containing a file called +# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE +# tag is left empty. + +LAYOUT_FILE = + +# The CITE_BIB_FILES tag can be used to specify one or more bib files containing +# the reference definitions. This must be a list of .bib files. The .bib +# extension is automatically appended if omitted. This requires the bibtex tool +# to be installed. See also https://en.wikipedia.org/wiki/BibTeX for more info. +# For LaTeX the style of the bibliography can be controlled using +# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the +# search path. See also \cite for info how to create references. + +CITE_BIB_FILES = + +#--------------------------------------------------------------------------- +# Configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated to +# standard output by doxygen. If QUIET is set to YES this implies that the +# messages are off. +# The default value is: NO. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES +# this implies that the warnings are on. +# +# Tip: Turn warnings on while writing the documentation. +# The default value is: YES. + +WARNINGS = YES + +# If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate +# warnings for undocumented members. 
If EXTRACT_ALL is set to YES then this flag +# will automatically be disabled. +# The default value is: YES. + +WARN_IF_UNDOCUMENTED = NO + +# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as documenting some parameters in +# a documented function twice, or documenting parameters that don't exist or +# using markup commands wrongly. +# The default value is: YES. + +WARN_IF_DOC_ERROR = YES + +# If WARN_IF_INCOMPLETE_DOC is set to YES, doxygen will warn about incomplete +# function parameter documentation. If set to NO, doxygen will accept that some +# parameters have no documentation without warning. +# The default value is: YES. + +WARN_IF_INCOMPLETE_DOC = YES + +# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that +# are documented, but have no documentation for their parameters or return +# value. If set to NO, doxygen will only warn about wrong parameter +# documentation, but not about the absence of documentation. If EXTRACT_ALL is +# set to YES then this flag will automatically be disabled. See also +# WARN_IF_INCOMPLETE_DOC +# The default value is: NO. + +WARN_NO_PARAMDOC = NO + +# If WARN_IF_UNDOC_ENUM_VAL option is set to YES, doxygen will warn about +# undocumented enumeration values. If set to NO, doxygen will accept +# undocumented enumeration values. If EXTRACT_ALL is set to YES then this flag +# will automatically be disabled. +# The default value is: NO. + +WARN_IF_UNDOC_ENUM_VAL = NO + +# If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when +# a warning is encountered. If the WARN_AS_ERROR tag is set to FAIL_ON_WARNINGS +# then doxygen will continue running as if WARN_AS_ERROR tag is set to NO, but +# at the end of the doxygen process doxygen will return with a non-zero status. 
+# If the WARN_AS_ERROR tag is set to FAIL_ON_WARNINGS_PRINT then doxygen behaves +# like FAIL_ON_WARNINGS but in case no WARN_LOGFILE is defined doxygen will not +# write the warning messages in between other messages but write them at the end +# of a run, in case a WARN_LOGFILE is defined the warning messages will be +# besides being in the defined file also be shown at the end of a run, unless +# the WARN_LOGFILE is defined as - i.e. standard output (stdout) in that case +# the behavior will remain as with the setting FAIL_ON_WARNINGS. +# Possible values are: NO, YES, FAIL_ON_WARNINGS and FAIL_ON_WARNINGS_PRINT. +# The default value is: NO. + +WARN_AS_ERROR = NO + +# The WARN_FORMAT tag determines the format of the warning messages that doxygen +# can produce. The string should contain the $file, $line, and $text tags, which +# will be replaced by the file and line number from which the warning originated +# and the warning text. Optionally the format may contain $version, which will +# be replaced by the version of the file (if it could be obtained via +# FILE_VERSION_FILTER) +# See also: WARN_LINE_FORMAT +# The default value is: $file:$line: $text. + +WARN_FORMAT = "$file:$line: $text" + +# In the $text part of the WARN_FORMAT command it is possible that a reference +# to a more specific place is given. To make it easier to jump to this place +# (outside of doxygen) the user can define a custom "cut" / "paste" string. +# Example: +# WARN_LINE_FORMAT = "'vi $file +$line'" +# See also: WARN_FORMAT +# The default value is: at line $line of file $file. + +WARN_LINE_FORMAT = "at line $line of file $file" + +# The WARN_LOGFILE tag can be used to specify a file to which warning and error +# messages should be written. If left blank the output is written to standard +# error (stderr). In case the file specified cannot be opened for writing the +# warning and error messages are written to standard error. 
When as file - is
+# specified the warning and error messages are written to standard output
+# (stdout).
+
+WARN_LOGFILE =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the input files
+#---------------------------------------------------------------------------
+
+# The INPUT tag is used to specify the files and/or directories that contain
+# documented source files. You may enter file names like myfile.cpp or
+# directories like /usr/src/myproject. Separate the files or directories with
+# spaces. See also FILE_PATTERNS and EXTENSION_MAPPING
+# Note: If this tag is empty the current directory is searched.
+
+INPUT = ../../corese-core/src/main/java/fr/inria/corese/core/Graph.java \
+ ../../corese-core/src/main/java/fr/inria/corese/core/load/Load.java \
+ ../../corese-core/src/main/java/fr/inria/corese/core/transform/Transformer.java \
+ ../../corese-core/src/main/java/fr/inria/corese/core/query/QueryProcess.java \
+ ../../corese-core/src/main/java/fr/inria/corese/core/print/ResultFormat.java \
+ ../../corese-core/src/main/java/fr/inria/corese/core/shacl/Shacl.java \
+ ../../corese-core/src/main/java/fr/inria/corese/core/api/Loader.java
+
+# This tag can be used to specify the character encoding of the source files
+# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
+# libiconv (or the iconv built into libc) for the transcoding. See the libiconv
+# documentation (see:
+# https://www.gnu.org/software/libiconv/) for the list of possible encodings.
+# See also: INPUT_FILE_ENCODING
+# The default value is: UTF-8.
+
+INPUT_ENCODING = UTF-8
+
+# This tag can be used to specify the character encoding of the source files
+# that doxygen parses. The INPUT_FILE_ENCODING tag can be used to specify
+# character encoding on a per file pattern basis.
Doxygen will compare the file +# name with each pattern and apply the encoding instead of the default +# INPUT_ENCODING) if there is a match. The character encodings are a list of the +# form: pattern=encoding (like *.php=ISO-8859-1). See cfg_input_encoding +# "INPUT_ENCODING" for further information on supported encodings. + +INPUT_FILE_ENCODING = + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and +# *.h) to filter out the source-files in the directories. +# +# Note that for custom extensions or not directly supported extensions you also +# need to set EXTENSION_MAPPING for the extension otherwise the files are not +# read by doxygen. +# +# Note the list of default checked file patterns might differ from the list of +# default file extension mappings. +# +# If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cxxm, +# *.cpp, *.cppm, *.ccm, *.c++, *.c++m, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, +# *.idl, *.ddl, *.odl, *.h, *.hh, *.hxx, *.hpp, *.h++, *.ixx, *.l, *.cs, *.d, +# *.php, *.php4, *.php5, *.phtml, *.inc, *.m, *.markdown, *.md, *.mm, *.dox (to +# be provided as doxygen C comment), *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, +# *.f18, *.f, *.for, *.vhd, *.vhdl, *.ucf, *.qsf and *.ice. + +FILE_PATTERNS = *.java + +# The RECURSIVE tag can be used to specify whether or not subdirectories should +# be searched for input files as well. +# The default value is: NO. + +RECURSIVE = NO + +# The EXCLUDE tag can be used to specify files and/or directories that should be +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. +# +# Note that relative paths are relative to the directory from which doxygen is +# run. 
+ +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or +# directories that are symbolic links (a Unix file system feature) are excluded +# from the input. +# The default value is: NO. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. +# +# Note that the wildcards are matched against the file with absolute path, so to +# exclude all test directories for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# ANamespace::AClass, ANamespace::*Test + +EXCLUDE_SYMBOLS = java::lang fr::inria::corese::sparql* + +# The EXAMPLE_PATH tag can be used to specify one or more files or directories +# that contain example code fragments that are included (see the \include +# command). + +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and +# *.h) to filter out the source-files in the directories. If left blank all +# files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude commands +# irrespective of the value of the RECURSIVE tag. +# The default value is: NO. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or directories +# that contain images that are to be included in the documentation (see the +# \image command). 
+ +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. Doxygen will invoke the filter program +# by executing (via popen()) the command: +# +# +# +# where is the value of the INPUT_FILTER tag, and is the +# name of an input file. Doxygen will then use the output that the filter +# program writes to standard output. If FILTER_PATTERNS is specified, this tag +# will be ignored. +# +# Note that the filter must not add or remove lines; it is applied before the +# code is scanned, but not when the output code is generated. If lines are added +# or removed, the anchors will not be placed correctly. +# +# Note that doxygen will use the data processed and written to standard output +# for further processing, therefore nothing else, like debug statements or used +# commands (so in case of a Windows batch file always use @echo OFF), should be +# written to standard output. +# +# Note that for custom extensions or not directly supported extensions you also +# need to set EXTENSION_MAPPING for the extension otherwise the files are not +# properly processed by doxygen. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. The filters are a list of the form: pattern=filter +# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how +# filters are used. If the FILTER_PATTERNS tag is empty or if none of the +# patterns match the file name, INPUT_FILTER is applied. +# +# Note that for custom extensions or not directly supported extensions you also +# need to set EXTENSION_MAPPING for the extension otherwise the files are not +# properly processed by doxygen. 
+ +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will also be used to filter the input files that are used for +# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES). +# The default value is: NO. + +FILTER_SOURCE_FILES = NO + +# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file +# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and +# it is also possible to disable source filtering for a specific pattern using +# *.ext= (so without naming a filter). +# This tag requires that the tag FILTER_SOURCE_FILES is set to YES. + +FILTER_SOURCE_PATTERNS = + +# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that +# is part of the input, its contents will be placed on the main page +# (index.html). This can be useful if you have a project on for instance GitHub +# and want to reuse the introduction page also for the doxygen output. + +USE_MDFILE_AS_MAINPAGE = + +# The Fortran standard specifies that for fixed formatted Fortran code all +# characters from position 72 are to be considered as comment. A common +# extension is to allow longer lines before the automatic comment starts. The +# setting FORTRAN_COMMENT_AFTER will also make it possible that longer lines can +# be processed before the automatic comment starts. +# Minimum value: 7, maximum value: 10000, default value: 72. + +FORTRAN_COMMENT_AFTER = 72 + +#--------------------------------------------------------------------------- +# Configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will be +# generated. Documented entities will be cross-referenced with these sources. +# +# Note: To get rid of all source code in the generated output, make sure that +# also VERBATIM_HEADERS is set to NO. 
+# The default value is: NO. + +SOURCE_BROWSER = NO + +# Setting the INLINE_SOURCES tag to YES will include the body of functions, +# multi-line macros, enums or list initialized variables directly into the +# documentation. +# The default value is: NO. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any +# special comment blocks from generated source code fragments. Normal C, C++ and +# Fortran comments will always remain visible. +# The default value is: YES. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES then for each documented +# entity all documented functions referencing it will be listed. +# The default value is: NO. + +REFERENCED_BY_RELATION = NO + +# If the REFERENCES_RELATION tag is set to YES then for each documented function +# all documented entities called/used by that function will be listed. +# The default value is: NO. + +REFERENCES_RELATION = NO + +# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set +# to YES then the hyperlinks from functions in REFERENCES_RELATION and +# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will +# link to the documentation. +# The default value is: YES. + +REFERENCES_LINK_SOURCE = YES + +# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the +# source code will show a tooltip with additional information such as prototype, +# brief description and links to the definition and documentation. Since this +# will make the HTML file larger and loading of large files a bit slower, you +# can opt to disable this feature. +# The default value is: YES. +# This tag requires that the tag SOURCE_BROWSER is set to YES. + +SOURCE_TOOLTIPS = YES + +# If the USE_HTAGS tag is set to YES then the references to source code will +# point to the HTML generated by the htags(1) tool instead of doxygen built-in +# source browser. 
The htags tool is part of GNU's global source tagging system +# (see https://www.gnu.org/software/global/global.html). You will need version +# 4.8.6 or higher. +# +# To use it do the following: +# - Install the latest version of global +# - Enable SOURCE_BROWSER and USE_HTAGS in the configuration file +# - Make sure the INPUT points to the root of the source tree +# - Run doxygen as normal +# +# Doxygen will invoke htags (and that will in turn invoke gtags), so these +# tools must be available from the command line (i.e. in the search path). +# +# The result: instead of the source browser generated by doxygen, the links to +# source code will now point to the output of htags. +# The default value is: NO. +# This tag requires that the tag SOURCE_BROWSER is set to YES. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set the YES then doxygen will generate a +# verbatim copy of the header file for each class for which an include is +# specified. Set to NO to disable this. +# See also: Section \class. +# The default value is: YES. + +VERBATIM_HEADERS = YES + +# If the CLANG_ASSISTED_PARSING tag is set to YES then doxygen will use the +# clang parser (see: +# http://clang.llvm.org/) for more accurate parsing at the cost of reduced +# performance. This can be particularly helpful with template rich C++ code for +# which doxygen's built-in parser lacks the necessary type information. +# Note: The availability of this option depends on whether or not doxygen was +# generated with the -Duse_libclang=ON option for CMake. +# The default value is: NO. + +CLANG_ASSISTED_PARSING = NO + +# If the CLANG_ASSISTED_PARSING tag is set to YES and the CLANG_ADD_INC_PATHS +# tag is set to YES then doxygen will add the directory of each input to the +# include path. +# The default value is: YES. +# This tag requires that the tag CLANG_ASSISTED_PARSING is set to YES. 
+ +CLANG_ADD_INC_PATHS = NO + +# If clang assisted parsing is enabled you can provide the compiler with command +# line options that you would normally use when invoking the compiler. Note that +# the include paths will already be set by doxygen for the files and directories +# specified with INPUT and INCLUDE_PATH. +# This tag requires that the tag CLANG_ASSISTED_PARSING is set to YES. + +CLANG_OPTIONS = + +# If clang assisted parsing is enabled you can provide the clang parser with the +# path to the directory containing a file called compile_commands.json. This +# file is the compilation database (see: +# http://clang.llvm.org/docs/HowToSetupToolingForLLVM.html) containing the +# options used when the source files were built. This is equivalent to +# specifying the -p option to a clang tool, such as clang-check. These options +# will then be passed to the parser. Any options specified with CLANG_OPTIONS +# will be added as well. +# Note: The availability of this option depends on whether or not doxygen was +# generated with the -Duse_libclang=ON option for CMake. + +CLANG_DATABASE_PATH = + +#--------------------------------------------------------------------------- +# Configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all +# compounds will be generated. Enable this if the project contains a lot of +# classes, structs, unions or interfaces. +# The default value is: YES. + +ALPHABETICAL_INDEX = YES + +# The IGNORE_PREFIX tag can be used to specify a prefix (or a list of prefixes) +# that should be ignored while generating the index headers. The IGNORE_PREFIX +# tag works for classes, function and member names. The entity will be placed in +# the alphabetical list under the first letter of the entity name that remains +# after removing the prefix. 
+# This tag requires that the tag ALPHABETICAL_INDEX is set to YES. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output +# The default value is: YES. + +GENERATE_HTML = NO + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a +# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of +# it. +# The default directory is: html. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_OUTPUT = doxygen_html + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each +# generated HTML page (for example: .htm, .php, .asp). +# The default value is: .html. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a user-defined HTML header file for +# each generated HTML page. If the tag is left blank doxygen will generate a +# standard header. +# +# To get valid HTML the header file that includes any scripts and style sheets +# that doxygen needs, which is dependent on the configuration options used (e.g. +# the setting GENERATE_TREEVIEW). It is highly recommended to start with a +# default header using +# doxygen -w html new_header.html new_footer.html new_stylesheet.css +# YourConfigFile +# and then modify the file new_header.html. See also section "Doxygen usage" +# for information on how to generate the default header that doxygen normally +# uses. +# Note: The header is subject to change so you typically have to regenerate the +# default header when upgrading to a newer version of doxygen. For a description +# of the possible markers and block names see the documentation. +# This tag requires that the tag GENERATE_HTML is set to YES. 
+ +HTML_HEADER = + +# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each +# generated HTML page. If the tag is left blank doxygen will generate a standard +# footer. See HTML_HEADER for more information on how to generate a default +# footer and what special commands can be used inside the footer. See also +# section "Doxygen usage" for information on how to generate the default footer +# that doxygen normally uses. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FOOTER = + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style +# sheet that is used by each HTML page. It can be used to fine-tune the look of +# the HTML output. If left blank doxygen will generate a default style sheet. +# See also section "Doxygen usage" for information on how to generate the style +# sheet that doxygen normally uses. +# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as +# it is more robust and this tag (HTML_STYLESHEET) will in the future become +# obsolete. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_STYLESHEET = + +# The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined +# cascading style sheets that are included after the standard style sheets +# created by doxygen. Using this option one can overrule certain style aspects. +# This is preferred over using HTML_STYLESHEET since it does not replace the +# standard style sheet and is therefore more robust against future updates. +# Doxygen will copy the style sheet files to the output directory. +# Note: The order of the extra style sheet files is of importance (e.g. the last +# style sheet in the list overrules the setting of the previous ones in the +# list). +# Note: Since the styling of scrollbars can currently not be overruled in +# Webkit/Chromium, the styling will be left out of the default doxygen.css if +# one or more extra stylesheets have been specified. 
So if scrollbar +# customization is desired it has to be added explicitly. For an example see the +# documentation. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_EXTRA_STYLESHEET = + +# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or +# other source files which should be copied to the HTML output directory. Note +# that these files will be copied to the base HTML output directory. Use the +# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these +# files. In the HTML_STYLESHEET file, use the file name only. Also note that the +# files will be copied as-is; there are no commands or markers available. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_EXTRA_FILES = + +# The HTML_COLORSTYLE tag can be used to specify if the generated HTML output +# should be rendered with a dark or light theme. +# Possible values are: LIGHT always generate light mode output, DARK always +# generate dark mode output, AUTO_LIGHT automatically set the mode according to +# the user preference, use light mode if no preference is set (the default), +# AUTO_DARK automatically set the mode according to the user preference, use +# dark mode if no preference is set and TOGGLE allow to user to switch between +# light and dark mode via a button. +# The default value is: AUTO_LIGHT. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE = AUTO_LIGHT + +# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen +# will adjust the colors in the style sheet and background images according to +# this color. Hue is specified as an angle on a color-wheel, see +# https://en.wikipedia.org/wiki/Hue for more information. For instance the value +# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300 +# purple, and 360 is red again. +# Minimum value: 0, maximum value: 359, default value: 220. 
+# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_HUE = 220 + +# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors +# in the HTML output. For a value of 0 the output will use gray-scales only. A +# value of 255 will produce the most vivid colors. +# Minimum value: 0, maximum value: 255, default value: 100. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_SAT = 100 + +# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the +# luminance component of the colors in the HTML output. Values below 100 +# gradually make the output lighter, whereas values above 100 make the output +# darker. The value divided by 100 is the actual gamma applied, so 80 represents +# a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not +# change the gamma. +# Minimum value: 40, maximum value: 240, default value: 80. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_GAMMA = 80 + +# If the HTML_DYNAMIC_MENUS tag is set to YES then the generated HTML +# documentation will contain a main index with vertical navigation menus that +# are dynamically created via JavaScript. If disabled, the navigation index will +# consists of multiple levels of tabs that are statically embedded in every HTML +# page. Disable this option to support browsers that do not have JavaScript, +# like the Qt help browser. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_DYNAMIC_MENUS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. 
+ +HTML_DYNAMIC_SECTIONS = NO + +# If the HTML_CODE_FOLDING tag is set to YES then classes and functions can be +# dynamically folded and expanded in the generated HTML source code. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_CODE_FOLDING = YES + +# If the HTML_COPY_CLIPBOARD tag is set to YES then doxygen will show an icon in +# the top right corner of code and text fragments that allows the user to copy +# its content to the clipboard. Note this only works if supported by the browser +# and the web page is served via a secure context (see: +# https://www.w3.org/TR/secure-contexts/), i.e. using the https: or file: +# protocol. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COPY_CLIPBOARD = YES + +# Doxygen stores a couple of settings persistently in the browser (via e.g. +# cookies). By default these settings apply to all HTML pages generated by +# doxygen across all projects. The HTML_PROJECT_COOKIE tag can be used to store +# the settings under a project specific key, such that the user preferences will +# be stored separately. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_PROJECT_COOKIE = + +# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries +# shown in the various tree structured indices initially; the user can expand +# and collapse entries dynamically later on. Doxygen will expand the tree to +# such a level that at most the specified number of entries are visible (unless +# a fully collapsed tree already exceeds this amount). So setting the number of +# entries 1 will produce a full collapsed tree by default. 0 is a special value +# representing an infinite number of entries and will result in a full expanded +# tree by default. +# Minimum value: 0, maximum value: 9999, default value: 100. +# This tag requires that the tag GENERATE_HTML is set to YES. 
+ +HTML_INDEX_NUM_ENTRIES = 100 + +# If the GENERATE_DOCSET tag is set to YES, additional index files will be +# generated that can be used as input for Apple's Xcode 3 integrated development +# environment (see: +# https://developer.apple.com/xcode/), introduced with OSX 10.5 (Leopard). To +# create a documentation set, doxygen will generate a Makefile in the HTML +# output directory. Running make will produce the docset in that directory and +# running make install will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at +# startup. See https://developer.apple.com/library/archive/featuredarticles/Doxy +# genXcode/_index.html for more information. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_DOCSET = NO + +# This tag determines the name of the docset feed. A documentation feed provides +# an umbrella under which multiple documentation sets from a single provider +# (such as a company or product suite) can be grouped. +# The default value is: Doxygen generated docs. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# This tag determines the URL of the docset feed. A documentation feed provides +# an umbrella under which multiple documentation sets from a single provider +# (such as a company or product suite) can be grouped. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_FEEDURL = + +# This tag specifies a string that should uniquely identify the documentation +# set bundle. This should be a reverse domain-name style string, e.g. +# com.mycompany.MyDocSet. Doxygen will append .docset to the name. +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify +# the documentation publisher. 
This should be a reverse domain-name style +# string, e.g. com.mycompany.MyDocSet.documentation. +# The default value is: org.doxygen.Publisher. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_PUBLISHER_ID = org.doxygen.Publisher + +# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher. +# The default value is: Publisher. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_PUBLISHER_NAME = Publisher + +# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three +# additional HTML index files: index.hhp, index.hhc, and index.hhk. The +# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop +# on Windows. In the beginning of 2021 Microsoft took the original page, with +# a.o. the download links, offline (the HTML help workshop was already many years +# in maintenance mode). You can download the HTML help workshop from the web +# archives at Installation executable (see: +# http://web.archive.org/web/20160201063255/http://download.microsoft.com/downlo +# ad/0/A/9/0A939EF6-E31C-430F-A3DF-DFAE7960D564/htmlhelp.exe). +# +# The HTML Help Workshop contains a compiler that can convert all HTML output +# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML +# files are now used as the Windows 98 help format, and will replace the old +# Windows help format (.hlp) on all Windows platforms in the future. Compressed +# HTML files also contain an index, a table of contents, and you can search for +# words in the documentation. The HTML workshop also contains a viewer for +# compressed HTML files. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_HTMLHELP = NO + +# The CHM_FILE tag can be used to specify the file name of the resulting .chm +# file. You can add a path in front of the file if the result should not be +# written to the html output directory. 
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +CHM_FILE = + +# The HHC_LOCATION tag can be used to specify the location (absolute path +# including file name) of the HTML help compiler (hhc.exe). If non-empty, +# doxygen will try to run the HTML help compiler on the generated index.hhp. +# The file has to be specified with full path. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +HHC_LOCATION = + +# The GENERATE_CHI flag controls if a separate .chi index file is generated +# (YES) or that it should be included in the main .chm file (NO). +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +GENERATE_CHI = NO + +# The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc) +# and project file content. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +CHM_INDEX_ENCODING = + +# The BINARY_TOC flag controls whether a binary table of contents is generated +# (YES) or a normal table of contents (NO) in the .chm file. Furthermore it +# enables the Previous and Next buttons. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +BINARY_TOC = NO + +# The TOC_EXPAND flag can be set to YES to add extra items for group members to +# the table of contents of the HTML help documentation and to the tree view. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +TOC_EXPAND = NO + +# The SITEMAP_URL tag is used to specify the full URL of the place where the +# generated documentation will be placed on the server by the user during the +# deployment of the documentation. The generated sitemap is called sitemap.xml +# and placed on the directory specified by HTML_OUTPUT. In case no SITEMAP_URL +# is specified no sitemap is generated. For information about the sitemap +# protocol see https://www.sitemaps.org +# This tag requires that the tag GENERATE_HTML is set to YES. 
+ +SITEMAP_URL = + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and +# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that +# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help +# (.qch) of the generated HTML documentation. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify +# the file name of the resulting .qch file. The path specified is relative to +# the HTML output folder. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help +# Project output. For more information please see Qt Help Project / Namespace +# (see: +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace). +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_NAMESPACE = org.doxygen.Project + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt +# Help Project output. For more information please see Qt Help Project / Virtual +# Folders (see: +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual-folders). +# The default value is: doc. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_VIRTUAL_FOLDER = doc + +# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom +# filter to add. For more information please see Qt Help Project / Custom +# Filters (see: +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-filters). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the +# custom filter to add. 
For more information please see Qt Help Project / Custom +# Filters (see: +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-filters). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this +# project's filter section matches. Qt Help Project / Filter Attributes (see: +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#filter-attributes). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_SECT_FILTER_ATTRS = + +# The QHG_LOCATION tag can be used to specify the location (absolute path +# including file name) of Qt's qhelpgenerator. If non-empty doxygen will try to +# run qhelpgenerator on the generated .qhp file. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHG_LOCATION = + +# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be +# generated, together with the HTML files, they form an Eclipse help plugin. To +# install this plugin and make it available under the help contents menu in +# Eclipse, the contents of the directory containing the HTML and XML files needs +# to be copied into the plugins directory of eclipse. The name of the directory +# within the plugins directory should be the same as the ECLIPSE_DOC_ID value. +# After copying Eclipse needs to be restarted before the help appears. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_ECLIPSEHELP = NO + +# A unique identifier for the Eclipse help plugin. When installing the plugin +# the directory name containing the HTML and XML files should also have this +# name. Each documentation set should have its own identifier. +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES. 
+ +ECLIPSE_DOC_ID = org.doxygen.Project + +# If you want full control over the layout of the generated HTML pages it might +# be necessary to disable the index and replace it with your own. The +# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top +# of each HTML page. A value of NO enables the index and the value YES disables +# it. Since the tabs in the index contain the same information as the navigation +# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +DISABLE_INDEX = NO + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. If the tag +# value is set to YES, a side panel will be generated containing a tree-like +# index structure (just like the one that is generated for HTML Help). For this +# to work a browser that supports JavaScript, DHTML, CSS and frames is required +# (i.e. any modern browser). Windows users are probably better off using the +# HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can +# further fine tune the look of the index (see "Fine-tuning the output"). As an +# example, the default style sheet generated by doxygen has an example that +# shows how to put an image at the root of the tree instead of the PROJECT_NAME. +# Since the tree basically has the same information as the tab index, you could +# consider setting DISABLE_INDEX to YES when enabling this option. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_TREEVIEW = NO + +# When both GENERATE_TREEVIEW and DISABLE_INDEX are set to YES, then the +# FULL_SIDEBAR option determines if the side bar is limited to only the treeview +# area (value NO) or if it should extend to the full height of the window (value +# YES). 
Setting this to YES gives a layout similar to +# https://docs.readthedocs.io with more room for contents, but less room for the +# project logo, title, and description. If either GENERATE_TREEVIEW or +# DISABLE_INDEX is set to NO, this option has no effect. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +FULL_SIDEBAR = NO + +# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that +# doxygen will group on one line in the generated HTML documentation. +# +# Note that a value of 0 will completely suppress the enum values from appearing +# in the overview section. +# Minimum value: 0, maximum value: 20, default value: 4. +# This tag requires that the tag GENERATE_HTML is set to YES. + +ENUM_VALUES_PER_LINE = 4 + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used +# to set the initial width (in pixels) of the frame in which the tree is shown. +# Minimum value: 0, maximum value: 1500, default value: 250. +# This tag requires that the tag GENERATE_HTML is set to YES. + +TREEVIEW_WIDTH = 250 + +# If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to +# external symbols imported via tag files in a separate window. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +EXT_LINKS_IN_WINDOW = NO + +# If the OBFUSCATE_EMAILS tag is set to YES, doxygen will obfuscate email +# addresses. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. + +OBFUSCATE_EMAILS = YES + +# If the HTML_FORMULA_FORMAT option is set to svg, doxygen will use the pdf2svg +# tool (see https://github.com/dawbarton/pdf2svg) or inkscape (see +# https://inkscape.org) to generate formulas as SVG images instead of PNGs for +# the HTML output. These images will generally look nicer at scaled resolutions. +# Possible values are: png (the default) and svg (looks nicer but requires the +# pdf2svg or inkscape tool). 
+# The default value is: png. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FORMULA_FORMAT = png + +# Use this tag to change the font size of LaTeX formulas included as images in +# the HTML documentation. When you change the font size after a successful +# doxygen run you need to manually remove any form_*.png images from the HTML +# output directory to force them to be regenerated. +# Minimum value: 8, maximum value: 50, default value: 10. +# This tag requires that the tag GENERATE_HTML is set to YES. + +FORMULA_FONTSIZE = 10 + +# The FORMULA_MACROFILE can contain LaTeX \newcommand and \renewcommand commands +# to create new LaTeX commands to be used in formulas as building blocks. See +# the section "Including formulas" for details. + +FORMULA_MACROFILE = + +# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see +# https://www.mathjax.org) which uses client side JavaScript for the rendering +# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX +# installed or if you want the formulas to look prettier in the HTML output. When +# enabled you may also need to install MathJax separately and configure the path +# to it using the MATHJAX_RELPATH option. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +USE_MATHJAX = NO + +# With MATHJAX_VERSION it is possible to specify the MathJax version to be used. +# Note that the different versions of MathJax have different requirements with +# regards to the different settings, so it is possible that also other MathJax +# settings have to be changed when switching between the different MathJax +# versions. +# Possible values are: MathJax_2 and MathJax_3. +# The default value is: MathJax_2. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_VERSION = MathJax_2 + +# When MathJax is enabled you can set the default output format to be used for +# the MathJax output. 
For more details about the output format see MathJax +# version 2 (see: +# http://docs.mathjax.org/en/v2.7-latest/output.html) and MathJax version 3 +# (see: +# http://docs.mathjax.org/en/latest/web/components/output.html). +# Possible values are: HTML-CSS (which is slower, but has the best +# compatibility. This is the name for Mathjax version 2, for MathJax version 3 +# this will be translated into chtml), NativeMML (i.e. MathML. Only supported +# for MathJax 2. For MathJax version 3 chtml will be used instead.), chtml (This +# is the name for Mathjax version 3, for MathJax version 2 this will be +# translated into HTML-CSS) and SVG. +# The default value is: HTML-CSS. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_FORMAT = HTML-CSS + +# When MathJax is enabled you need to specify the location relative to the HTML +# output directory using the MATHJAX_RELPATH option. The destination directory +# should contain the MathJax.js script. For instance, if the mathjax directory +# is located at the same level as the HTML output directory, then +# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax +# Content Delivery Network so you can quickly see the result without installing +# MathJax. However, it is strongly recommended to install a local copy of +# MathJax from https://www.mathjax.org before deployment. The default value is: +# - in case of MathJax version 2: https://cdn.jsdelivr.net/npm/mathjax@2 +# - in case of MathJax version 3: https://cdn.jsdelivr.net/npm/mathjax@3 +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_RELPATH = + +# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax +# extension names that should be enabled during MathJax rendering. 
For example +# for MathJax version 2 (see +# https://docs.mathjax.org/en/v2.7-latest/tex.html#tex-and-latex-extensions): +# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols +# For example for MathJax version 3 (see +# http://docs.mathjax.org/en/latest/input/tex/extensions/index.html): +# MATHJAX_EXTENSIONS = ams +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_EXTENSIONS = + +# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces +# of code that will be used on startup of the MathJax code. See the MathJax site +# (see: +# http://docs.mathjax.org/en/v2.7-latest/output.html) for more details. For an +# example see the documentation. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_CODEFILE = + +# When the SEARCHENGINE tag is enabled doxygen will generate a search box for +# the HTML output. The underlying search engine uses javascript and DHTML and +# should work on any modern browser. Note that when using HTML help +# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET) +# there is already a search function so this one should typically be disabled. +# For large projects the javascript based search engine can be slow, then +# enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to +# search using the keyboard; to jump to the search box use + S +# (what the is depends on the OS and browser, but it is typically +# , /