diff --git a/.gitignore b/.gitignore index 9e727f0b..deeb8432 100755 --- a/.gitignore +++ b/.gitignore @@ -55,3 +55,6 @@ release/RELEASE_OUTPUT.md .idea data .nvmrc +wikidata +qendpoint-store/wdbench-indexes +wdbench-results diff --git a/pom.xml b/pom.xml index 02f4faf0..7843a378 100644 --- a/pom.xml +++ b/pom.xml @@ -69,6 +69,19 @@ 17 + + + oss.sonatype.org-snapshot + https://oss.sonatype.org/content/repositories/snapshots + + false + + + true + + + + sonatype-nexus-snapshots @@ -205,4 +218,4 @@ Github https://github.com/the-qa-company/qEndpoint/issues - \ No newline at end of file + diff --git a/qendpoint-backend/pom.xml b/qendpoint-backend/pom.xml index 12238424..17a676ff 100644 --- a/qendpoint-backend/pom.xml +++ b/qendpoint-backend/pom.xml @@ -25,12 +25,25 @@ + + + oss.sonatype.org-snapshot + https://oss.sonatype.org/content/repositories/snapshots + + false + + + true + + + + 1.15 1.1.1 4.13.2 3.3.1 - 4.2.3 + 5.0.0-SNAPSHOT 3.0.2 1.4.5 @@ -132,4 +145,4 @@ - \ No newline at end of file + diff --git a/qendpoint-core/pom.xml b/qendpoint-core/pom.xml index 99b2ae24..b6dce5c9 100644 --- a/qendpoint-core/pom.xml +++ b/qendpoint-core/pom.xml @@ -26,9 +26,7 @@ com.the-qa-company - - qendpoint-parent - + qendpoint-parent 1.14.1 @@ -46,7 +44,6 @@ 1.1.1 4.13.2 3.3.1 - 4.2.3 2.7.5 1.4.5 0.9.44 @@ -54,9 +51,7 @@ 4.3.2 1.7.30 - - UTF-8 - + UTF-8 UTF-8 @@ -99,18 +94,14 @@ pl.pragmatists - - JUnitParams - + JUnitParams 1.1.1 test org.tukaani xz - - 1.6 - + 1.6 com.beust @@ -123,4 +114,4 @@ ${roaringbitmap.version} - \ No newline at end of file + diff --git a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/enums/TripleComponentOrder.java b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/enums/TripleComponentOrder.java index fb648429..9669f888 100644 --- a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/enums/TripleComponentOrder.java +++ b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/enums/TripleComponentOrder.java @@ -19,6 +19,8 @@ package com.the_qa_company.qendpoint.core.enums; +import java.util.ArrayList; +import java.util.List; import java.util.Map; /** @@ -78,6 +80,24 @@ public enum TripleComponentOrder { this.mask = mask; } + /** + * Search for an acceptable value in a map of orders + * + * @param flags flags to search the value + * @param map map + * @param value type + * @return find value, null for no matching value + */ + public static List fetchAllBestForCfg(int flags, Map map) { + ArrayList ret = new ArrayList<>(); + for (Map.Entry e : map.entrySet()) { + if ((e.getKey().mask & flags) != 0) { + ret.add(e.getKey()); + } + } + return ret; + } + /** * Search for an acceptable value in a map of orders * @@ -123,4 +143,5 @@ public TripleComponentRole getPredicateMapping() { public TripleComponentRole getObjectMapping() { return objectMapping; } + } diff --git a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/hdt/impl/HDTImpl.java b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/hdt/impl/HDTImpl.java index b7d2c95c..868e63db 100644 --- a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/hdt/impl/HDTImpl.java +++ b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/hdt/impl/HDTImpl.java @@ -206,7 +206,8 @@ public void mapFromHDT(File f, long offset, ProgressListener listener) throws IO String hdtFormat = ci.getFormat(); if (!hdtFormat.equals(HDTVocabulary.HDT_CONTAINER) && !hdtFormat.equals(HDTVocabulary.HDT_CONTAINER_2)) { throw new IllegalFormatException("This software (v" + 
HDTVersion.HDT_VERSION + ".x.x | v" - + HDTVersion.HDT_VERSION_2 + ".x.x) cannot open this version of HDT File (" + hdtFormat + ")"); + + HDTVersion.HDT_VERSION_2 + ".x.x) cannot open this version of HDT File hdtFileName:" + + hdtFileName + " format:" + hdtFormat + ""); } input.printIndex("HDT Header"); diff --git a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/QEPComponent.java b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/QEPComponent.java index 2353ed4d..01cc1c62 100644 --- a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/QEPComponent.java +++ b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/QEPComponent.java @@ -10,6 +10,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.Serial; +import java.io.Serializable; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -21,10 +23,14 @@ * * @author Antoine Willerval */ -public class QEPComponent implements Cloneable { - record SharedElement(long id, DictionarySectionRole role, QEPDataset dataset, String debugMapped) {} +public class QEPComponent implements Cloneable, Serializable { + @Serial + private static final long serialVersionUID = 6621230579376315429L; - record PredicateElement(long id, QEPDataset dataset) {} + record SharedElement(long id, DictionarySectionRole role, QEPDataset dataset, String debugMapped) + implements Serializable {} + + record PredicateElement(long id, QEPDataset dataset) implements Serializable {} private static final Logger logger = LoggerFactory.getLogger(QEPComponent.class); @@ -34,7 +40,7 @@ record PredicateElement(long id, QEPDataset dataset) {} RDFNodeType rdfNodeType; Optional language; CharSequence datatype; - final QEPCore core; + transient final QEPCore core; private QEPComponent(QEPComponent other) { this.predicateIds = new HashMap<>(other.predicateIds); diff --git a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/QEPCore.java b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/QEPCore.java index d1c142df..0eabb5c6 100644 --- a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/QEPCore.java +++ b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/QEPCore.java @@ -17,6 +17,7 @@ import com.the_qa_company.qendpoint.core.options.HDTOptions; import com.the_qa_company.qendpoint.core.storage.converter.NodeConverter; import com.the_qa_company.qendpoint.core.storage.iterator.CatQueryCloseable; +import com.the_qa_company.qendpoint.core.storage.iterator.CloseableIterator; import com.the_qa_company.qendpoint.core.storage.iterator.QueryCloseableIterator; import com.the_qa_company.qendpoint.core.storage.merge.QEPCoreMergeThread; import com.the_qa_company.qendpoint.core.storage.search.QEPComponentTriple; @@ -31,6 +32,8 @@ import org.slf4j.LoggerFactory; import java.io.IOException; +import java.io.Serial; +import java.io.Serializable; import java.nio.file.Files; import java.nio.file.NoSuchFileException; import java.nio.file.Path; @@ -79,6 +82,7 @@ * @author Antoine Willerval */ public class QEPCore implements AutoCloseable { + private static final Logger logger = LoggerFactory.getLogger(QEPCore.class); /** * the max size of a dataset id diff --git a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/QEPDataset.java b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/QEPDataset.java index a4cbde0b..6cf9ffda 100644 --- 
a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/QEPDataset.java +++ b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/QEPDataset.java @@ -14,6 +14,8 @@ import java.io.Closeable; import java.io.IOException; +import java.io.Serial; +import java.io.Serializable; import java.nio.file.Path; import java.util.Objects; import java.util.concurrent.atomic.AtomicInteger; @@ -26,7 +28,10 @@ * @author Antoine Willerval * @see QEPCore */ -public class QEPDataset implements Closeable { +public class QEPDataset implements Closeable, Serializable { + + @Serial + private static final long serialVersionUID = 7525689572432598258L; public record ComponentFind(QEPDataset dataset, TripleComponentRole role, long id, long pid) { public boolean isFind() { @@ -261,7 +266,7 @@ public void setComponentInDelta(TripleComponentRole role, long component) { public QueryCloseableIterator search(CharSequence subject, CharSequence predicate, CharSequence object) throws QEPCoreException { QEPDatasetContext ctx = createContext(); - return search(ctx, subject, predicate, object).attach(ctx); + return (QueryCloseableIterator) search(ctx, subject, predicate, object).attach(ctx); } /** @@ -278,7 +283,7 @@ public QueryCloseableIterator search(CharSequence subject, CharSequence predicat public QueryCloseableIterator search(QEPComponent subject, QEPComponent predicate, QEPComponent object) throws QEPCoreException { QEPDatasetContext ctx = createContext(); - return search(ctx, subject, predicate, object).attach(ctx); + return (QueryCloseableIterator) search(ctx, subject, predicate, object).attach(ctx); } /** @@ -292,7 +297,7 @@ public QueryCloseableIterator search(QEPComponent subject, QEPComponent predicat */ public QueryCloseableIterator search(QEPComponentTriple pattern) throws QEPCoreException { QEPDatasetContext ctx = createContext(); - return search(ctx, pattern).attach(ctx); + return (QueryCloseableIterator) search(ctx, pattern).attach(ctx); } /** diff --git a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/iterator/CloseableAttachIterator.java b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/iterator/CloseableAttachIterator.java index b7a5e415..7989c55f 100644 --- a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/iterator/CloseableAttachIterator.java +++ b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/iterator/CloseableAttachIterator.java @@ -11,32 +11,33 @@ * @param * @param */ -public class CloseableAttachIterator implements CloseableIterator { +public class CloseableAttachIterator implements CloseableIterator { @SafeVarargs - public static CloseableIterator of(CloseableIterator it, - AutoCloseableGeneric... closeables) { + public static CloseableIterator of(CloseableIterator it, + AutoCloseableGeneric... closeables) { if (closeables.length == 0) { return it; } - return new CloseableAttachIterator<>(it, closeables); + return new CloseableAttachIterator(it, closeables); } - private final CloseableIterator handle; - private final List> closeables; + private final CloseableIterator handle; + private final List> closeables; @SafeVarargs - private CloseableAttachIterator(CloseableIterator handle, AutoCloseableGeneric... closeableGenerics) { + private CloseableAttachIterator(CloseableIterator handle, + AutoCloseableGeneric... 
closeableGenerics) { this.handle = handle; closeables = new ArrayList<>(List.of(closeableGenerics)); } @Override - public void close() throws E { + public void close() { try { handle.close(); } catch (Error | Exception t) { try { - AutoCloseableGeneric.closeAll(closeables); + AutoCloseableGeneric.closeAll(closeables); } catch (RuntimeException | Error err) { err.addSuppressed(t); throw err; @@ -62,12 +63,6 @@ public void remove() { handle.remove(); } - @Override - public CloseableIterator attach(AutoCloseableGeneric closeable) { - closeables.add(closeable); - return this; - } - @Override public void forEachRemaining(Consumer action) { handle.forEachRemaining(action); diff --git a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/iterator/CloseableAttachQueryIterator.java b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/iterator/CloseableAttachQueryIterator.java index 645e5c84..d03f0605 100644 --- a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/iterator/CloseableAttachQueryIterator.java +++ b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/iterator/CloseableAttachQueryIterator.java @@ -12,7 +12,7 @@ public class CloseableAttachQueryIterator implements QueryCloseableIterator { @SafeVarargs public static QueryCloseableIterator of(QueryCloseableIterator it, - AutoCloseableGeneric... closeables) { + AutoCloseableGeneric... closeables) { Objects.requireNonNull(it, "it can't be null!"); if (closeables.length == 0) { return it; @@ -21,11 +21,11 @@ public static QueryCloseableIterator of(QueryCloseableIterator it, } private final QueryCloseableIterator handle; - private final List> closeables; + private final List> closeables; @SafeVarargs private CloseableAttachQueryIterator(QueryCloseableIterator handle, - AutoCloseableGeneric... closeableGenerics) { + AutoCloseableGeneric... 
closeableGenerics) { this.handle = handle; closeables = new ArrayList<>(List.of(closeableGenerics)); } @@ -68,7 +68,7 @@ public long estimateCardinality() { } @Override - public QueryCloseableIterator attach(AutoCloseableGeneric closeable) { + public QueryCloseableIterator attach(AutoCloseableGeneric closeable) { closeables.add(closeable); return this; } diff --git a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/iterator/CloseableIterator.java b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/iterator/CloseableIterator.java index 458bd36c..03cbc62f 100644 --- a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/iterator/CloseableIterator.java +++ b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/iterator/CloseableIterator.java @@ -12,7 +12,7 @@ * @param AutoCloseable close exception * @author Antoine Willerval */ -public interface CloseableIterator extends Iterator, AutoCloseableGeneric { +public interface CloseableIterator extends Iterator, AutoCloseableGeneric { /** * create a closeable iterator from an iterator, if the iterator is an * {@link AutoCloseable}, it will assume that E is the only thrown @@ -25,15 +25,13 @@ public interface CloseableIterator extends Iterator, * @return closeable iterator */ @SuppressWarnings("unchecked") - static CloseableIterator of(Iterator it) { + static CloseableIterator of(Iterator it) { if (it instanceof AutoCloseable) { return of(it, () -> { try { ((AutoCloseable) it).close(); - } catch (Error | RuntimeException e) { - throw e; } catch (Exception e) { - throw (E) e; + throw new RuntimeException(e); } }); } @@ -49,10 +47,10 @@ static CloseableIterator of(Iterator it) { * @param close exception * @return closeable iterator */ - static CloseableIterator of(Iterator it, AutoCloseableGeneric closeOperation) { + static CloseableIterator of(Iterator it, AutoCloseableGeneric closeOperation) { return new CloseableIterator<>() { @Override - public void close() throws E { + public void close() { closeOperation.close(); } @@ -83,7 +81,7 @@ public void forEachRemaining(Consumer action) { * @param close exception * @return empty closeable iterator */ - static CloseableIterator empty() { + static CloseableIterator empty() { return empty(() -> {}); } @@ -93,12 +91,12 @@ static CloseableIterator empty() { * @param close exception * @return empty closeable iterator */ - static CloseableIterator empty(AutoCloseableGeneric closeable) { + static CloseableIterator empty(AutoCloseableGeneric closeable) { return of(new EmptyIterator<>(), closeable); } @Override - void close() throws E; + void close(); /** * attach an auto closeable element to this iterator @@ -106,7 +104,7 @@ static CloseableIterator empty(AutoCloseableGener * @param closeable closeable element * @return new iterator with the new closeable element */ - default CloseableIterator attach(AutoCloseableGeneric closeable) { + default CloseableIterator attach(AutoCloseableGeneric closeable) { return CloseableAttachIterator.of(this, closeable); } } diff --git a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/iterator/FetcherCloseableIterator.java b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/iterator/FetcherCloseableIterator.java index a50b6484..dd0f2eef 100644 --- a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/iterator/FetcherCloseableIterator.java +++ 
b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/iterator/FetcherCloseableIterator.java @@ -9,7 +9,7 @@ * @param iterator type * @param close type */ -public abstract class FetcherCloseableIterator implements CloseableIterator { +public abstract class FetcherCloseableIterator implements CloseableIterator { private T next; protected FetcherCloseableIterator() { diff --git a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/iterator/QueryCloseableIterator.java b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/iterator/QueryCloseableIterator.java index 9dac2f43..ba08d0d3 100644 --- a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/iterator/QueryCloseableIterator.java +++ b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/storage/iterator/QueryCloseableIterator.java @@ -8,7 +8,7 @@ * * @author Antoine Willerval */ -public interface QueryCloseableIterator extends CloseableIterator { +public interface QueryCloseableIterator extends CloseableIterator { static QueryCloseableIterator empty() { return new QueryCloseableIterator() { @Override @@ -39,7 +39,7 @@ public QEPComponentTriple next() { long estimateCardinality(); @Override - default QueryCloseableIterator attach(AutoCloseableGeneric closeable) { + default QueryCloseableIterator attach(AutoCloseableGeneric closeable) { return CloseableAttachQueryIterator.of(this, closeable); } } diff --git a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/triples/Triples.java b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/triples/Triples.java index 339defa8..77bf5643 100644 --- a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/triples/Triples.java +++ b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/triples/Triples.java @@ -19,13 +19,14 @@ package com.the_qa_company.qendpoint.core.triples; -import java.io.Closeable; -import java.util.Iterator; - import com.the_qa_company.qendpoint.core.enums.TripleComponentOrder; import com.the_qa_company.qendpoint.core.header.Header; import com.the_qa_company.qendpoint.core.iterator.SuppliableIteratorTripleID; +import java.io.Closeable; +import java.util.Iterator; +import java.util.List; + /** * Interface for Triples implementation. 
*/ @@ -121,4 +122,8 @@ default TripleID findTriple(long position) { default Iterator iterator() { return searchAll(); } + + default List getTripleComponentOrder(TripleID t) { + return List.of(); + } } diff --git a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/triples/impl/BitmapTriples.java b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/triples/impl/BitmapTriples.java index 3e7fd912..7059c2e8 100644 --- a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/triples/impl/BitmapTriples.java +++ b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/triples/impl/BitmapTriples.java @@ -395,8 +395,9 @@ public IteratorTripleID searchAll(int searchMask) { */ @Override public long getNumberOfElements() { - if (isClosed) + if (isClosed) { return 0; + } return seqZ.getNumberOfElements(); } @@ -406,8 +407,9 @@ public long getNumberOfElements() { */ @Override public long size() { - if (isClosed) + if (isClosed) { return 0; + } return seqY.size() + seqZ.size() + bitmapY.getSizeBytes() + bitmapZ.getSizeBytes(); } @@ -472,6 +474,7 @@ public void load(InputStream input, ControlInfo ci, ProgressListener listener) t @Override public void mapFromFile(CountInputStream input, File f, ProgressListener listener) throws IOException { + log.info("Mapping BitmapTriples from {}", f.getName()); ControlInformation ci = new ControlInformation(); ci.load(input); if (ci.getType() != ControlInfo.Type.TRIPLES) { @@ -1080,6 +1083,29 @@ public TripleID findTriple(long position, TripleID tripleID) { return tripleID; } + @Override + public List getTripleComponentOrder(TripleID pattern) { + if (isClosed) { + throw new IllegalStateException("Cannot search on BitmapTriples if it's already closed"); + } + + if (getNumberOfElements() == 0 || pattern.isNoMatch()) { + return List.of(TripleComponentOrder.POS, TripleComponentOrder.PSO, TripleComponentOrder.SPO, + TripleComponentOrder.SOP, TripleComponentOrder.OSP, TripleComponentOrder.OPS); + } + + TripleID reorderedPat = new TripleID(pattern); + TripleOrderConvert.swapComponentOrder(reorderedPat, TripleComponentOrder.SPO, order); + int flags = reorderedPat.getPatternOrderFlags(); + + if ((indexesMask & flags) != 0) { + return TripleComponentOrder.fetchAllBestForCfg(flags, indexes); + } + + return List.of(); + + } + /* * (non-Javadoc) * @see hdt.triples.Triples#saveIndex(java.io.OutputStream, diff --git a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/triples/impl/OneReadTempTriples.java b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/triples/impl/OneReadTempTriples.java index 327f9e6a..96c8f738 100644 --- a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/triples/impl/OneReadTempTriples.java +++ b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/triples/impl/OneReadTempTriples.java @@ -23,6 +23,7 @@ import java.io.OutputStream; import java.nio.file.Path; import java.util.Iterator; +import java.util.List; /** * {@link TempTriples} only readable once with the {@link #searchAll()} method @@ -206,6 +207,11 @@ public TripleID findTriple(long position, TripleID buffer) { throw new NotImplementedException(); } + @Override + public List getTripleComponentOrder(TripleID t) { + return List.of(); + } + @Override public void close() throws IOException { // nothing to do diff --git a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/triples/impl/WriteBitmapTriples.java b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/triples/impl/WriteBitmapTriples.java 
index 03f31473..714713ed 100644 --- a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/triples/impl/WriteBitmapTriples.java +++ b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/triples/impl/WriteBitmapTriples.java @@ -33,6 +33,7 @@ import java.io.OutputStream; import java.nio.file.Files; import java.nio.file.Path; +import java.util.List; /** * Appendable write {@link BitmapTriples} version @@ -157,6 +158,11 @@ public TripleID findTriple(long position, TripleID tripleID) { throw new NotImplementedException(); } + @Override + public List getTripleComponentOrder(TripleID t) { + return List.of(); + } + @Override public void load(InputStream input, ControlInfo ci, ProgressListener listener) { throw new NotImplementedException(); diff --git a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/util/map/CopyOnWriteMap.java b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/util/map/CopyOnWriteMap.java index 1d9c6656..001f81ff 100644 --- a/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/util/map/CopyOnWriteMap.java +++ b/qendpoint-core/src/main/java/com/the_qa_company/qendpoint/core/util/map/CopyOnWriteMap.java @@ -1,5 +1,7 @@ package com.the_qa_company.qendpoint.core.util.map; +import java.io.Serial; +import java.io.Serializable; import java.util.Collection; import java.util.HashMap; import java.util.Map; @@ -15,7 +17,11 @@ * @param value type * @author Antoine Willerval */ -public class CopyOnWriteMap implements Map { +public class CopyOnWriteMap implements Map, Serializable { + + @Serial + private static final long serialVersionUID = -3127117388123088572L; + private Map wrapper; private boolean write; diff --git a/qendpoint-core/src/test/java/com/the_qa_company/qendpoint/core/storage/QEPCoreTest.java b/qendpoint-core/src/test/java/com/the_qa_company/qendpoint/core/storage/QEPCoreTest.java index b73bfc14..bc8a6ca4 100644 --- a/qendpoint-core/src/test/java/com/the_qa_company/qendpoint/core/storage/QEPCoreTest.java +++ b/qendpoint-core/src/test/java/com/the_qa_company/qendpoint/core/storage/QEPCoreTest.java @@ -164,8 +164,7 @@ public void coreSearchTest() throws QEPCoreException, IOException, NotFoundExcep QEPCore core = new QEPCore(coreRoot, HDTOptions.of()); Bitmap64Big findBM = Bitmap64Big.memory(hdt.getTriples().getNumberOfElements())) { assertEquals(hdt.getTriples().getNumberOfElements(), core.triplesCount()); - try (CloseableIterator search = core.search("", "", - "")) { + try (CloseableIterator search = core.search("", "", "")) { long count = 0; while (search.hasNext()) { @@ -231,7 +230,7 @@ public void coreSearchInvTest() throws QEPCoreException, IOException, NotFoundEx // convert to a triple string to search over the main HDT count++; // search the ts - try (CloseableIterator searchIt = core.search(ts)) { + try (CloseableIterator searchIt = core.search(ts)) { assertTrue("missing triple for " + ts + " in core", searchIt.hasNext()); QEPComponentTriple qts = searchIt.next(); @@ -450,14 +449,14 @@ public void generationTest() throws IOException, ParserException { assertEquals("size isn't matching", ts.size(), core.triplesCount()); for (TripleString t : ts) { - try (CloseableIterator s = core.search(t)) { + try (CloseableIterator s = core.search(t)) { if (!s.hasNext()) { throw new AssertionError(format("Can't find triple '%s' in the core", t)); } } } - try (CloseableIterator sit = core.search()) { + try (CloseableIterator sit = core.search()) { while (sit.hasNext()) { QEPComponentTriple triple = sit.next(); // we need to 
convert it using tripleToString() diff --git a/qendpoint-store/pom.xml b/qendpoint-store/pom.xml index fd217084..84363d10 100644 --- a/qendpoint-store/pom.xml +++ b/qendpoint-store/pom.xml @@ -23,12 +23,25 @@ + + + oss.sonatype.org-snapshot + https://oss.sonatype.org/content/repositories/snapshots + + false + + + true + + + + 1.15 1.1.1 4.13.2 3.3.1 - 4.3.2 + 5.0.0-SNAPSHOT 1.4.5 UTF-8 diff --git a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/compiler/SailCompiler.java b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/compiler/SailCompiler.java index 3199f8b7..ad0d202c 100644 --- a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/compiler/SailCompiler.java +++ b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/compiler/SailCompiler.java @@ -400,8 +400,7 @@ public LinkedSail compileNode(Value node) throws SailCo */ public Value searchOne(Resource subject, IRI predicate) throws SailCompilerException { Value out; - try (CloseableIteration it = connection.getStatements(subject, - predicate, null)) { + try (CloseableIteration it = connection.getStatements(subject, predicate, null)) { if (!it.hasNext()) { throw new SailCompilerException( "Can't find statements for the query (" + subject + ", " + predicate + ", ???)!"); @@ -426,8 +425,7 @@ public Value searchOne(Resource subject, IRI predicate) throws SailCompilerExcep */ public Optional searchOneOpt(Resource subject, IRI predicate) throws SailCompilerException { Value out; - try (CloseableIteration it = connection.getStatements(subject, - predicate, null)) { + try (CloseableIteration it = connection.getStatements(subject, predicate, null)) { if (!it.hasNext()) { return Optional.empty(); } @@ -466,8 +464,7 @@ public T searchPropertyValue(Resource subject, */ public List search(Resource subject, IRI predicate) { List values = new ArrayList<>(); - try (CloseableIteration it = connection.getStatements(subject, - predicate, null)) { + try (CloseableIteration it = connection.getStatements(subject, predicate, null)) { it.stream().forEach(s -> values.add(s.getObject())); } return values; diff --git a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/compiler/TripleSourceGetter.java b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/compiler/TripleSourceGetter.java index 297c9378..7c8a8b8d 100644 --- a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/compiler/TripleSourceGetter.java +++ b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/compiler/TripleSourceGetter.java @@ -25,6 +25,5 @@ public interface TripleSourceGetter extends AutoCloseable { * @throws SailCompiler.SailCompilerException error while getting the * triples */ - CloseableIteration getStatements(Resource s, IRI p, Value o) - throws SailCompiler.SailCompilerException; + CloseableIteration getStatements(Resource s, IRI p, Value o) throws SailCompiler.SailCompilerException; } diff --git a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/compiler/source/EmptyTripleSourceGetter.java b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/compiler/source/EmptyTripleSourceGetter.java index 94350830..89654f4f 100644 --- a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/compiler/source/EmptyTripleSourceGetter.java +++ b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/compiler/source/EmptyTripleSourceGetter.java @@ -22,7 +22,7 @@ public void close() throws SailCompiler.SailCompilerException { } @Override - public CloseableIteration getStatements(Resource s, IRI p, Value o) + 
public CloseableIteration getStatements(Resource s, IRI p, Value o) throws SailCompiler.SailCompilerException { return new CloseableIteration<>() { @Override diff --git a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/compiler/source/ModelTripleSourceGetter.java b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/compiler/source/ModelTripleSourceGetter.java index 0196606b..12a22747 100644 --- a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/compiler/source/ModelTripleSourceGetter.java +++ b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/compiler/source/ModelTripleSourceGetter.java @@ -32,7 +32,7 @@ public void close() throws SailCompiler.SailCompilerException { } @Override - public CloseableIteration getStatements(Resource s, IRI p, Value o) { + public CloseableIteration getStatements(Resource s, IRI p, Value o) { final Iterator it = model.getStatements(s, p, o).iterator(); return new CloseableIteration<>() { @Override diff --git a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/compiler/source/SailTripleSourceModel.java b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/compiler/source/SailTripleSourceModel.java index a9012abd..3601751e 100644 --- a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/compiler/source/SailTripleSourceModel.java +++ b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/compiler/source/SailTripleSourceModel.java @@ -58,10 +58,8 @@ public void close() throws SailCompiler.SailCompilerException { } @Override - public CloseableIteration getStatements(Resource s, IRI p, - Value o) { - CloseableIteration it = sail.getConnection().getStatements(s, p, o, - false); + public CloseableIteration getStatements(Resource s, IRI p, Value o) { + CloseableIteration it = sail.getConnection().getStatements(s, p, o, false); return new CloseableIteration<>() { @Override public void close() throws SailCompiler.SailCompilerException { diff --git a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/federation/WikibaseLabelService.java b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/federation/WikibaseLabelService.java index 520c66a0..45165bce 100644 --- a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/federation/WikibaseLabelService.java +++ b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/federation/WikibaseLabelService.java @@ -53,8 +53,8 @@ public boolean ask(Service service, BindingSet bindingSet, String s) throws Quer } @Override - public CloseableIteration select(Service service, Set set, - BindingSet bindingSet, String s) throws QueryEvaluationException { + public CloseableIteration select(Service service, Set set, BindingSet bindingSet, String s) + throws QueryEvaluationException { System.out.println("Should never pass here, report the query if this is the case!"); return null; } @@ -126,9 +126,8 @@ private List getAskedLanguage(String languageConfig) { } @Override - public CloseableIteration evaluate(Service service, - CloseableIteration closeableIteration, String s) - throws QueryEvaluationException { + public CloseableIteration evaluate(Service service, CloseableIteration closeableIteration, + String s) throws QueryEvaluationException { TupleExpr tupleExpr = service.getArg(); // currently implements only the automatic mode // https://en.wikibooks.org/wiki/SPARQL/SERVICE_-_Label @@ -196,9 +195,8 @@ private BindingSet expandBindingSet(BindingSet bindingSet, List language valuesWithLabels.add(bindingSet.getBinding(name).getValue()); 
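Note: the store-side signature churn in this diff (SailCompiler, TripleSourceGetter and its implementations, WikibaseLabelService) tracks the RDF4J 5.0.0-SNAPSHOT API, where CloseableIteration no longer carries a checked-exception type parameter. A minimal sketch of the call-site pattern these changes converge on, with illustrative names (conn, subj, pred are assumptions, not code from this PR):

import org.eclipse.rdf4j.common.iteration.CloseableIteration;
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.Resource;
import org.eclipse.rdf4j.model.Statement;
import org.eclipse.rdf4j.sail.SailConnection;

static long countMatches(SailConnection conn, Resource subj, IRI pred) {
    long n = 0;
    // RDF4J 5.x: single type parameter, close() throws no checked exception
    try (CloseableIteration<? extends Statement> it = conn.getStatements(subj, pred, null, false)) {
        while (it.hasNext()) {
            it.next();
            n++;
        }
    }
    return n;
}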
namesWithLabels.add(name + expantionNameSuffix[e]); if (bindingSet.getBinding(name).getValue() instanceof Resource) { - try (CloseableIteration iteration = tripleSource - .getStatements((Resource) bindingSet.getBinding(name).getValue(), expantionProperties[e], - null)) { + try (CloseableIteration iteration = tripleSource.getStatements( + (Resource) bindingSet.getBinding(name).getValue(), expantionProperties[e], null)) { while (iteration.hasNext()) { Statement next = iteration.next(); if (next.getObject().isLiteral()) { diff --git a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/functions/Split.java b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/functions/Split.java index 95312e9f..c9f9027a 100644 --- a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/functions/Split.java +++ b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/functions/Split.java @@ -25,8 +25,8 @@ public String getURI() { } @Override - public CloseableIteration, QueryEvaluationException> evaluate( - final ValueFactory valueFactory, Value... args) throws QueryEvaluationException { + public CloseableIteration> evaluate(final ValueFactory valueFactory, Value... args) + throws QueryEvaluationException { if (args.length != 2) { throw new ValueExprEvaluationException( String.format("%s requires 2 arguments, got %d", getURI(), args.length)); diff --git a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/model/HDTValue.java b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/model/HDTValue.java index 7f11507f..9c5f978d 100644 --- a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/model/HDTValue.java +++ b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/model/HDTValue.java @@ -13,7 +13,7 @@ static int compare(HDTValue v1, HDTValue v2) { return c; } - return Long.compare(v1.getHDTPosition(), v2.getHDTPosition()); + return Long.compare(v1.getHDTId(), v2.getHDTId()); } /** diff --git a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/model/SimpleIRIHDT.java b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/model/SimpleIRIHDT.java index 4be71659..5d30f41d 100644 --- a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/model/SimpleIRIHDT.java +++ b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/model/SimpleIRIHDT.java @@ -1,12 +1,13 @@ package com.the_qa_company.qendpoint.model; import com.the_qa_company.qendpoint.core.enums.DictionarySectionRole; +import com.the_qa_company.qendpoint.core.enums.TripleComponentRole; import com.the_qa_company.qendpoint.core.exceptions.NotImplementedException; +import com.the_qa_company.qendpoint.core.hdt.HDT; import com.the_qa_company.qendpoint.store.exception.EndpointStoreException; +import org.eclipse.rdf4j.model.IRI; import org.eclipse.rdf4j.model.base.AbstractIRI; import org.eclipse.rdf4j.model.util.URIUtil; -import com.the_qa_company.qendpoint.core.enums.TripleComponentRole; -import com.the_qa_company.qendpoint.core.hdt.HDT; import java.io.Serial; @@ -127,14 +128,20 @@ public String getLocalName() { @Override public boolean equals(Object o) { - if (o == null) + if (o == null) { return false; + } if (this == o) { return true; } else if (o instanceof SimpleIRIHDT && this.id != -1 && ((SimpleIRIHDT) o).getId() != -1) { return this.id == (((SimpleIRIHDT) o).getId()); } else { // could not compare IDs, we have to compare to string - return toString().equals(o.toString()); + if (o instanceof IRI) { + return toString().equals(o.toString()); + } else { + return false; + } + } } @@ 
-142,17 +149,18 @@ public boolean equals(Object o) { public int hashCode() { if (id != -1 && !delegate) { String prefix = "http://hdt.org/"; - if (this.postion == SHARED_POS) + if (this.postion == SHARED_POS) { prefix += "SO"; - else if (this.postion == SUBJECT_POS) + } else if (this.postion == SUBJECT_POS) { prefix += "S"; - else if (this.postion == PREDICATE_POS) + } else if (this.postion == PREDICATE_POS) { prefix += "P"; - else if (this.postion == OBJECT_POS) + } else if (this.postion == OBJECT_POS) { prefix += "O"; - else { - if (iriString != null) + } else { + if (iriString != null) { prefix = iriString; + } return prefix.hashCode(); } prefix += id; @@ -182,4 +190,5 @@ public void setDelegate(boolean delegate) { public boolean isDelegate() { return delegate; } + } diff --git a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/EndpointStore.java b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/EndpointStore.java index d5be2558..d1ab1cac 100644 --- a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/EndpointStore.java +++ b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/EndpointStore.java @@ -117,9 +117,9 @@ public class EndpointStore extends AbstractNotifyingSail { File checkFile; // flag if the store is merging or not - private boolean isMerging = false; + private volatile boolean isMerging = false; - public boolean isMergeTriggered = false; + public volatile boolean isMergeTriggered = false; private boolean freezeNotifications = false; @@ -788,7 +788,7 @@ public void markDeletedTempTriples() throws IOException { try (InputStream inputStream = new FileInputStream(endpointFiles.getTempTriples())) { RDFParser rdfParser = Rio.createParser(RDFFormat.NTRIPLES); rdfParser.getParserConfig().set(BasicParserSettings.VERIFY_URI_SYNTAX, false); - try (GraphQueryResult res = QueryResults.parseGraphBackground(inputStream, null, rdfParser, null)) { + try (GraphQueryResult res = QueryResults.parseGraphBackground(inputStream, null, rdfParser)) { while (res.hasNext()) { Statement st = res.next(); IteratorTripleString search = this.hdt.search(st.getSubject().toString(), @@ -998,8 +998,7 @@ public boolean isNativeStoreContainsAtLeast(long number) { try (SailConnection connection = getChangingStore().getConnection()) { // https://github.com/eclipse/rdf4j/discussions/3734 // return connection.size() >= number; - try (CloseableIteration it = connection.getStatements(null, null, null, - false)) { + try (CloseableIteration it = connection.getStatements(null, null, null, false)) { for (long i = 0; i < number; i++) { if (!it.hasNext()) { return false; diff --git a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/EndpointStoreConnection.java b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/EndpointStoreConnection.java index f00d0ce9..fcdab7c5 100644 --- a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/EndpointStoreConnection.java +++ b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/EndpointStoreConnection.java @@ -154,8 +154,8 @@ public void begin() throws SailException { // for SPARQL queries @Override - protected CloseableIteration evaluateInternal(TupleExpr tupleExpr, - Dataset dataset, BindingSet bindings, boolean includeInferred) throws SailException { + protected CloseableIteration evaluateInternal(TupleExpr tupleExpr, Dataset dataset, + BindingSet bindings, boolean includeInferred) throws SailException { return queryPreparer.evaluate(tupleExpr, dataset, bindings, 
includeInferred, 0); } @@ -172,8 +172,8 @@ public Explanation explain(Explanation.Level level, TupleExpr tupleExpr, Dataset // USED from connection get api not SPARQL @Override - protected CloseableIteration getStatementsInternal(Resource subj, IRI pred, - Value obj, boolean includeInferred, Resource... contexts) throws SailException { + protected CloseableIteration getStatementsInternal(Resource subj, IRI pred, Value obj, + boolean includeInferred, Resource... contexts) throws SailException { if (MergeRunnableStopPoint.disableRequest) { throw new MergeRunnableStopPoint.MergeRunnableException("connections request disabled"); } @@ -189,12 +189,11 @@ protected CloseableIteration getStatementsIn if (timeout.get()) { throw new EndpointTimeoutException(); } - CloseableIteration result = tripleSource.getStatements(subj, - pred, obj, contexts); + CloseableIteration result = tripleSource.getStatements(subj, pred, obj, contexts); return new ExceptionConvertingIteration(result) { @Override - protected SailException convert(Exception e) { + protected SailException convert(RuntimeException e) { return new SailException(e); } }; @@ -280,8 +279,8 @@ public void addStatement(UpdateContext op, Resource subj, IRI pred, Value obj, R // check if we need to search over the other native connection if (endpoint.isMerging()) { if (endpoint.shouldSearchOverRDF4J(subjectID, predicateID, objectID)) { - try (CloseableIteration other = getOtherConnectionRead() - .getStatements(newSubj, newPred, newObj, false, contexts)) { + try (CloseableIteration other = getOtherConnectionRead().getStatements(newSubj, + newPred, newObj, false, contexts)) { if (other.hasNext()) { return; } @@ -322,7 +321,7 @@ protected void clearInternal(Resource... contexts) throws SailException { } @Override - protected CloseableIteration getNamespacesInternal() throws SailException { + protected CloseableIteration getNamespacesInternal() throws SailException { return getCurrentConnectionRead().getNamespaces(); } @@ -396,11 +395,6 @@ protected void rollbackInternal() throws SailException { getCurrentConnectionWrite().rollback(); } - @Override - public boolean pendingRemovals() { - return false; - } - @Override protected void closeInternal() throws SailException { logger.debug("Number of times native store was called:" + this.tripleSource.getCount()); @@ -425,7 +419,7 @@ protected void closeInternal() throws SailException { } @Override - protected CloseableIteration getContextIDsInternal() throws SailException { + protected CloseableIteration getContextIDsInternal() throws SailException { return getCurrentConnectionRead().getContextIDs(); } diff --git a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/EndpointStoreQueryPreparer.java b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/EndpointStoreQueryPreparer.java index 6ca8370b..d5e66d27 100644 --- a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/EndpointStoreQueryPreparer.java +++ b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/EndpointStoreQueryPreparer.java @@ -18,6 +18,7 @@ import org.eclipse.rdf4j.query.algebra.UpdateExpr; import org.eclipse.rdf4j.query.algebra.evaluation.AbstractQueryPreparer; import org.eclipse.rdf4j.query.algebra.evaluation.EvaluationStrategy; +import org.eclipse.rdf4j.query.algebra.evaluation.QueryEvaluationStep; import org.eclipse.rdf4j.query.algebra.evaluation.impl.EvaluationStatistics; import org.eclipse.rdf4j.query.algebra.evaluation.impl.ExtendedEvaluationStrategy; import 
org.eclipse.rdf4j.query.algebra.evaluation.optimizer.BindingAssignerOptimizer; @@ -95,9 +96,8 @@ public EndpointTripleSource getTripleSource() { } @Override - protected CloseableIteration evaluate(TupleExpr tupleExpr, - Dataset dataset, BindingSet bindings, boolean includeInferred, int maxExecutionTime) - throws QueryEvaluationException { + protected CloseableIteration evaluate(TupleExpr tupleExpr, Dataset dataset, + BindingSet bindings, boolean includeInferred, int maxExecutionTime) throws QueryEvaluationException { if (this.cloneTupleExpression) { tupleExpr = tupleExpr.clone(); @@ -128,7 +128,7 @@ protected CloseableIteration eva new DisjunctiveConstraintOptimizer().optimize(tupleExpr, dataset, bindings); new SameTermFilterOptimizer().optimize(tupleExpr, dataset, bindings); new QueryModelNormalizerOptimizer().optimize(tupleExpr, dataset, bindings); - new QueryJoinOptimizer(evaluationStatistics).optimize(tupleExpr, dataset, bindings); + new QueryJoinOptimizer(evaluationStatistics, tripleSource).optimize(tupleExpr, dataset, bindings); new IterativeEvaluationOptimizer().optimize(tupleExpr, dataset, bindings); new FilterOptimizer().optimize(tupleExpr, dataset, bindings); new OrderLimitOptimizer().optimize(tupleExpr, dataset, bindings); @@ -136,7 +136,9 @@ protected CloseableIteration eva new ServiceClauseOptimizer().optimize(tupleExpr, dataset, bindings); - return strategy.evaluate(tupleExpr, bindings); + QueryEvaluationStep precompile = strategy.precompile(tupleExpr); + + return precompile.evaluate(bindings); } // @todo: this looks wrong, apperently if one wraps around the store @@ -163,8 +165,8 @@ public ParsedTupleQuery getParsedQuery() { } public TupleQueryResult evaluate() throws QueryEvaluationException { - CloseableIteration bindingsIter1 = null; - CloseableIteration bindingsIter2 = null; + CloseableIteration bindingsIter1 = null; + CloseableIteration bindingsIter2 = null; IteratingTupleQueryResult result; boolean allGood = false; diff --git a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/EndpointStoreTripleIterator.java b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/EndpointStoreTripleIterator.java index c2cf374e..2873a85a 100644 --- a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/EndpointStoreTripleIterator.java +++ b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/EndpointStoreTripleIterator.java @@ -3,6 +3,7 @@ import com.the_qa_company.qendpoint.core.enums.TripleComponentOrder; import com.the_qa_company.qendpoint.store.exception.EndpointTimeoutException; import org.eclipse.rdf4j.common.iteration.CloseableIteration; +import org.eclipse.rdf4j.common.iteration.IndexReportingIterator; import org.eclipse.rdf4j.model.IRI; import org.eclipse.rdf4j.model.Resource; import org.eclipse.rdf4j.model.Statement; @@ -18,7 +19,7 @@ import java.util.Objects; import java.util.concurrent.atomic.AtomicBoolean; -public class EndpointStoreTripleIterator implements CloseableIteration { +public class EndpointStoreTripleIterator implements CloseableIteration, IndexReportingIterator { private static final Logger logger = LoggerFactory.getLogger(EndpointStoreTripleIterator.class); private final AtomicBoolean closed = new AtomicBoolean(); @@ -26,11 +27,11 @@ public class EndpointStoreTripleIterator implements CloseableIteration repositoryResult; + private final CloseableIteration repositoryResult; private Statement next; public EndpointStoreTripleIterator(EndpointStoreConnection connection, EndpointTripleSource endpointTripleSource, - 
IteratorTripleID iter, CloseableIteration repositoryResult) { + IteratorTripleID iter, CloseableIteration repositoryResult) { this.connection = Objects.requireNonNull(connection, "connection can't be null!"); this.endpoint = Objects.requireNonNull(connection.getEndpoint(), "endpoint can't be null!"); this.endpointTripleSource = Objects.requireNonNull(endpointTripleSource, "endpointTripleSource can't be null!"); @@ -108,4 +109,13 @@ public void close() { } } } + + @Override + public String getIndexName() { + TripleComponentOrder order = iterator.getOrder(); + if (order != null) { + return order.name(); + } + return null; + } } diff --git a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/EndpointTripleSource.java b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/EndpointTripleSource.java index 5f94ee27..93b3807d 100644 --- a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/EndpointTripleSource.java +++ b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/EndpointTripleSource.java @@ -1,9 +1,16 @@ package com.the_qa_company.qendpoint.store; +import com.the_qa_company.qendpoint.core.enums.TripleComponentOrder; +import com.the_qa_company.qendpoint.core.enums.TripleComponentRole; +import com.the_qa_company.qendpoint.core.triples.IteratorTripleID; +import com.the_qa_company.qendpoint.core.triples.TripleID; +import com.the_qa_company.qendpoint.core.triples.impl.EmptyTriplesIterator; +import com.the_qa_company.qendpoint.model.HDTValue; import com.the_qa_company.qendpoint.store.exception.EndpointTimeoutException; import com.the_qa_company.qendpoint.utils.CombinedNativeStoreResult; import org.eclipse.rdf4j.common.iteration.CloseableIteration; import org.eclipse.rdf4j.common.iteration.EmptyIteration; +import org.eclipse.rdf4j.common.order.StatementOrder; import org.eclipse.rdf4j.model.IRI; import org.eclipse.rdf4j.model.Resource; import org.eclipse.rdf4j.model.Statement; @@ -12,15 +19,22 @@ import org.eclipse.rdf4j.query.QueryEvaluationException; import org.eclipse.rdf4j.query.algebra.evaluation.TripleSource; import org.eclipse.rdf4j.sail.SailException; -import com.the_qa_company.qendpoint.core.enums.TripleComponentOrder; -import com.the_qa_company.qendpoint.core.triples.IteratorTripleID; -import com.the_qa_company.qendpoint.core.triples.TripleID; -import com.the_qa_company.qendpoint.core.triples.impl.EmptyTriplesIterator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.Arrays; +import java.util.Collection; +import java.util.Comparator; +import java.util.EnumSet; +import java.util.List; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; + // this is the main class telling how, given a triple pattern, to find the results in HDT and the current stores public class EndpointTripleSource implements TripleSource { + private static final Logger logger = LoggerFactory.getLogger(EndpointTripleSource.class); private final EndpointStore endpoint; private long numberOfCurrentTriples; @@ -28,11 +42,13 @@ public class EndpointTripleSource implements TripleSource { // only for debugging ... 
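Note: the EndpointTripleSource changes below gate the new ordered-iteration support behind a qendpoint.mergejoin option (default true, read via getHDTSpec().getBoolean in the constructor). A hedged sketch of disabling it through the HDT spec, assuming the plain string setter on HDTOptions:

import com.the_qa_company.qendpoint.core.options.HDTOptions;

HDTOptions spec = HDTOptions.of();
// disable merge joins / ordered HDT iteration for stores built from this spec
// (the constructor below falls back to true when the key is absent)
spec.set("qendpoint.mergejoin", "false");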
private long count = 0; private final EndpointStoreConnection endpointStoreConnection; + private final boolean enableMergeJoin; public EndpointTripleSource(EndpointStoreConnection endpointStoreConnection, EndpointStore endpoint) { this.endpoint = endpoint; this.numberOfCurrentTriples = endpoint.getHdt().getTriples().getNumberOfElements(); this.endpointStoreConnection = endpointStoreConnection; + this.enableMergeJoin = endpoint.getHDTSpec().getBoolean("qendpoint.mergejoin", true); } private void initHDTIndex() { @@ -40,8 +56,21 @@ private void initHDTIndex() { } @Override - public CloseableIteration getStatements(Resource resource, IRI iri, - Value value, Resource... resources) throws QueryEvaluationException { + public CloseableIteration getStatements(Resource subj, IRI pred, Value obj, + Resource... contexts) throws QueryEvaluationException { + + return getStatements(null, subj, pred, obj, contexts); + + } + + @Override + public CloseableIteration getStatements(StatementOrder statementOrder, Resource subj, IRI pred, + Value obj, Resource... contexts) throws SailException { + + if (statementOrder != null && logger.isDebugEnabled()) { + logger.debug("getStatements(StatementOrder {}, Subject {}, Predicate {}, Object {}, Contexts... {})", + statementOrder, subj, pred, obj, contexts); + } if (EndpointStoreConnection.debugWaittime != 0) { try { @@ -66,22 +95,22 @@ public CloseableIteration getStat Resource newSubj; IRI newPred; Value newObj; - long subjectID = this.endpoint.getHdtConverter().subjectToID(resource); - long predicateID = this.endpoint.getHdtConverter().predicateToID(iri); - long objectID = this.endpoint.getHdtConverter().objectToID(value); + long subjectID = this.endpoint.getHdtConverter().subjectToID(subj); + long predicateID = this.endpoint.getHdtConverter().predicateToID(pred); + long objectID = this.endpoint.getHdtConverter().objectToID(obj); if (subjectID == 0 || subjectID == -1) { - newSubj = resource; + newSubj = subj; } else { newSubj = this.endpoint.getHdtConverter().subjectIdToIRI(subjectID); } if (predicateID == 0 || predicateID == -1) { - newPred = iri; + newPred = pred; } else { newPred = this.endpoint.getHdtConverter().predicateIdToIRI(predicateID); } if (objectID == 0 || objectID == -1) { - newObj = value; + newObj = obj; } else { newObj = this.endpoint.getHdtConverter().objectIdToIRI(objectID); } @@ -89,23 +118,27 @@ public CloseableIteration getStat logger.debug("SEARCH {} {} {}", newSubj, newPred, newObj); // check if we need to search over the delta and if yes, search - CloseableIteration repositoryResult; + CloseableIteration repositoryResult; if (shouldSearchOverNativeStore(subjectID, predicateID, objectID)) { + if (statementOrder != null) { + throw new UnsupportedOperationException( + "Statement ordering is not supported when searching over the native store"); + } logger.debug("Searching over native store"); count++; if (endpoint.isMergeTriggered) { // query both native stores logger.debug("Query both RDF4j stores!"); - CloseableIteration repositoryResult1 = this.endpointStoreConnection - .getConnA_read().getStatements(newSubj, newPred, newObj, false, resources); - CloseableIteration repositoryResult2 = this.endpointStoreConnection - .getConnB_read().getStatements(newSubj, newPred, newObj, false, resources); + CloseableIteration repositoryResult1 = this.endpointStoreConnection.getConnA_read() + .getStatements(newSubj, newPred, newObj, false, contexts); + CloseableIteration repositoryResult2 = this.endpointStoreConnection.getConnB_read() + 
.getStatements(newSubj, newPred, newObj, false, contexts); repositoryResult = new CombinedNativeStoreResult(repositoryResult1, repositoryResult2); } else { logger.debug("Query only one RDF4j stores!"); repositoryResult = this.endpointStoreConnection.getCurrentConnectionRead().getStatements(newSubj, - newPred, newObj, false, resources); + newPred, newObj, false, contexts); } } else { logger.debug("Not searching over native store"); @@ -117,8 +150,18 @@ public CloseableIteration getStat if (subjectID != -1 && predicateID != -1 && objectID != -1) { logger.debug("Searching over HDT {} {} {}", subjectID, predicateID, objectID); TripleID t = new TripleID(subjectID, predicateID, objectID); - // search with the ID to check if the triples has been deleted - iterator = this.endpoint.getHdt().getTriples().search(t); + + if (statementOrder != null) { + int indexMaskMatchingStatementOrder = getIndexMaskMatchingStatementOrder(statementOrder, subj, pred, + obj, t); + + // search with the ID to check if the triples has been deleted + iterator = this.endpoint.getHdt().getTriples().search(t, indexMaskMatchingStatementOrder); + } else { + // search with the ID to check if the triples has been deleted + iterator = this.endpoint.getHdt().getTriples().search(t); + } + } else {// no need to search over hdt iterator = new EmptyTriplesIterator(TripleComponentOrder.SPO); } @@ -185,4 +228,164 @@ public EndpointStore getEndpointStore() { public long getCount() { return count; } + + private int getIndexMaskMatchingStatementOrder(StatementOrder statementOrder, Resource subj, IRI pred, Value obj, + TripleID t) { + List tripleComponentOrder = this.endpoint.getHdt().getTriples() + .getTripleComponentOrder(t); + + if (subj != null && pred != null && obj != null) { + if (!tripleComponentOrder.isEmpty()) { + return tripleComponentOrder.get(0).mask; + } + } + + Optional first = tripleComponentOrder.stream() + .filter(o -> getStatementOrder(o, subj != null, pred != null, obj != null).contains(statementOrder)) + .findFirst(); + + if (first.isEmpty()) { + throw new AssertionError( + "Statement order " + statementOrder + " not supported for triple pattern " + t.getPatternString()); + } + int indexMaskMatchingStatementOrder = first.get().mask; + return indexMaskMatchingStatementOrder; + } + + public static Set getStatementOrder(TripleComponentOrder tripleComponentOrder, boolean subject, + boolean predicate, boolean object) { + List subjectMappings = List.of(tripleComponentOrder.getSubjectMapping(), + tripleComponentOrder.getPredicateMapping(), tripleComponentOrder.getObjectMapping()); + + EnumSet statementOrders = EnumSet.noneOf(StatementOrder.class); + if (subject) { + statementOrders.add(StatementOrder.S); + } + if (predicate) { + statementOrders.add(StatementOrder.P); + } + if (object) { + statementOrders.add(StatementOrder.O); + } + + for (TripleComponentRole mapping : subjectMappings) { + if (mapping == TripleComponentRole.SUBJECT) { + if (!subject) { + statementOrders.add(StatementOrder.S); + break; + } + } else if (mapping == TripleComponentRole.PREDICATE) { + if (!predicate) { + statementOrders.add(StatementOrder.P); + break; + } + } else if (mapping == TripleComponentRole.OBJECT) { + if (!object) { + statementOrders.add(StatementOrder.O); + break; + } + } + } + return statementOrders; + } + + @Override + public Set getSupportedOrders(Resource subj, IRI pred, Value obj, Resource... 
contexts) { + + if (!enableMergeJoin) { + return Set.of(); + } + + if (EndpointStoreConnection.debugWaittime != 0) { + try { + Thread.sleep(EndpointStoreConnection.debugWaittime); + } catch (InterruptedException e) { + throw new AssertionError("no interruption during sleep", e); + } + } + + if (endpointStoreConnection.isTimeout()) { + throw new EndpointTimeoutException(); + } + + // @todo: should we not move this to the EndpointStore in the resetHDT + // function? + // check if the index changed, then refresh it + if (this.numberOfCurrentTriples != this.endpoint.getHdt().getTriples().getNumberOfElements()) { + initHDTIndex(); + } + + // convert uris into ids if needed + Resource newSubj; + IRI newPred; + Value newObj; + long subjectID = this.endpoint.getHdtConverter().subjectToID(subj); + long predicateID = this.endpoint.getHdtConverter().predicateToID(pred); + long objectID = this.endpoint.getHdtConverter().objectToID(obj); + + if (subjectID == 0 || subjectID == -1) { + newSubj = subj; + } else { + newSubj = this.endpoint.getHdtConverter().subjectIdToIRI(subjectID); + } + if (predicateID == 0 || predicateID == -1) { + newPred = pred; + } else { + newPred = this.endpoint.getHdtConverter().predicateIdToIRI(predicateID); + } + if (objectID == 0 || objectID == -1) { + newObj = obj; + } else { + newObj = this.endpoint.getHdtConverter().objectIdToIRI(objectID); + } + + logger.debug("getSupportedOrders {} {} {}", newSubj, newPred, newObj); + + // check if we need to search over the delta, in which case the + // statements can not be ordered + if (shouldSearchOverNativeStore(subjectID, predicateID, objectID)) { + return Set.of(); + } + + // iterate over the HDT file + IteratorTripleID iterator; + if (subjectID != -1 && predicateID != -1 && objectID != -1) { + TripleID t = new TripleID(subjectID, predicateID, objectID); + // search with the ID to check if the triples has been deleted + List tripleComponentOrder = this.endpoint.getHdt().getTriples() + .getTripleComponentOrder(t); + + var orders = tripleComponentOrder.stream() + .map(o -> getStatementOrder(o, subj != null, pred != null, obj != null)).filter(Objects::nonNull) + .flatMap(Collection::stream).filter(p -> p != StatementOrder.P) + // we do not support predicate ordering since it doesn't use + // the same IDs as other IRIs + .collect(Collectors.toSet()); + + if (logger.isDebugEnabled()) { + logger.debug("Triple pattern: {}\nMatching indexes: {}\nPossible orders: {}", t.getPatternString(), + Arrays.toString(tripleComponentOrder.toArray()), Arrays.toString(orders.toArray())); + } + + return orders; + + } else {// no need to search over hdt + return Set.of(StatementOrder.S, StatementOrder.P, StatementOrder.O, StatementOrder.C); + } + + } + + @Override + public Comparator getComparator() { + return (o1, o2) -> { + if (o1 instanceof HDTValue && o2 instanceof HDTValue) { + assert ((HDTValue) o1).getHDTPosition() != 2 : "o1 is in predicate position"; + assert ((HDTValue) o2).getHDTPosition() != 2 : "o2 is in predicate position"; + + return Long.compare(((HDTValue) o1).getHDTId(), ((HDTValue) o2).getHDTId()); + } + throw new UnsupportedOperationException( + "Cannot compare values of type " + o1.getClass() + " and " + o2.getClass()); + }; + } } diff --git a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/HDTConverter.java b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/HDTConverter.java index 45596a81..4f5b5959 100644 --- a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/HDTConverter.java +++ 
b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/HDTConverter.java @@ -2,7 +2,9 @@ import com.the_qa_company.qendpoint.core.dictionary.Dictionary; import com.the_qa_company.qendpoint.core.enums.RDFNodeType; +import com.the_qa_company.qendpoint.core.enums.TripleComponentRole; import com.the_qa_company.qendpoint.core.exceptions.NotImplementedException; +import com.the_qa_company.qendpoint.core.hdt.HDT; import com.the_qa_company.qendpoint.model.HDTValue; import com.the_qa_company.qendpoint.model.SimpleBNodeHDT; import com.the_qa_company.qendpoint.model.SimpleIRIHDT; @@ -13,10 +15,8 @@ import org.eclipse.rdf4j.model.Statement; import org.eclipse.rdf4j.model.Value; import org.eclipse.rdf4j.model.ValueFactory; +import org.eclipse.rdf4j.model.impl.SimpleValueFactory; import org.eclipse.rdf4j.query.algebra.evaluation.util.QueryEvaluationUtil; -import org.eclipse.rdf4j.sail.memory.model.MemValueFactory; -import com.the_qa_company.qendpoint.core.enums.TripleComponentRole; -import com.the_qa_company.qendpoint.core.hdt.HDT; // there are 4 types of resources: // resources coming from outside, @@ -29,7 +29,7 @@ public class HDTConverter { public static final String HDT_URI = "http://hdt.org/"; private final EndpointStore endpoint; private final HDT hdt; - private final ValueFactory valueFactory = new MemValueFactory(); + private final ValueFactory valueFactory = SimpleValueFactory.getInstance(); public HDTConverter(EndpointStore endpoint) { this.endpoint = endpoint; @@ -51,6 +51,7 @@ public long subjectToID(Resource subj) { if (position == SimpleIRIHDT.SHARED_POS || position == SimpleIRIHDT.SUBJECT_POS) { return id; } + String translate; if (position == SimpleIRIHDT.PREDICATE_POS) { translate = hdt.getDictionary().idToString(id, TripleComponentRole.PREDICATE).toString(); @@ -59,7 +60,9 @@ public long subjectToID(Resource subj) { } else { translate = ""; } + id = hdt.getDictionary().stringToId(translate, TripleComponentRole.SUBJECT); + return id; } diff --git a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/experimental/QEPSailStore.java b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/experimental/QEPSailStore.java index 99c0cb50..04f3e70d 100644 --- a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/experimental/QEPSailStore.java +++ b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/experimental/QEPSailStore.java @@ -222,7 +222,7 @@ public void close() throws SailException { } @Override - public QEPCloseableIteration getNamespaces() throws SailException { + public QEPCloseableIteration getNamespaces() throws SailException { Map namespaces = core.getNamespaceData().getNamespaces(); return QEPCloseableIteration.of(CloseableIterator.of( namespaces.entrySet().stream().map(e -> new SimpleNamespace(e.getKey(), e.getValue())).iterator())); @@ -234,13 +234,13 @@ public String getNamespace(String prefix) throws SailException { } @Override - public QEPCloseableIteration getContextIDs() throws SailException { + public QEPCloseableIteration getContextIDs() throws SailException { return QEPCloseableIteration.of(); } @Override - public QEPCloseableIteration getStatements(Resource subj, IRI pred, - Value obj, Resource... contexts) throws SailException { + public QEPCloseableIteration getStatements(Resource subj, IRI pred, Value obj, + Resource... 
contexts) throws SailException { if (!explicit) { return QEPCloseableIteration.of(); } @@ -254,5 +254,6 @@ public QEPCloseableIteration getStatements(R return null; }); } + } } diff --git a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/experimental/model/QEPCloseableIteration.java b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/experimental/model/QEPCloseableIteration.java index c7352a47..9f481fde 100644 --- a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/experimental/model/QEPCloseableIteration.java +++ b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/store/experimental/model/QEPCloseableIteration.java @@ -8,47 +8,47 @@ import java.util.function.Function; @SuppressWarnings("deprecation") -public class QEPCloseableIteration implements CloseableIteration { +public class QEPCloseableIteration implements CloseableIteration { - public static QEPCloseableIteration of(CloseableIterator it) { - return new QEPCloseableIteration<>(Objects.requireNonNull(it, "it can't be null!")); + public static QEPCloseableIteration of(CloseableIterator it) { + return new QEPCloseableIteration(Objects.requireNonNull(it, "it can't be null!")); } - public static QEPCloseableIteration of() { + public static QEPCloseableIteration of() { return of(CloseableIterator.empty()); } - private final CloseableIterator it; + private final CloseableIterator it; - private QEPCloseableIteration(CloseableIterator it) { + private QEPCloseableIteration(CloseableIterator it) { this.it = it; } @Override - public void close() throws E { + public void close() { it.close(); } @Override - public boolean hasNext() throws E { + public boolean hasNext() { return it.hasNext(); } @Override - public T next() throws E { + public T next() { return it.next(); } @Override - public void remove() throws E { + public void remove() { it.remove(); } - public QEPCloseableIteration map(Function mapElem, + public QEPCloseableIteration map(Function mapElem, Function map) { return new QEPCloseableIteration<>(new CloseableIterator<>() { @Override - public void close() throws E2 { + public void close() { try { it.close(); } catch (Throwable t) { diff --git a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/tools/QEPSearch.java b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/tools/QEPSearch.java index 27e88932..ccff6f00 100644 --- a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/tools/QEPSearch.java +++ b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/tools/QEPSearch.java @@ -221,7 +221,7 @@ protected void iterate(NotifyingSail store, CharSequence subject, CharSequence p System.err.println(colorTool.red() + "Can't do SPO with NO_SPO=true" + colorTool.colorReset()); return; } - try (CloseableIteration it = conn.getStatements(ms, mp, mo, false)) { + try (CloseableIteration it = conn.getStatements(ms, mp, mo, false)) { count = 0; while (it.hasNext()) { Statement triple = it.next(); @@ -453,8 +453,7 @@ private void integrityTest(EndpointStore ep) { MultiThreadListenerConsole console = colorTool.getConsole(); console.notifyProgress(0, "open connection to native store"); try (SailConnection conn = sail.getConnection()) { - try (CloseableIteration it = conn.getStatements(null, null, null, - false)) { + try (CloseableIteration it = conn.getStatements(null, null, null, false)) { long triples = 0; HDTConverter converter = ep.getHdtConverter(); long shared = ep.getHdt().getDictionary().getNshared(); diff --git 
a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/utils/CombinedNativeStoreResult.java b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/utils/CombinedNativeStoreResult.java index ce09e585..248a304e 100644 --- a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/utils/CombinedNativeStoreResult.java +++ b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/utils/CombinedNativeStoreResult.java @@ -9,10 +9,10 @@ * * @author Ali Haidar */ -public class CombinedNativeStoreResult implements CloseableIteration { +public class CombinedNativeStoreResult implements CloseableIteration { - private final CloseableIteration repositoryResult1; - private final CloseableIteration repositoryResult2; + private final CloseableIteration repositoryResult1; + private final CloseableIteration repositoryResult2; /** * create a combined CloseableIteration @@ -20,8 +20,8 @@ public class CombinedNativeStoreResult implements CloseableIteration repositoryResult1, - CloseableIteration repositoryResult2) { + public CombinedNativeStoreResult(CloseableIteration repositoryResult1, + CloseableIteration repositoryResult2) { this.repositoryResult1 = repositoryResult1; this.repositoryResult2 = repositoryResult2; } diff --git a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/utils/VariableToIdSubstitution.java b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/utils/VariableToIdSubstitution.java index 7f85dae5..28c5b32b 100644 --- a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/utils/VariableToIdSubstitution.java +++ b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/utils/VariableToIdSubstitution.java @@ -63,7 +63,9 @@ public void meet(Var var) { } } if (id != -1) { - var.setValue(converter.idToHDTValue(id, position)); + Var var1 = new Var(var.getName(), converter.idToHDTValue(id, position), var.isAnonymous(), + var.isConstant()); + var.replaceWith(var1); } } } diff --git a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/utils/iterators/IteratorToIteration.java b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/utils/iterators/IteratorToIteration.java index f3370377..9cee68ca 100644 --- a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/utils/iterators/IteratorToIteration.java +++ b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/utils/iterators/IteratorToIteration.java @@ -4,9 +4,9 @@ import org.eclipse.rdf4j.common.iteration.CloseableIteration; public class IteratorToIteration implements ExceptionIterator, AutoCloseable { - private final CloseableIteration delegate; + private final CloseableIteration delegate; - public IteratorToIteration(CloseableIteration delegate) { + public IteratorToIteration(CloseableIteration delegate) { this.delegate = delegate; } diff --git a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/utils/sail/FilteringSailConnection.java b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/utils/sail/FilteringSailConnection.java index 66eb6eb8..62a9eeca 100644 --- a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/utils/sail/FilteringSailConnection.java +++ b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/utils/sail/FilteringSailConnection.java @@ -53,8 +53,8 @@ public void close() throws SailException { } @Override - public CloseableIteration evaluate(TupleExpr tupleExpr, - Dataset dataset, BindingSet bindings, boolean includeInferred) throws SailException { + public CloseableIteration evaluate(TupleExpr tupleExpr, Dataset dataset, BindingSet 
bindings, + boolean includeInferred) throws SailException { if (filter.shouldHandleExpression(tupleExpr, dataset, bindings, includeInferred)) { return connectionIfYes.evaluate(tupleExpr, dataset, bindings, includeInferred); } else { @@ -63,12 +63,12 @@ public CloseableIteration evalua } @Override - public CloseableIteration getContextIDs() throws SailException { + public CloseableIteration getContextIDs() throws SailException { return connectionIfYes.getContextIDs(); } @Override - public CloseableIteration getStatements(Resource subj, IRI pred, Value obj, + public CloseableIteration getStatements(Resource subj, IRI pred, Value obj, boolean includeInferred, Resource... contexts) throws SailException { if (filter.shouldHandleGet(subj, pred, obj, includeInferred, contexts)) { return connectionIfYes.getStatements(subj, pred, obj, includeInferred, contexts); @@ -163,7 +163,7 @@ public void clear(Resource... contexts) throws SailException { } @Override - public CloseableIteration getNamespaces() throws SailException { + public CloseableIteration getNamespaces() throws SailException { return connectionIfYes.getNamespaces(); } @@ -187,11 +187,6 @@ public void clearNamespaces() throws SailException { connectionIfYes.clearNamespaces(); } - @Override - public boolean pendingRemovals() { - return connectionIfYes.pendingRemovals(); - } - @Override public void addConnectionListener(SailConnectionListener listener) { connectionIfYes.addConnectionListener(listener); diff --git a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/utils/sail/OptimizingSail.java b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/utils/sail/OptimizingSail.java index 080a600c..e44af60e 100644 --- a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/utils/sail/OptimizingSail.java +++ b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/utils/sail/OptimizingSail.java @@ -8,16 +8,16 @@ import org.eclipse.rdf4j.query.algebra.TupleExpr; import org.eclipse.rdf4j.query.algebra.evaluation.EvaluationStrategy; import org.eclipse.rdf4j.query.algebra.evaluation.federation.FederatedServiceResolver; -import org.eclipse.rdf4j.query.algebra.evaluation.impl.BindingAssigner; -import org.eclipse.rdf4j.query.algebra.evaluation.impl.CompareOptimizer; -import org.eclipse.rdf4j.query.algebra.evaluation.impl.ConjunctiveConstraintSplitter; -import org.eclipse.rdf4j.query.algebra.evaluation.impl.ConstantOptimizer; -import org.eclipse.rdf4j.query.algebra.evaluation.impl.DisjunctiveConstraintOptimizer; -import org.eclipse.rdf4j.query.algebra.evaluation.impl.IterativeEvaluationOptimizer; -import org.eclipse.rdf4j.query.algebra.evaluation.impl.OrderLimitOptimizer; -import org.eclipse.rdf4j.query.algebra.evaluation.impl.QueryJoinOptimizer; -import org.eclipse.rdf4j.query.algebra.evaluation.impl.QueryModelNormalizer; -import org.eclipse.rdf4j.query.algebra.evaluation.impl.SameTermFilterOptimizer; +import org.eclipse.rdf4j.query.algebra.evaluation.optimizer.BindingAssignerOptimizer; +import org.eclipse.rdf4j.query.algebra.evaluation.optimizer.CompareOptimizer; +import org.eclipse.rdf4j.query.algebra.evaluation.optimizer.ConjunctiveConstraintSplitterOptimizer; +import org.eclipse.rdf4j.query.algebra.evaluation.optimizer.ConstantOptimizer; +import org.eclipse.rdf4j.query.algebra.evaluation.optimizer.DisjunctiveConstraintOptimizer; +import org.eclipse.rdf4j.query.algebra.evaluation.optimizer.IterativeEvaluationOptimizer; +import org.eclipse.rdf4j.query.algebra.evaluation.optimizer.OrderLimitOptimizer; +import 
org.eclipse.rdf4j.query.algebra.evaluation.optimizer.QueryJoinOptimizer; +import org.eclipse.rdf4j.query.algebra.evaluation.optimizer.QueryModelNormalizerOptimizer; +import org.eclipse.rdf4j.query.algebra.evaluation.optimizer.SameTermFilterOptimizer; import org.eclipse.rdf4j.query.algebra.evaluation.impl.TupleFunctionEvaluationStatistics; import org.eclipse.rdf4j.query.algebra.evaluation.impl.TupleFunctionEvaluationStrategy; import org.eclipse.rdf4j.sail.NotifyingSail; @@ -48,18 +48,18 @@ public OptimizingSailConnection(NotifyingSailConnection wrappedCon) { } @Override - public CloseableIteration evaluate(TupleExpr tupleExpr, - Dataset dataset, BindingSet bindings, boolean includeInferred) throws SailException { + public CloseableIteration evaluate(TupleExpr tupleExpr, Dataset dataset, + BindingSet bindings, boolean includeInferred) throws SailException { ValueFactory vf = getValueFactory(); EvaluationStrategy strategy = new TupleFunctionEvaluationStrategy( new SailTripleSource(this, includeInferred, vf), dataset, federatedServiceResolverSupplier.get()); - (new BindingAssigner()).optimize(tupleExpr, dataset, bindings); + (new BindingAssignerOptimizer()).optimize(tupleExpr, dataset, bindings); (new ConstantOptimizer(strategy)).optimize(tupleExpr, dataset, bindings); (new CompareOptimizer()).optimize(tupleExpr, dataset, bindings); - (new ConjunctiveConstraintSplitter()).optimize(tupleExpr, dataset, bindings); + (new ConjunctiveConstraintSplitterOptimizer()).optimize(tupleExpr, dataset, bindings); (new DisjunctiveConstraintOptimizer()).optimize(tupleExpr, dataset, bindings); (new SameTermFilterOptimizer()).optimize(tupleExpr, dataset, bindings); - (new QueryModelNormalizer()).optimize(tupleExpr, dataset, bindings); + (new QueryModelNormalizerOptimizer()).optimize(tupleExpr, dataset, bindings); (new QueryJoinOptimizer(new TupleFunctionEvaluationStatistics())).optimize(tupleExpr, dataset, bindings); (new IterativeEvaluationOptimizer()).optimize(tupleExpr, dataset, bindings); // FIXME: remove comment diff --git a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/utils/sail/filter/TypeSailFilter.java b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/utils/sail/filter/TypeSailFilter.java index e38f1dcd..887f54a0 100644 --- a/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/utils/sail/filter/TypeSailFilter.java +++ b/qendpoint-store/src/main/java/com/the_qa_company/qendpoint/utils/sail/filter/TypeSailFilter.java @@ -107,8 +107,8 @@ private boolean isSubjectOfType(Resource subj) { // did we already saw the type in the connection? 
if (type == null) { // query the type - try (CloseableIteration it = subConnection.getStatements(subj, - predicate, null, false)) { + try (CloseableIteration it = subConnection.getStatements(subj, predicate, null, + false)) { if (it.hasNext()) { type = it.next().getObject(); if (type == null) { diff --git a/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/WikiDataTest.java b/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/WikiDataTest.java new file mode 100644 index 00000000..09e8bfa6 --- /dev/null +++ b/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/WikiDataTest.java @@ -0,0 +1,950 @@ +//@formatter:off + +//package com.the_qa_company.qendpoint; +// +//import com.the_qa_company.qendpoint.core.options.HDTOptions; +//import com.the_qa_company.qendpoint.core.options.HDTOptionsKeys; +//import com.the_qa_company.qendpoint.model.HDTValue; +//import com.the_qa_company.qendpoint.store.EndpointFiles; +//import com.the_qa_company.qendpoint.store.EndpointStore; +//import com.the_qa_company.qendpoint.store.EndpointTripleSource; +//import org.apache.commons.lang3.time.StopWatch; +//import org.eclipse.rdf4j.query.BindingSet; +//import org.eclipse.rdf4j.query.QueryResults; +//import org.eclipse.rdf4j.query.TupleQuery; +//import org.eclipse.rdf4j.query.TupleQueryResult; +//import org.eclipse.rdf4j.query.algebra.evaluation.optimizer.QueryJoinOptimizer; +//import org.eclipse.rdf4j.query.explanation.Explanation; +//import org.eclipse.rdf4j.repository.sail.SailRepository; +//import org.eclipse.rdf4j.repository.sail.SailRepositoryConnection; +//import org.junit.jupiter.api.*; +// +//import java.io.IOException; +//import java.nio.file.Path; +//import java.util.Comparator; +//import java.util.Set; +// +//public class WikiDataTest { +// +// private static SailRepository endpointStore; +// +// @BeforeAll +// public static void beforeAll() throws IOException { +// Path dir = Path.of(System.getProperty("user.dir") + "/wdbench-indexes/"); +// System.out.println("Loading from: " + dir); +// +//// store options +// HDTOptions options = HDTOptions.of( +// // disable the default index (to use the custom indexes) +// HDTOptionsKeys.BITMAPTRIPLES_INDEX_NO_FOQ, true, +// // set the custom indexes we want +// HDTOptionsKeys.BITMAPTRIPLES_INDEX_OTHERS, "sop,ops,osp,pso,pos"); +// +// EndpointStore store = new EndpointStore(new EndpointFiles(dir, "wdbench.hdt"), options); +// store.init(); +// +// endpointStore = new SailRepository(store); +// } +// +// @AfterAll +// public static void afterAll() { +// if (endpointStore != null) { +// endpointStore.shutDown(); +// } +// endpointStore = null; +// } +// +// @AfterEach +// public void afterEach() { +// EndpointTripleSource.ENABLE_MERGE_JOIN = true; +// QueryJoinOptimizer.MERGE_JOIN_CARDINALITY_SIZE_DIFF_MULTIPLIER = 10; +// } +// +// @Test +// public void testMergeJoinOnSubject() { +// try (SailRepositoryConnection connection = endpointStore.getConnection()) { +// System.out.println(); +// String query = """ +// PREFIX wd: +// PREFIX wdt: +// PREFIX wikibase: +// PREFIX p: +// PREFIX ps: +// PREFIX pq: +// PREFIX rdfs: +// PREFIX bd: +// PREFIX wdno: +// +// SELECT (count(?x1) as ?count) WHERE { +// ?x1 . +// ?x1 wdt:P1476 ?c. 
+// } +// """; +// +// String explanation = runQuery(connection, query); +// Assertions.assertTrue(explanation.contains("JoinIterator) (resultSizeActual=37.4M, ")); +// Assertions.assertTrue(explanation +// .contains(" │ ╠══ Join (InnerMergeJoinIterator) (resultSizeActual=37.4M, totalTimeActual=")); +// +// } +// +// } +// +// @Test +// public void testScholarlyArticlesWithTitles() { +// try (SailRepositoryConnection connection = endpointStore.getConnection()) { +// System.out.println(); +// String query = """ +// PREFIX wd: +// PREFIX wdt: +// PREFIX wikibase: +// PREFIX p: +// PREFIX ps: +// PREFIX pq: +// PREFIX rdfs: +// PREFIX bd: +// PREFIX wdno: +// +// SELECT (count(?x1) as ?count) WHERE { +// ?x1 . +// ?x1 wdt:P1476 ?c. +// } +// """; +// +// runQuery(connection, query); +// System.out.println(runQuery(connection, query)); +///* +//Projection (resultSizeActual=1, totalTimeActual=12.5s, selfTimeActual=0.015ms) +//╠══ ProjectionElemList +//║ ProjectionElem "count" +//╚══ Extension (resultSizeActual=1, totalTimeActual=12.5s, selfTimeActual=0.003ms) +// ├── Group () (resultSizeActual=1, totalTimeActual=12.5s, selfTimeActual=2.0s) +// │ ╠══ Join (InnerMergeJoinIterator) (resultSizeActual=37.4M, totalTimeActual=10.5s, selfTimeActual=3.8s) +// │ ║ ├── StatementPattern [statementOrder: S] (costEstimate=37.3M, resultSizeEstimate=74.6M, resultSizeActual=37.2M, totalTimeActual=3.2s, selfTimeActual=3.2s) [left] +// │ ║ │ s: Var (name=x1) +// │ ║ │ p: Var (name=_const_e5f28efe_uri, value=http://www.wikidata.org/prop/direct/P31, anonymous) +// │ ║ │ o: Var (name=_const_5b5b226_uri, value=http://www.wikidata.org/entity/Q13442814, anonymous) +// │ ║ └── StatementPattern [statementOrder: S] (costEstimate=9.0K, resultSizeEstimate=81.8M, resultSizeActual=40.9M, totalTimeActual=3.5s, selfTimeActual=3.5s) [right] +// │ ║ s: Var (name=x1) +// │ ║ p: Var (name=_const_3389f0e2_uri, value=http://www.wikidata.org/prop/direct/P1476, anonymous) +// │ ║ o: Var (name=c) +// │ ╚══ GroupElem (count) +// │ Count +// │ Var (name=x1) +// └── ExtensionElem (count) +// Count +// Var (name=x1) +// */ +// +// +// EndpointTripleSource.ENABLE_MERGE_JOIN = false; +// runQuery(connection, query); +// System.out.println(runQuery(connection, query)); +///* +// +// */ +// +// +// } +// +// } +// +// @Test +// public void testMergeJoinOnObject() { +// try (SailRepositoryConnection connection = endpointStore.getConnection()) { +// System.out.println(); +// String query = """ +// PREFIX wd: +// PREFIX wdt: +// PREFIX wikibase: +// PREFIX p: +// PREFIX ps: +// PREFIX pq: +// PREFIX rdfs: +// PREFIX bd: +// PREFIX wdno: +// +// SELECT * WHERE { +// ?country1 wdt:P38 ?currency. # Country has currency +// ?country2 wdt:P38 ?currency. 
# Country has currency +// filter(?country1 != ?country2) +// } +// """; +// +// String explanation = runQuery(connection, query); +// Assertions.assertTrue(explanation.contains("Projection (resultSizeActual=30.1K, ")); +// Assertions.assertTrue(explanation.contains("╚══ Filter (resultSizeActual=30.1K, ")); +// Assertions.assertTrue(explanation.contains("Iterator) (resultSizeActual=31.9K, ")); +// Assertions +// .assertTrue(explanation.contains(" └── Join (InnerMergeJoinIterator) (resultSizeActual=31.9K, ")); +// +// } +// +// } +// +// @Test +// public void testMergeJoinOnObjectWithFilter() { +// +//// EndpointTripleSource.ENABLE_MERGE_JOIN = false; +// +// try (SailRepositoryConnection connection = endpointStore.getConnection()) { +// System.out.println(); +// String query = """ +// PREFIX wd: +// PREFIX wdt: +// PREFIX wikibase: +// PREFIX p: +// PREFIX ps: +// PREFIX pq: +// PREFIX rdfs: +// PREFIX bd: +// PREFIX wdno: +// +// SELECT * WHERE { +// ?country1 wdt:P38 ?currency. # Country has currency +// ?country2 wdt:P38 ?currency. # Country has currency +// FILTER(?currency IN (,, )) +// FILTER(?country1 != ?country2) +// +// } +// """; +// +// String explanation = runQuery(connection, query); +// Assertions.assertTrue(explanation.contains("JoinIterator) (resultSizeActual=8.6K, ")); +// Assertions.assertTrue(explanation.contains(" └── Join (InnerMergeJoinIterator) (resultSizeActual=8.6K, ")); +// } +// +// } +// +// @Test +// public void testRegularJoinUsedWhenMoreOptimal() { +// try (SailRepositoryConnection connection = endpointStore.getConnection()) { +// System.out.println(); +// String query = """ +// PREFIX wd: +// PREFIX wdt: +// PREFIX wikibase: +// PREFIX p: +// PREFIX ps: +// PREFIX pq: +// PREFIX rdfs: +// PREFIX bd: +// PREFIX wdno: +// +// SELECT (count(?x1) as ?count) WHERE { +// ?x1 "1795-1796" . +// ?x1 wdt:P1476 ?c. +// } +// """; +// +// String explanation = runQuery(connection, query); +// Assertions.assertTrue(explanation.contains("JoinIterator) (resultSizeActual=212, ")); +// Assertions.assertTrue(explanation.contains("│ ╠══ Join (JoinIterator) (resultSizeActual=212, ")); +// +// } +// +// } +// +// +//@Test +// public void temp() { +// try (SailRepositoryConnection connection = endpointStore.getConnection()) { +// System.out.println(); +// +// String query = """ +// PREFIX wd: +// PREFIX wdt: +// PREFIX wikibase: +// PREFIX p: +// PREFIX ps: +// PREFIX pq: +// PREFIX rdfs: +// PREFIX bd: +// PREFIX wdno: +// +// select * where { +// ?x1 . ?x1 . ?x2 ?x3 . OPTIONAL { ?x1 ?x4 . ?x1 ?x2 . ?x1 ?x5 . } +// } limit 100000 +// """; +// String explanation = runQuery(connection, query); +// System.out.println(explanation); +// +// } +// +// } +// +// +// @Test +// public void testNumberOfPeopleThatAreTennisPlayers() { +// try (SailRepositoryConnection connection = endpointStore.getConnection()) { +// System.out.println(); +// String query = """ +// PREFIX wd: +// PREFIX wdt: +// PREFIX wikibase: +// PREFIX p: +// PREFIX ps: +// PREFIX pq: +// PREFIX rdfs: +// PREFIX bd: +// PREFIX wdno: +// +// SELECT (count(?x1) as ?count) WHERE { +// ?x1 wd:Q5 . +// ?x1 wd:Q10833314 . 
+// } +// +// +// """; +// +// QueryJoinOptimizer.MERGE_JOIN_CARDINALITY_SIZE_DIFF_MULTIPLIER = 10; +// runQuery(connection, query); +// System.out.println(runQuery(connection, query)); +// +///* +//Projection (resultSizeActual=1, totalTimeActual=931ms, selfTimeActual=0.002ms) +//╠══ ProjectionElemList +//║ ProjectionElem "count" +//╚══ Extension (resultSizeActual=1, totalTimeActual=931ms, selfTimeActual=0.002ms) +// ├── Group () (resultSizeActual=1, totalTimeActual=931ms, selfTimeActual=1.91ms) +// │ ╠══ Join (InnerMergeJoinIterator) (resultSizeActual=10.9K, totalTimeActual=929ms, selfTimeActual=216ms) +// │ ║ ├── StatementPattern [statementOrder: S] (costEstimate=11.0K, resultSizeEstimate=22.0K, resultSizeActual=11.0K, totalTimeActual=0.989ms, selfTimeActual=0.989ms) [left] +// │ ║ │ s: Var (name=x1) +// │ ║ │ p: Var (name=_const_d85f4957_uri, value=http://www.wikidata.org/prop/direct/P106, anonymous) +// │ ║ │ o: Var (name=_const_6dc8acf8_uri, value=http://www.wikidata.org/entity/Q10833314, anonymous) +// │ ║ └── StatementPattern [statementOrder: S] (costEstimate=1, resultSizeEstimate=18.4M, resultSizeActual=9.1M, totalTimeActual=712ms, selfTimeActual=712ms) [right] +// │ ║ s: Var (name=x1) +// │ ║ p: Var (name=_const_e5f28efe_uri, value=http://www.wikidata.org/prop/direct/P31, anonymous) +// │ ║ o: Var (name=_const_f822a47a_uri, value=http://www.wikidata.org/entity/Q5, anonymous) +// │ ╚══ GroupElem (count) +// │ Count +// │ Var (name=x1) +// └── ExtensionElem (count) +// Count +// Var (name=x1) +//*/ +// +// EndpointTripleSource.ENABLE_MERGE_JOIN = false; +// runQuery(connection, query); +// System.out.println(runQuery(connection, query)); +// +// +///* +//Projection (resultSizeActual=1, totalTimeActual=23.2ms, selfTimeActual=0.002ms) +//╠══ ProjectionElemList +//║ ProjectionElem "count" +//╚══ Extension (resultSizeActual=1, totalTimeActual=23.2ms, selfTimeActual=0.002ms) +// ├── Group () (resultSizeActual=1, totalTimeActual=23.2ms, selfTimeActual=1.03ms) +// │ ╠══ Join (JoinIterator) (resultSizeActual=10.9K, totalTimeActual=22.1ms, selfTimeActual=21.5ms) +// │ ║ ├── StatementPattern (costEstimate=11.0K, resultSizeEstimate=22.0K, resultSizeActual=11.0K, totalTimeActual=0.661ms, selfTimeActual=0.661ms) [left] +// │ ║ │ s: Var (name=x1) +// │ ║ │ p: Var (name=_const_d85f4957_uri, value=http://www.wikidata.org/prop/direct/P106, anonymous) +// │ ║ │ o: Var (name=_const_6dc8acf8_uri, value=http://www.wikidata.org/entity/Q10833314, anonymous) +// │ ║ └── StatementPattern (costEstimate=1, resultSizeEstimate=18.4M, resultSizeActual=10.9K, totalTimeActual=0.0ms, selfTimeActual=0.0ms) [right] +// │ ║ s: Var (name=x1) +// │ ║ p: Var (name=_const_e5f28efe_uri, value=http://www.wikidata.org/prop/direct/P31, anonymous) +// │ ║ o: Var (name=_const_f822a47a_uri, value=http://www.wikidata.org/entity/Q5, anonymous) +// │ ╚══ GroupElem (count) +// │ Count +// │ Var (name=x1) +// └── ExtensionElem (count) +// Count +// Var (name=x1) +// +//*/ +// +// } +// +// } +// +// +// @Test +// public void testAllMuseumsInRome() { +// try (SailRepositoryConnection connection = endpointStore.getConnection()) { +// System.out.println(); +// String query = """ +// PREFIX wd: +// PREFIX wdt: +// PREFIX wikibase: +// PREFIX p: +// PREFIX ps: +// PREFIX pq: +// PREFIX rdfs: +// PREFIX bd: +// PREFIX wdno: +// +// SELECT * WHERE { +// ?x1 wd:Q33506 . +// ?x1 wd:Q220 . 
+// } +// """; +// +// QueryJoinOptimizer.MERGE_JOIN_CARDINALITY_SIZE_DIFF_MULTIPLIER = 100; +// runQuery(connection, query); +// System.out.println(runQuery(connection, query)); +// +///* +//Projection (resultSizeActual=3, totalTimeActual=4.27ms, selfTimeActual=0.001ms) +//╠══ ProjectionElemList +//║ ProjectionElem "x1" +//╚══ Join (InnerMergeJoinIterator) (resultSizeActual=3, totalTimeActual=4.27ms, selfTimeActual=1.14ms) +// ├── StatementPattern [statementOrder: S] (costEstimate=1.3K, resultSizeEstimate=2.6K, resultSizeActual=1.2K, totalTimeActual=0.105ms, selfTimeActual=0.105ms) [left] +// │ s: Var (name=x1) +// │ p: Var (name=_const_d85f49f5_uri, value=http://www.wikidata.org/prop/direct/P159, anonymous) +// │ o: Var (name=_const_7a0b68f5_uri, value=http://www.wikidata.org/entity/Q220, anonymous) +// └── StatementPattern [statementOrder: S] (costEstimate=1, resultSizeEstimate=81.1K, resultSizeActual=40.5K, totalTimeActual=3.02ms, selfTimeActual=3.02ms) [right] +// s: Var (name=x1) +// p: Var (name=_const_e5f28efe_uri, value=http://www.wikidata.org/prop/direct/P31, anonymous) +// o: Var (name=_const_24e3a460_uri, value=http://www.wikidata.org/entity/Q33506, anonymous) +// +// */ +// +// EndpointTripleSource.ENABLE_MERGE_JOIN = false; +// runQuery(connection, query); +// System.out.println(runQuery(connection, query)); +// +///* +//Projection (resultSizeActual=3, totalTimeActual=2.5ms, selfTimeActual=0.002ms) +//╠══ ProjectionElemList +//║ ProjectionElem "x1" +//╚══ Join (JoinIterator) (resultSizeActual=3, totalTimeActual=2.49ms, selfTimeActual=2.39ms) +// ├── StatementPattern (costEstimate=1.3K, resultSizeEstimate=2.6K, resultSizeActual=1.2K, totalTimeActual=0.105ms, selfTimeActual=0.105ms) [left] +// │ s: Var (name=x1) +// │ p: Var (name=_const_d85f49f5_uri, value=http://www.wikidata.org/prop/direct/P159, anonymous) +// │ o: Var (name=_const_7a0b68f5_uri, value=http://www.wikidata.org/entity/Q220, anonymous) +// └── StatementPattern (costEstimate=1, resultSizeEstimate=81.1K, resultSizeActual=3, totalTimeActual=0.0ms, selfTimeActual=0.0ms) [right] +// s: Var (name=x1) +// p: Var (name=_const_e5f28efe_uri, value=http://www.wikidata.org/prop/direct/P31, anonymous) +// o: Var (name=_const_24e3a460_uri, value=http://www.wikidata.org/entity/Q33506, anonymous) +// +// */ +// +// +// +// } +// +// } +// +// @Test +// public void testNumberOfTennisPlayersFromCountriesInTheEU() { +// try (SailRepositoryConnection connection = endpointStore.getConnection()) { +// System.out.println(); +// String query = """ +// PREFIX wd: +// PREFIX wdt: +// PREFIX wikibase: +// PREFIX p: +// PREFIX ps: +// PREFIX pq: +// PREFIX rdfs: +// PREFIX bd: +// PREFIX wdno: +// +// SELECT * WHERE { +// ?x1 wdt:P31 wd:Q5 . +// ?x1 wdt:P106 wd:Q10833314 . +// ?x1 wdt:P19 ?country . +// ?country wdt:P361 wd:Q458 . +// } +// """; +// +///* +//- number of tennis players from countries in the EU +// +//select (count(?s) as ?c) where +//Eu administrative_regions ?country . 
+//?s instanceOf Human +//?s occupation TennisPlayers +//?s bornIn ?country +//*/ +// +// QueryJoinOptimizer.MERGE_JOIN_CARDINALITY_SIZE_DIFF_MULTIPLIER = 100000; +// runQuery(connection, query); +// System.out.println(runQuery(connection, query)); +// +// +// EndpointTripleSource.ENABLE_MERGE_JOIN = false; +// runQuery(connection, query); +// System.out.println(runQuery(connection, query)); +// +// +// +// } +// +// } +// +// @Test +// public void testHumansAndTheirPlaceOfBirth() { +// try (SailRepositoryConnection connection = endpointStore.getConnection()) { +// System.out.println(); +// String query = """ +// PREFIX wd: +// PREFIX wdt: +// PREFIX wikibase: +// PREFIX p: +// PREFIX ps: +// PREFIX pq: +// PREFIX rdfs: +// PREFIX bd: +// PREFIX wdno: +// +// SELECT * WHERE { +// ?x1 wdt:P31 wd:Q5 . +// ?x1 wdt:P19 ?country . +// } +// """; +// +// +// QueryJoinOptimizer.MERGE_JOIN_CARDINALITY_SIZE_DIFF_MULTIPLIER = 10; +// runQuery(connection, query); +// System.out.println(runQuery(connection, query)); +// +///* +//Projection (resultSizeActual=2.8M, totalTimeActual=1.6s, selfTimeActual=194ms) +//╠══ ProjectionElemList +//║ ProjectionElem "x1" +//║ ProjectionElem "country" +//╚══ Join (InnerMergeJoinIterator) (resultSizeActual=2.8M, totalTimeActual=1.4s, selfTimeActual=472ms) +// ├── StatementPattern [statementOrder: S] (costEstimate=2.8M, resultSizeEstimate=5.6M, resultSizeActual=2.8M, totalTimeActual=241ms, selfTimeActual=241ms) [left] +// │ s: Var (name=x1) +// │ p: Var (name=_const_e5f28ec8_uri, value=http://www.wikidata.org/prop/direct/P19, anonymous) +// │ o: Var (name=country) +// └── StatementPattern [statementOrder: S] (costEstimate=1, resultSizeEstimate=18.4M, resultSizeActual=9.1M, totalTimeActual=718ms, selfTimeActual=718ms) [right] +// s: Var (name=x1) +// p: Var (name=_const_e5f28efe_uri, value=http://www.wikidata.org/prop/direct/P31, anonymous) +// o: Var (name=_const_f822a47a_uri, value=http://www.wikidata.org/entity/Q5, anonymous) +// +//*/ +// +// EndpointTripleSource.ENABLE_MERGE_JOIN = false; +// runQuery(connection, query); +// System.out.println(runQuery(connection, query)); +// +///* +//Projection (resultSizeActual=2.8M, totalTimeActual=7.7s, selfTimeActual=174ms) +//╠══ ProjectionElemList +//║ ProjectionElem "x1" +//║ ProjectionElem "country" +//╚══ Join (JoinIterator) (resultSizeActual=2.8M, totalTimeActual=7.5s, selfTimeActual=7.4s) +// ├── StatementPattern (costEstimate=2.8M, resultSizeEstimate=5.6M, resultSizeActual=2.8M, totalTimeActual=173ms, selfTimeActual=173ms) [left] +// │ s: Var (name=x1) +// │ p: Var (name=_const_e5f28ec8_uri, value=http://www.wikidata.org/prop/direct/P19, anonymous) +// │ o: Var (name=country) +// └── StatementPattern (costEstimate=1, resultSizeEstimate=18.4M, resultSizeActual=2.8M, totalTimeActual=0.0ms, selfTimeActual=0.0ms) [right] +// s: Var (name=x1) +// p: Var (name=_const_e5f28efe_uri, value=http://www.wikidata.org/prop/direct/P31, anonymous) +// o: Var (name=_const_f822a47a_uri, value=http://www.wikidata.org/entity/Q5, anonymous) +// +// */ +// +// } +// +// } +// +// +// @Test +// public void testBoardMembersOfFourCompanies() { +// try (SailRepositoryConnection connection = endpointStore.getConnection()) { +// System.out.println(); +// String query = """ +// PREFIX wd: +// PREFIX wdt: +// PREFIX wikibase: +// PREFIX p: +// PREFIX ps: +// PREFIX pq: +// PREFIX rdfs: +// PREFIX bd: +// PREFIX wdno: +// +// SELECT * WHERE { +// ?company1 wdt:P3320 ?boardMember . +// ?company2 wdt:P3320 ?boardMember . 
+// ?company3 wdt:P3320 ?boardMember . +// ?company4 wdt:P3320 ?boardMember . +// +// filter(?company1 != ?company2 && ?company1 != ?company3 && ?company2 != ?company3 && ?company1 != ?company4 && ?company2 != ?company4 && ?company3 != ?company4) +// +// ?boardMember wdt:P31 ?type ; +// wdt:P21 ?sexOrGender; +// wdt:P735 ?givenName ; +// wdt:P734 ?familyName ; +// wdt:P106 ?occupation ; +// wdt:P27 ?countryOfCitizenship . +// +// } +// """; +// +// +// QueryJoinOptimizer.MERGE_JOIN_CARDINALITY_SIZE_DIFF_MULTIPLIER = 10; +// runQuery(connection, query); +// System.out.println(runQuery(connection, query)); +// +///* +//Projection (resultSizeActual=2.6K, totalTimeActual=38.8ms, selfTimeActual=0.309ms) +//╠══ ProjectionElemList +//║ ProjectionElem "company1" +//║ ProjectionElem "boardMember" +//║ ProjectionElem "company2" +//║ ProjectionElem "company3" +//║ ProjectionElem "company4" +//║ ProjectionElem "type" +//║ ProjectionElem "sexOrGender" +//║ ProjectionElem "givenName" +//║ ProjectionElem "familyName" +//║ ProjectionElem "occupation" +//║ ProjectionElem "countryOfCitizenship" +//╚══ Join (JoinIterator) (resultSizeActual=2.6K, totalTimeActual=38.5ms, selfTimeActual=32.8ms) +// ├── Filter (resultSizeActual=7.2K, totalTimeActual=5.67ms, selfTimeActual=1.21ms) [left] +// │ ╠══ And +// │ ║ ├── And +// │ ║ │ ╠══ Compare (!=) +// │ ║ │ ║ Var (name=company1) +// │ ║ │ ║ Var (name=company4) +// │ ║ │ ╚══ Compare (!=) +// │ ║ │ Var (name=company2) +// │ ║ │ Var (name=company4) +// │ ║ └── Compare (!=) +// │ ║ Var (name=company3) +// │ ║ Var (name=company4) +// │ ╚══ Join (InnerMergeJoinIterator) (resultSizeActual=13.2K, totalTimeActual=4.46ms, selfTimeActual=1.48ms) +// │ ├── Filter (resultSizeActual=1.9K, totalTimeActual=2.64ms, selfTimeActual=0.31ms) [left] +// │ │ ╠══ And +// │ │ ║ ├── Compare (!=) +// │ │ ║ │ Var (name=company1) +// │ │ ║ │ Var (name=company3) +// │ │ ║ └── Compare (!=) +// │ │ ║ Var (name=company2) +// │ │ ║ Var (name=company3) +// │ │ ╚══ Join (InnerMergeJoinIterator) (resultSizeActual=3.9K, totalTimeActual=2.33ms, selfTimeActual=0.615ms) +// │ │ ├── Filter (resultSizeActual=988, totalTimeActual=1.41ms, selfTimeActual=0.183ms) [left] +// │ │ │ ╠══ Compare (!=) +// │ │ │ ║ Var (name=company1) +// │ │ │ ║ Var (name=company2) +// │ │ │ ╚══ Join (InnerMergeJoinIterator) (resultSizeActual=3.7K, totalTimeActual=1.23ms, selfTimeActual=0.546ms) +// │ │ │ ├── StatementPattern [statementOrder: O] (costEstimate=545, resultSizeEstimate=5.4K, resultSizeActual=2.7K, totalTimeActual=0.334ms, selfTimeActual=0.334ms) [left] +// │ │ │ │ s: Var (name=company1) +// │ │ │ │ p: Var (name=_const_338ad53e_uri, value=http://www.wikidata.org/prop/direct/P3320, anonymous) +// │ │ │ │ o: Var (name=boardMember) +// │ │ │ └── StatementPattern [statementOrder: O] (costEstimate=74, resultSizeEstimate=5.4K, resultSizeActual=2.7K, totalTimeActual=0.348ms, selfTimeActual=0.348ms) [right] +// │ │ │ s: Var (name=company2) +// │ │ │ p: Var (name=_const_338ad53e_uri, value=http://www.wikidata.org/prop/direct/P3320, anonymous) +// │ │ │ o: Var (name=boardMember) +// │ │ └── StatementPattern [statementOrder: O] (costEstimate=74, resultSizeEstimate=5.4K, resultSizeActual=2.7K, totalTimeActual=0.299ms, selfTimeActual=0.299ms) [right] +// │ │ s: Var (name=company3) +// │ │ p: Var (name=_const_338ad53e_uri, value=http://www.wikidata.org/prop/direct/P3320, anonymous) +// │ │ o: Var (name=boardMember) +// │ └── StatementPattern [statementOrder: O] (costEstimate=74, resultSizeEstimate=5.4K, resultSizeActual=2.7K, 
totalTimeActual=0.343ms, selfTimeActual=0.343ms) [right] +// │ s: Var (name=company4) +// │ p: Var (name=_const_338ad53e_uri, value=http://www.wikidata.org/prop/direct/P3320, anonymous) +// │ o: Var (name=boardMember) +// └── Join (JoinIterator) (resultSizeActual=2.6K, totalTimeActual=0.0ms) [right] +// ╠══ StatementPattern (costEstimate=2.6K, resultSizeEstimate=6.6M, resultSizeActual=6.6K, totalTimeActual=0.0ms, selfTimeActual=0.0ms) [left] +// ║ s: Var (name=boardMember) +// ║ p: Var (name=_const_d85f6038_uri, value=http://www.wikidata.org/prop/direct/P734, anonymous) +// ║ o: Var (name=familyName) +// ╚══ Join (JoinIterator) (resultSizeActual=2.6K, totalTimeActual=0.004ms, selfTimeActual=0.001ms) [right] +// ├── StatementPattern (costEstimate=2.9K, resultSizeEstimate=8.2M, resultSizeActual=6.7K, totalTimeActual=0.0ms, selfTimeActual=0.0ms) [left] +// │ s: Var (name=boardMember) +// │ p: Var (name=_const_e5f28ee5_uri, value=http://www.wikidata.org/prop/direct/P27, anonymous) +// │ o: Var (name=countryOfCitizenship) +// └── Join (JoinIterator) (resultSizeActual=2.6K, totalTimeActual=0.003ms, selfTimeActual=0.001ms) [right] +// ╠══ StatementPattern (costEstimate=3.5K, resultSizeEstimate=12.1M, resultSizeActual=6.7K, totalTimeActual=0.0ms, selfTimeActual=0.0ms) [left] +// ║ s: Var (name=boardMember) +// ║ p: Var (name=_const_d85f6039_uri, value=http://www.wikidata.org/prop/direct/P735, anonymous) +// ║ o: Var (name=givenName) +// ╚══ Join (JoinIterator) (resultSizeActual=2.6K, totalTimeActual=0.003ms, selfTimeActual=0.001ms) [right] +// ├── StatementPattern (costEstimate=3.8K, resultSizeEstimate=14.6M, resultSizeActual=6.7K, totalTimeActual=0.0ms, selfTimeActual=0.0ms) [left] +// │ s: Var (name=boardMember) +// │ p: Var (name=_const_e5f28edf_uri, value=http://www.wikidata.org/prop/direct/P21, anonymous) +// │ o: Var (name=sexOrGender) +// └── Join (JoinIterator) (resultSizeActual=2.6K, totalTimeActual=0.002ms, selfTimeActual=0.001ms) [right] +// ╠══ StatementPattern (costEstimate=4.1K, resultSizeEstimate=17.1M, resultSizeActual=2.6K, totalTimeActual=0.0ms, selfTimeActual=0.0ms) [left] +// ║ s: Var (name=boardMember) +// ║ p: Var (name=_const_d85f4957_uri, value=http://www.wikidata.org/prop/direct/P106, anonymous) +// ║ o: Var (name=occupation) +// ╚══ StatementPattern (costEstimate=13.8K, resultSizeEstimate=191.5M, resultSizeActual=2.6K, totalTimeActual=0.0ms, selfTimeActual=0.0ms) [right] +// s: Var (name=boardMember) +// p: Var (name=_const_e5f28efe_uri, value=http://www.wikidata.org/prop/direct/P31, anonymous) +// o: Var (name=type) +// +// */ +// +// EndpointTripleSource.ENABLE_MERGE_JOIN = false; +// runQuery(connection, query); +// System.out.println(runQuery(connection, query)); +// +///* +//Projection (resultSizeActual=2.6K, totalTimeActual=163ms, selfTimeActual=0.316ms) +//╠══ ProjectionElemList +//║ ProjectionElem "company1" +//║ ProjectionElem "boardMember" +//║ ProjectionElem "company2" +//║ ProjectionElem "company3" +//║ ProjectionElem "company4" +//║ ProjectionElem "type" +//║ ProjectionElem "sexOrGender" +//║ ProjectionElem "givenName" +//║ ProjectionElem "familyName" +//║ ProjectionElem "occupation" +//║ ProjectionElem "countryOfCitizenship" +//╚══ Filter (resultSizeActual=2.6K, totalTimeActual=163ms, selfTimeActual=0.54ms) +// ├── And +// │ ╠══ And +// │ ║ ├── Compare (!=) +// │ ║ │ Var (name=company1) +// │ ║ │ Var (name=company2) +// │ ║ └── Compare (!=) +// │ ║ Var (name=company1) +// │ ║ Var (name=company3) +// │ ╚══ Compare (!=) +// │ Var (name=company1) +// │ Var 
(name=company4) +// └── Join (JoinIterator) (resultSizeActual=7.0K, totalTimeActual=163ms, selfTimeActual=162ms) +// ╠══ StatementPattern (costEstimate=545, resultSizeEstimate=5.4K, resultSizeActual=2.7K, totalTimeActual=0.416ms, selfTimeActual=0.416ms) [left] +// ║ s: Var (name=company1) +// ║ p: Var (name=_const_338ad53e_uri, value=http://www.wikidata.org/prop/direct/P3320, anonymous) +// ║ o: Var (name=boardMember) +// ╚══ Filter (resultSizeActual=7.0K, totalTimeActual=0.073ms, selfTimeActual=0.0ms) [right] +// ├── And +// │ ╠══ Compare (!=) +// │ ║ Var (name=company2) +// │ ║ Var (name=company3) +// │ ╚══ Compare (!=) +// │ Var (name=company2) +// │ Var (name=company4) +// └── Join (JoinIterator) (resultSizeActual=14.3K, totalTimeActual=0.073ms, selfTimeActual=0.001ms) +// ╠══ StatementPattern (costEstimate=74, resultSizeEstimate=5.4K, resultSizeActual=3.7K, totalTimeActual=0.0ms, selfTimeActual=0.0ms) [left] +// ║ s: Var (name=company2) +// ║ p: Var (name=_const_338ad53e_uri, value=http://www.wikidata.org/prop/direct/P3320, anonymous) +// ║ o: Var (name=boardMember) +// ╚══ Filter (resultSizeActual=14.3K, totalTimeActual=0.071ms, selfTimeActual=0.0ms) [right] +// ├── Compare (!=) +// │ Var (name=company3) +// │ Var (name=company4) +// └── Join (JoinIterator) (resultSizeActual=22.5K, totalTimeActual=0.071ms, selfTimeActual=0.001ms) +// ╠══ StatementPattern (costEstimate=74, resultSizeEstimate=5.4K, resultSizeActual=7.6K, totalTimeActual=0.0ms, selfTimeActual=0.0ms) [left] +// ║ s: Var (name=company3) +// ║ p: Var (name=_const_338ad53e_uri, value=http://www.wikidata.org/prop/direct/P3320, anonymous) +// ║ o: Var (name=boardMember) +// ╚══ Join (JoinIterator) (resultSizeActual=22.5K, totalTimeActual=0.07ms, selfTimeActual=0.07ms) [right] +// ├── StatementPattern (costEstimate=74, resultSizeEstimate=5.4K, resultSizeActual=28.8K, totalTimeActual=0.0ms, selfTimeActual=0.0ms) [left] +// │ s: Var (name=company4) +// │ p: Var (name=_const_338ad53e_uri, value=http://www.wikidata.org/prop/direct/P3320, anonymous) +// │ o: Var (name=boardMember) +// └── Join (JoinIterator) (resultSizeActual=22.5K, totalTimeActual=0.0ms) [right] +// ╠══ StatementPattern (costEstimate=2.6K, resultSizeEstimate=6.6M, resultSizeActual=22.2K, totalTimeActual=0.0ms, selfTimeActual=0.0ms) [left] +// ║ s: Var (name=boardMember) +// ║ p: Var (name=_const_d85f6038_uri, value=http://www.wikidata.org/prop/direct/P734, anonymous) +// ║ o: Var (name=familyName) +// ╚══ Join (JoinIterator) (resultSizeActual=22.5K, totalTimeActual=0.004ms, selfTimeActual=0.001ms) [right] +// ├── StatementPattern (costEstimate=2.9K, resultSizeEstimate=8.2M, resultSizeActual=22.5K, totalTimeActual=0.0ms, selfTimeActual=0.0ms) [left] +// │ s: Var (name=boardMember) +// │ p: Var (name=_const_e5f28ee5_uri, value=http://www.wikidata.org/prop/direct/P27, anonymous) +// │ o: Var (name=countryOfCitizenship) +// └── Join (JoinIterator) (resultSizeActual=22.5K, totalTimeActual=0.003ms, selfTimeActual=0.001ms) [right] +// ╠══ StatementPattern (costEstimate=3.5K, resultSizeEstimate=12.1M, resultSizeActual=23.0K, totalTimeActual=0.0ms, selfTimeActual=0.0ms) [left] +// ║ s: Var (name=boardMember) +// ║ p: Var (name=_const_d85f6039_uri, value=http://www.wikidata.org/prop/direct/P735, anonymous) +// ║ o: Var (name=givenName) +// ╚══ Join (JoinIterator) (resultSizeActual=22.5K, totalTimeActual=0.002ms, selfTimeActual=0.001ms) [right] +// ├── StatementPattern (costEstimate=3.8K, resultSizeEstimate=14.6M, resultSizeActual=23.0K, totalTimeActual=0.0ms, 
selfTimeActual=0.0ms) [left] +// │ s: Var (name=boardMember) +// │ p: Var (name=_const_e5f28edf_uri, value=http://www.wikidata.org/prop/direct/P21, anonymous) +// │ o: Var (name=sexOrGender) +// └── Join (JoinIterator) (resultSizeActual=22.5K, totalTimeActual=0.001ms, selfTimeActual=0.001ms) [right] +// ╠══ StatementPattern (costEstimate=4.1K, resultSizeEstimate=17.1M, resultSizeActual=22.5K, totalTimeActual=0.0ms, selfTimeActual=0.0ms) [left] +// ║ s: Var (name=boardMember) +// ║ p: Var (name=_const_d85f4957_uri, value=http://www.wikidata.org/prop/direct/P106, anonymous) +// ║ o: Var (name=occupation) +// ╚══ StatementPattern (costEstimate=13.8K, resultSizeEstimate=191.5M, resultSizeActual=22.5K, totalTimeActual=0.0ms, selfTimeActual=0.0ms) [right] +// s: Var (name=boardMember) +// p: Var (name=_const_e5f28efe_uri, value=http://www.wikidata.org/prop/direct/P31, anonymous) +// o: Var (name=type) +// +// */ +// +// } +// +// } +// +// +// @Test +// public void testTemp() { +// try (SailRepositoryConnection connection = endpointStore.getConnection()) { +// System.out.println(); +// String query = """ +// PREFIX wd: +// PREFIX wdt: +// PREFIX wikibase: +// PREFIX p: +// PREFIX ps: +// PREFIX pq: +// PREFIX rdfs: +// PREFIX bd: +// PREFIX wdno: +// PREFIX schema: +// PREFIX dct: +// PREFIX skos: +//PREFIX psv: +//PREFIX psn: +//PREFIX pr: +//PREFIX rdf: +//PREFIX prov: +// +// +//#two chemical compounds with the same CAS registry number +// SELECT DISTINCT ?cas ?compound1 ?compound1Label ?compound2 ?compound2Label WHERE { +// ?compound1 wdt:P231 ?cas . +// ?compound2 wdt:P231 ?cas . +// FILTER (?compound1 != ?compound2) +// } +// """; +// +// +// QueryJoinOptimizer.MERGE_JOIN_CARDINALITY_SIZE_DIFF_MULTIPLIER = 10; +// runQuery(connection, query); +// System.out.println(runQuery(connection, query)); +// +///* +//Distinct (resultSizeActual=860, totalTimeActual=351ms, selfTimeActual=2.81ms) +// Projection (resultSizeActual=860, totalTimeActual=348ms, selfTimeActual=0.133ms) +// ├── ProjectionElemList +// │ ProjectionElem "cas" +// │ ProjectionElem "compound1" +// │ ProjectionElem "compound1Label" +// │ ProjectionElem "compound2" +// │ ProjectionElem "compound2Label" +// └── Filter (resultSizeActual=860, totalTimeActual=348ms, selfTimeActual=31.3ms) +// ╠══ Compare (!=) +// ║ Var (name=compound1) +// ║ Var (name=compound2) +// ╚══ Join (InnerMergeJoinIterator) (resultSizeActual=932.4K, totalTimeActual=316ms, selfTimeActual=147ms) +// ├── StatementPattern [statementOrder: O] (costEstimate=931.6K, resultSizeEstimate=1.9M, resultSizeActual=931.5K, totalTimeActual=94.3ms, selfTimeActual=94.3ms) [left] +// │ s: Var (name=compound1) +// │ p: Var (name=_const_d85f4d70_uri, value=http://www.wikidata.org/prop/direct/P231, anonymous) +// │ o: Var (name=cas) +// └── StatementPattern [statementOrder: O] (costEstimate=1.4K, resultSizeEstimate=1.9M, resultSizeActual=931.5K, totalTimeActual=74.8ms, selfTimeActual=74.8ms) [right] +// s: Var (name=compound2) +// p: Var (name=_const_d85f4d70_uri, value=http://www.wikidata.org/prop/direct/P231, anonymous) +// o: Var (name=cas) +// +// */ +// +// EndpointTripleSource.ENABLE_MERGE_JOIN = false; +// runQuery(connection, query); +// System.out.println(runQuery(connection, query)); +// +///* +//Distinct (resultSizeActual=860, totalTimeActual=1.2s, selfTimeActual=2.47ms) +// Projection (resultSizeActual=860, totalTimeActual=1.2s, selfTimeActual=0.203ms) +// ├── ProjectionElemList +// │ ProjectionElem "cas" +// │ ProjectionElem "compound1" +// │ ProjectionElem 
"compound1Label" +// │ ProjectionElem "compound2" +// │ ProjectionElem "compound2Label" +// └── Filter (resultSizeActual=860, totalTimeActual=1.2s, selfTimeActual=25.8ms) +// ╠══ Compare (!=) +// ║ Var (name=compound1) +// ║ Var (name=compound2) +// ╚══ Join (JoinIterator) (resultSizeActual=932.4K, totalTimeActual=1.2s, selfTimeActual=1.1s) +// ├── StatementPattern (costEstimate=931.6K, resultSizeEstimate=1.9M, resultSizeActual=931.5K, totalTimeActual=80.2ms, selfTimeActual=80.2ms) [left] +// │ s: Var (name=compound1) +// │ p: Var (name=_const_d85f4d70_uri, value=http://www.wikidata.org/prop/direct/P231, anonymous) +// │ o: Var (name=cas) +// └── StatementPattern (costEstimate=1.4K, resultSizeEstimate=1.9M, resultSizeActual=932.4K, totalTimeActual=0.0ms, selfTimeActual=0.0ms) [right] +// s: Var (name=compound2) +// p: Var (name=_const_d85f4d70_uri, value=http://www.wikidata.org/prop/direct/P231, anonymous) +// o: Var (name=cas) +// +// */ +// +// } +// +// } +// +// +// @Test +// public void testTemp2() { +// try (SailRepositoryConnection connection = endpointStore.getConnection()) { +// System.out.println(); +// String query = """ +// PREFIX wd: +// PREFIX wdt: +// PREFIX wikibase: +// PREFIX p: +// PREFIX ps: +// PREFIX pq: +// PREFIX rdfs: +// PREFIX bd: +// PREFIX wdno: +// PREFIX schema: +// PREFIX dct: +// PREFIX skos: +//PREFIX psv: +//PREFIX psn: +//PREFIX pr: +//PREFIX rdf: +//PREFIX prov: +// +//#defaultView:BubbleChart +//SELECT * +//WHERE +//{ +// ?object wdt:P50 ?author . +// ?author wdt:P106 ?occupation . +//} +// +// """; +// +// +// QueryJoinOptimizer.MERGE_JOIN_CARDINALITY_SIZE_DIFF_MULTIPLIER = 100; +//// runQuery(connection, query); +//// runQuery(connection, query); +//// runQuery(connection, query); +//// runQuery(connection, query); +//// runQuery(connection, query); +//// runQuery(connection, query); +//// runQuery(connection, query); +//// runQuery(connection, query); +//// runQuery(connection, query); +//// runQuery(connection, query); +//// runQuery(connection, query); +//// runQuery(connection, query); +//// runQuery(connection, query); +//// runQuery(connection, query); +// +//// QueryJoinOptimizer.MERGE_JOIN_CARDINALITY_SIZE_DIFF_MULTIPLIER = 10; +//// runQuery(connection, query); +// +///* +// +// */ +//// +//// EndpointTripleSource.ENABLE_MERGE_JOIN = false; +//// runQuery(connection, query); +// +///* +// +// */ +// +// } +// +// } +// +// +// +// private static String runQuery(SailRepositoryConnection connection, String query) { +// StopWatch stopWatch = StopWatch.createStarted(); +// TupleQuery tupleQuery = connection.prepareTupleQuery(query); +// tupleQuery.setMaxExecutionTime(10*60); +// Explanation explain = tupleQuery.explain(Explanation.Level.Timed); +//// System.out.println(explain); +//// System.out.println(); +// System.out.println("Took: " + stopWatch.formatTime()); +// +// return explain.toString(); +// +// } +//} diff --git a/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/EndpointMultIndexSPARQL11QueryComplianceTest.java b/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/EndpointMultIndexSPARQL11QueryComplianceTest.java index 59caedf4..18153d0a 100644 --- a/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/EndpointMultIndexSPARQL11QueryComplianceTest.java +++ b/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/EndpointMultIndexSPARQL11QueryComplianceTest.java @@ -34,11 +34,8 @@ public class EndpointMultIndexSPARQL11QueryComplianceTest extends SPARQL11QueryComplianceTest { private 
static final Logger logger = LoggerFactory.getLogger(EndpointSPARQL11QueryComplianceTest.class); - public EndpointMultIndexSPARQL11QueryComplianceTest(String displayName, String testURI, String name, - String queryFileURL, String resultFileURL, Dataset dataset, boolean ordered, boolean laxCardinality) - throws ParserException, NotFoundException, IOException { - super(displayName, testURI, name, queryFileURL, resultFileURL, null, ordered, laxCardinality); - setUpHDT(dataset); + public EndpointMultIndexSPARQL11QueryComplianceTest() { + super(); List testToIgnore = new ArrayList<>(); // @todo these tests are failing and should not, they are skipped so // that we can be sure that we see when @@ -72,6 +69,16 @@ public EndpointMultIndexSPARQL11QueryComplianceTest(String displayName, String t File nativeStore; File hdtStore; + @Override + protected void testParameterListener(String displayName, String testURI, String name, String queryFileURL, + String resultFileURL, Dataset dataset, boolean ordered, boolean laxCardinality) { + try { + setUpHDT(dataset); + } catch (IOException | ParserException | NotFoundException e) { + throw new RuntimeException(e); + } + } + @Override protected Repository newRepository() throws Exception { nativeStore = tempDir.newFolder(); @@ -116,11 +123,6 @@ public HDT loadIndex() throws IOException { return new SailRepository(endpoint); } - @Override - public void setUp() throws Exception { - super.setUp(); - } - HDT hdt; private void setUpHDT(Dataset dataset) throws IOException, ParserException, NotFoundException { diff --git a/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/EndpointMultIndexSPARQL11UpdateComplianceTest.java b/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/EndpointMultIndexSPARQL11UpdateComplianceTest.java index 698afb1c..105cda30 100644 --- a/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/EndpointMultIndexSPARQL11UpdateComplianceTest.java +++ b/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/EndpointMultIndexSPARQL11UpdateComplianceTest.java @@ -27,11 +27,8 @@ public class EndpointMultIndexSPARQL11UpdateComplianceTest extends SPARQL11UpdateComplianceTest { - public EndpointMultIndexSPARQL11UpdateComplianceTest(String displayName, String testURI, String name, - String requestFile, IRI defaultGraphURI, Map inputNamedGraphs, IRI resultDefaultGraphURI, - Map resultNamedGraphs) { - super(displayName, testURI, name, requestFile, defaultGraphURI, inputNamedGraphs, resultDefaultGraphURI, - resultNamedGraphs); + public EndpointMultIndexSPARQL11UpdateComplianceTest() { + super(); List testToIgnore = new ArrayList<>(); // @todo these tests are failing and should not, they are skipped so // that we can be sure that we see when @@ -95,8 +92,4 @@ public HDT loadIndex() throws IOException { // NativeStore(tempDir.newFolder(), "spoc"))); } - @Override - public void setUp() throws Exception { - super.setUp(); - } } diff --git a/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/EndpointSPARQL11QueryComplianceTest.java b/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/EndpointSPARQL11QueryComplianceTest.java index 06b5a687..95494c1e 100644 --- a/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/EndpointSPARQL11QueryComplianceTest.java +++ b/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/EndpointSPARQL11QueryComplianceTest.java @@ -1,3 +1,4 @@ + package com.the_qa_company.qendpoint.store; import org.eclipse.rdf4j.query.Dataset; @@ -21,6 
+22,7 @@ import java.net.JarURLConnection; import java.net.URL; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; /** @@ -29,37 +31,41 @@ public class EndpointSPARQL11QueryComplianceTest extends SPARQL11QueryComplianceTest { private static final Logger logger = LoggerFactory.getLogger(EndpointSPARQL11QueryComplianceTest.class); - public EndpointSPARQL11QueryComplianceTest(String displayName, String testURI, String name, String queryFileURL, - String resultFileURL, Dataset dataset, boolean ordered, boolean laxCardinality) - throws ParserException, NotFoundException, IOException { - super(displayName, testURI, name, queryFileURL, resultFileURL, null, ordered, laxCardinality); - setUpHDT(dataset); - List testToIgnore = new ArrayList<>(); - // @todo these tests are failing and should not, they are skipped so - // that we can be sure that we see when - // currently passing tests are not failing. Many of these tests are not - // so problematic since we do not support - // named graphs anyway - testToIgnore.add("constructwhere02 - CONSTRUCT WHERE"); - testToIgnore.add("constructwhere03 - CONSTRUCT WHERE"); - testToIgnore.add("constructwhere04 - CONSTRUCT WHERE"); - testToIgnore.add("Exists within graph pattern"); - testToIgnore.add("(pp07) Path with one graph"); - testToIgnore.add("(pp35) Named Graph 2"); - testToIgnore.add("sq01 - Subquery within graph pattern"); - testToIgnore.add("sq02 - Subquery within graph pattern, graph variable is bound"); - testToIgnore.add("sq03 - Subquery within graph pattern, graph variable is not bound"); - testToIgnore.add("sq04 - Subquery within graph pattern, default graph does not apply"); - testToIgnore.add("sq05 - Subquery within graph pattern, from named applies"); - testToIgnore.add("sq06 - Subquery with graph pattern, from named applies"); - testToIgnore.add("sq07 - Subquery with from "); - testToIgnore.add("sq11 - Subquery limit per resource"); - testToIgnore.add("sq13 - Subqueries don't inject bindings"); - testToIgnore.add("sq14 - limit by resource"); + public EndpointSPARQL11QueryComplianceTest() { + super(); + + List testToIgnore = Arrays.asList( + // @todo these tests are failing and should not, they are + // skipped so + // that we can be sure that we see when + // currently passing tests are not failing. 
Many of these tests + // are not + // so problematic since we do not support + // named graphs anyway + "constructwhere02 - CONSTRUCT WHERE", "constructwhere03 - CONSTRUCT WHERE", + "constructwhere04 - CONSTRUCT WHERE", "Exists within graph pattern", "(pp07) Path with one graph", + "(pp35) Named Graph 2", "sq01 - Subquery within graph pattern", + "sq02 - Subquery within graph pattern, graph variable is bound", + "sq03 - Subquery within graph pattern, graph variable is not bound", + "sq04 - Subquery within graph pattern, default graph does not apply", + "sq05 - Subquery within graph pattern, from named applies", + "sq06 - Subquery with graph pattern, from named applies", "sq07 - Subquery with from ", + "sq11 - Subquery limit per resource", "sq13 - Subqueries don't inject bindings", + "sq14 - limit by resource"); this.setIgnoredTests(testToIgnore); } + @Override + protected void testParameterListener(String displayName, String testURI, String name, String queryFileURL, + String resultFileURL, Dataset dataset, boolean ordered, boolean laxCardinality) { + try { + setUpHDT(dataset); + } catch (IOException | ParserException | NotFoundException e) { + throw new RuntimeException(e); + } + } + @Rule public TemporaryFolder tempDir = TemporaryFolder.builder().assureDeletion().build(); @@ -87,11 +93,6 @@ protected Repository newRepository() throws Exception { return new SailRepository(endpoint); } - @Override - public void setUp() throws Exception { - super.setUp(); - } - HDT hdt; private void setUpHDT(Dataset dataset) throws IOException, ParserException, NotFoundException { diff --git a/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/EndpointSPARQL11UpdateComplianceTest.java b/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/EndpointSPARQL11UpdateComplianceTest.java index 6b11d665..b6ac6ef8 100644 --- a/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/EndpointSPARQL11UpdateComplianceTest.java +++ b/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/EndpointSPARQL11UpdateComplianceTest.java @@ -22,11 +22,8 @@ */ public class EndpointSPARQL11UpdateComplianceTest extends SPARQL11UpdateComplianceTest { - public EndpointSPARQL11UpdateComplianceTest(String displayName, String testURI, String name, String requestFile, - IRI defaultGraphURI, Map inputNamedGraphs, IRI resultDefaultGraphURI, - Map resultNamedGraphs) { - super(displayName, testURI, name, requestFile, defaultGraphURI, inputNamedGraphs, resultDefaultGraphURI, - resultNamedGraphs); + public EndpointSPARQL11UpdateComplianceTest() { + List testToIgnore = new ArrayList<>(); // @todo these tests are failing and should not, they are skipped so // that we can be sure that we see when @@ -63,8 +60,4 @@ protected Repository newRepository() throws Exception { // NativeStore(tempDir.newFolder(), "spoc"))); } - @Override - public void setUp() throws Exception { - super.setUp(); - } } diff --git a/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/EndpointStoreNotifyTest.java b/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/EndpointStoreNotifyTest.java index 33ed2f59..fe42ed63 100644 --- a/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/EndpointStoreNotifyTest.java +++ b/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/EndpointStoreNotifyTest.java @@ -396,8 +396,8 @@ public void basicQueryAfterMerge() throws InterruptedException { MergeRunnable.debugWaitMerge(); try (NotifyingSailConnection co = store.getConnection()) { - try 
(CloseableIteration stmt = co.getStatements(iri("ex5"), - iri("p"), iri("placeholder"), false)) { + try (CloseableIteration stmt = co.getStatements(iri("ex5"), iri("p"), + iri("placeholder"), false)) { assertTrue(stmt.hasNext()); assertTrue(stmt.next().getSubject() instanceof HDTValue); assertFalse(stmt.hasNext()); @@ -466,8 +466,8 @@ public void basicQueryDuringMerge() throws InterruptedException { MergeRunnable.debugWaitMerge(); try (NotifyingSailConnection co = store.getConnection()) { - try (CloseableIteration stmt = co.getStatements(iri("ex5"), - iri("p"), iri("placeholder"), false)) { + try (CloseableIteration stmt = co.getStatements(iri("ex5"), iri("p"), + iri("placeholder"), false)) { assertTrue(stmt.hasNext()); assertTrue(stmt.next().getSubject() instanceof HDTValue); assertFalse(stmt.hasNext()); @@ -563,8 +563,8 @@ public void basicQueryDuringMergeWithDelete() throws InterruptedException { MergeRunnable.debugWaitMerge(); try (NotifyingSailConnection co = store.getConnection()) { - try (CloseableIteration stmt = co.getStatements(iri("ex5"), - iri("p"), iri("placeholder"), false)) { + try (CloseableIteration stmt = co.getStatements(iri("ex5"), iri("p"), + iri("placeholder"), false)) { assertTrue(stmt.hasNext()); assertTrue(stmt.next().getSubject() instanceof HDTValue); assertFalse(stmt.hasNext()); @@ -671,8 +671,8 @@ public NotifyTestStoreConnection(NotifyingSailConnection wrappedCon) { } @Override - public CloseableIteration evaluate(TupleExpr tupleExpr, - Dataset dataset, BindingSet bindings, boolean includeInferred) throws SailException { + public CloseableIteration evaluate(TupleExpr tupleExpr, Dataset dataset, + BindingSet bindings, boolean includeInferred) throws SailException { tupleExpr = tupleExpr.clone(); if (!(tupleExpr instanceof QueryRoot)) { tupleExpr = new QueryRoot(tupleExpr); diff --git a/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/EndpointStoreTest.java b/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/EndpointStoreTest.java index c7de1f46..a1b3fcb3 100644 --- a/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/EndpointStoreTest.java +++ b/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/EndpointStoreTest.java @@ -707,6 +707,7 @@ public void sparqlJoinTest() throws IOException, NotFoundException { try { try (RepositoryConnection connection = endpointStore.getConnection()) { + connection.begin(); ValueFactory vf = connection.getValueFactory(); String ex = "http://example.com/"; IRI ali = vf.createIRI(ex, "Ali"); @@ -714,6 +715,16 @@ public void sparqlJoinTest() throws IOException, NotFoundException { IRI guo = vf.createIRI(ex, "Guo"); IRI has = vf.createIRI(ex, "has"); connection.add(guo, has, FOAF.ACCOUNT); + connection.commit(); + } + + // force merge so that we can use merge join later + store.mergeStore(); + while (store.isMergeTriggered || store.isMerging()) { + Thread.onSpinWait(); + } + + try (RepositoryConnection connection = endpointStore.getConnection()) { TupleQuery tupleQuery = connection.prepareTupleQuery( String.join("\n", "", "PREFIX rdf: ", diff --git a/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/Utility.java b/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/Utility.java index e851a1f9..2e805247 100644 --- a/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/Utility.java +++ b/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/Utility.java @@ -37,9 +37,11 @@ import com.the_qa_company.qendpoint.core.hdt.HDTManager; 
import com.the_qa_company.qendpoint.core.options.HDTOptions; +import java.io.BufferedOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; +import java.io.OutputStream; import java.util.Arrays; public class Utility { @@ -150,7 +152,7 @@ public static Statement getFakeStatement(ValueFactory vf, int id) { private static void writeBigIndex(File file) throws IOException { ValueFactory vf = new MemValueFactory(); - try (FileOutputStream out = new FileOutputStream(file)) { + try (OutputStream out = new BufferedOutputStream(new FileOutputStream(file))) { RDFWriter writer = Rio.createWriter(RDFFormat.NTRIPLES, out); writer.startRDF(); for (int i = 1; i <= COUNT; i++) { diff --git a/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/experimental/ExperimentalQEndpointSPARQL11ComplianceQueryTest.java b/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/experimental/ExperimentalQEndpointSPARQL11ComplianceQueryTest.java new file mode 100644 index 00000000..5442e38c --- /dev/null +++ b/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/experimental/ExperimentalQEndpointSPARQL11ComplianceQueryTest.java @@ -0,0 +1,57 @@ +package com.the_qa_company.qendpoint.store.experimental; + +import com.the_qa_company.qendpoint.core.options.HDTOptions; +import com.the_qa_company.qendpoint.core.options.HDTOptionsKeys; +import com.the_qa_company.qendpoint.store.Utility; +import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.sail.SailRepository; +import org.eclipse.rdf4j.sail.NotifyingSailConnection; +import org.eclipse.rdf4j.testsuite.query.parser.sparql.manifest.SPARQL11QueryComplianceTest; +import org.junit.jupiter.api.io.TempDir; + +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; + +public class ExperimentalQEndpointSPARQL11ComplianceQueryTest extends SPARQL11QueryComplianceTest { + /* + * Set this to true to print the call to the store when doing the compliance + * tests + */ + private static final boolean PRINT_CALLS = false; + + /* + * Set this to false to enable the graph tests + */ + private static final boolean DISABLE_GRAPH_TESTS = true; + + @TempDir + public Path tempDir; + + public ExperimentalQEndpointSPARQL11ComplianceQueryTest() { + + if (DISABLE_GRAPH_TESTS) { + this.setIgnoredTests(new ArrayList<>(List.of("constructwhere04 - CONSTRUCT WHERE", + "Exists within graph pattern", "(pp07) Path with one graph", "(pp34) Named Graph 1", + "(pp35) Named Graph 2", "sq01 - Subquery within graph pattern", + "sq02 - Subquery within graph pattern, graph variable is bound", + "sq03 - Subquery within graph pattern, graph variable is not bound", + "sq04 - Subquery within graph pattern, default graph does not apply", + "sq05 - Subquery within graph pattern, from named applies", + "sq06 - Subquery with graph pattern, from named applies", "sq07 - Subquery with from", + "sq11 - Subquery limit per resource", "sq13 - Subqueries don't inject bindings"))); + } + } + + @Override + protected Repository newRepository() throws Exception { + HDTOptions spec = HDTOptions.of(HDTOptionsKeys.DICTIONARY_TYPE_KEY, + HDTOptionsKeys.DICTIONARY_TYPE_VALUE_MULTI_OBJECTS_LANG); + ExperimentalQEndpointSail sail = new ExperimentalQEndpointSail(tempDir, spec); + + if (PRINT_CALLS) { + return Utility.convertToDumpRepository(new SailRepository(Utility.convertToDumpSail(sail))); + } + return new SailRepository(sail); + } +} diff --git 
a/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/experimental/ExperimentalQEndpointSPARQL11ComplianceTest.java b/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/experimental/ExperimentalQEndpointSPARQL11ComplianceTest.java deleted file mode 100644 index 45b9c1bd..00000000 --- a/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/experimental/ExperimentalQEndpointSPARQL11ComplianceTest.java +++ /dev/null @@ -1,99 +0,0 @@ -package com.the_qa_company.qendpoint.store.experimental; - -import com.the_qa_company.qendpoint.core.options.HDTOptions; -import com.the_qa_company.qendpoint.core.options.HDTOptionsKeys; -import com.the_qa_company.qendpoint.store.Utility; -import org.eclipse.rdf4j.model.IRI; -import org.eclipse.rdf4j.query.Dataset; -import org.eclipse.rdf4j.repository.Repository; -import org.eclipse.rdf4j.repository.sail.SailRepository; -import org.eclipse.rdf4j.testsuite.query.parser.sparql.manifest.SPARQL11QueryComplianceTest; -import org.eclipse.rdf4j.testsuite.query.parser.sparql.manifest.SPARQL11UpdateComplianceTest; -import org.junit.Rule; -import org.junit.rules.TemporaryFolder; -import org.junit.runner.RunWith; -import org.junit.runners.Suite; - -import java.io.IOException; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -@RunWith(Suite.class) -@Suite.SuiteClasses({ ExperimentalQEndpointSPARQL11ComplianceTest.UpdateTest.class, - ExperimentalQEndpointSPARQL11ComplianceTest.QueryTest.class }) -public class ExperimentalQEndpointSPARQL11ComplianceTest { - /* - * Set this to true to print the call to the store when doing the compliance - * tests - */ - private static final boolean PRINT_CALLS = false; - /* - * Set this to false to enable the graph tests - */ - private static final boolean DISABLE_GRAPH_TESTS = true; - - private static Repository createRepo(TemporaryFolder tempDir) throws IOException { - Path root = tempDir.newFolder().toPath(); - HDTOptions spec = HDTOptions.of(HDTOptionsKeys.DICTIONARY_TYPE_KEY, - HDTOptionsKeys.DICTIONARY_TYPE_VALUE_MULTI_OBJECTS_LANG); - ExperimentalQEndpointSail sail = new ExperimentalQEndpointSail(root, spec); - if (PRINT_CALLS) { - return Utility.convertToDumpRepository(new SailRepository(Utility.convertToDumpSail(sail))); - } - return new SailRepository(sail); - } - - public static class QueryTest extends SPARQL11QueryComplianceTest { - @Rule - public TemporaryFolder tempDir = TemporaryFolder.builder().assureDeletion().build(); - - public QueryTest(String displayName, String testURI, String name, String queryFileURL, String resultFileURL, - Dataset dataset, boolean ordered, boolean laxCardinality) { - super(displayName, testURI, name, queryFileURL, resultFileURL, dataset, ordered, laxCardinality); - - if (DISABLE_GRAPH_TESTS) { - this.setIgnoredTests(new ArrayList<>(List.of("constructwhere04 - CONSTRUCT WHERE", - "Exists within graph pattern", "(pp07) Path with one graph", "(pp34) Named Graph 1", - "(pp35) Named Graph 2", "sq01 - Subquery within graph pattern", - "sq02 - Subquery within graph pattern, graph variable is bound", - "sq03 - Subquery within graph pattern, graph variable is not bound", - "sq04 - Subquery within graph pattern, default graph does not apply", - "sq05 - Subquery within graph pattern, from named applies", - "sq06 - Subquery with graph pattern, from named applies", "sq07 - Subquery with from", - "sq11 - Subquery limit per resource", "sq13 - Subqueries don't inject bindings"))); - } - } - - @Override - protected 
Repository newRepository() throws Exception { - return createRepo(tempDir); - } - } - - public static class UpdateTest extends SPARQL11UpdateComplianceTest { - @Rule - public TemporaryFolder tempDir = TemporaryFolder.builder().assureDeletion().build(); - - public UpdateTest(String displayName, String testURI, String name, String requestFile, IRI defaultGraphURI, - Map inputNamedGraphs, IRI resultDefaultGraphURI, Map resultNamedGraphs) { - super(displayName, testURI, name, requestFile, defaultGraphURI, inputNamedGraphs, resultDefaultGraphURI, - resultNamedGraphs); - - if (DISABLE_GRAPH_TESTS) { - this.setIgnoredTests(new ArrayList<>(List.of("INSERT 03", "INSERT 04", "INSERT USING 01", - "DELETE INSERT 1b", "DELETE INSERT 1c", "INSERT same bnode twice", "CLEAR NAMED", "DROP NAMED", - "DROP GRAPH", "DROP DEFAULT", "CLEAR GRAPH", "CLEAR DEFAULT", "COPY 1", "COPY 3", "COPY 6", - "MOVE 1", "MOVE 3", "MOVE 6", "Graph-specific DELETE DATA 1", "Graph-specific DELETE DATA 2", - "Graph-specific DELETE 1", "Graph-specific DELETE 1 (WITH)", "Graph-specific DELETE 1 (USING)", - "Simple DELETE 1 (USING)", "Simple DELETE 2 (WITH)", "Simple DELETE 4 (WITH)"))); - } - } - - @Override - protected Repository newRepository() throws Exception { - return createRepo(tempDir); - } - } -} diff --git a/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/experimental/ExperimentalQEndpointSPARQL11ComplianceUpdateTest.java b/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/experimental/ExperimentalQEndpointSPARQL11ComplianceUpdateTest.java new file mode 100644 index 00000000..83d95182 --- /dev/null +++ b/qendpoint-store/src/test/java/com/the_qa_company/qendpoint/store/experimental/ExperimentalQEndpointSPARQL11ComplianceUpdateTest.java @@ -0,0 +1,56 @@ +package com.the_qa_company.qendpoint.store.experimental; + +import com.the_qa_company.qendpoint.core.options.HDTOptions; +import com.the_qa_company.qendpoint.core.options.HDTOptionsKeys; +import com.the_qa_company.qendpoint.store.Utility; +import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.sail.SailRepository; +import org.eclipse.rdf4j.sail.NotifyingSailConnection; +import org.eclipse.rdf4j.testsuite.query.parser.sparql.manifest.SPARQL11UpdateComplianceTest; +import org.junit.jupiter.api.io.TempDir; + +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; + +public class ExperimentalQEndpointSPARQL11ComplianceUpdateTest extends SPARQL11UpdateComplianceTest { + /* + * Set this to true to print the call to the store when doing the compliance + * tests + */ + private static final boolean PRINT_CALLS = false; + + /* + * Set this to false to enable the graph tests + */ + private static final boolean DISABLE_GRAPH_TESTS = true; + + @TempDir + public Path tempDir; + + public ExperimentalQEndpointSPARQL11ComplianceUpdateTest() { + + if (DISABLE_GRAPH_TESTS) { + this.setIgnoredTests(new ArrayList<>(List.of("INSERT 03", "INSERT 04", "INSERT USING 01", + "DELETE INSERT 1b", "DELETE INSERT 1c", "INSERT same bnode twice", "CLEAR NAMED", "DROP NAMED", + "DROP GRAPH", "DROP DEFAULT", "CLEAR GRAPH", "CLEAR DEFAULT", "COPY 1", "COPY 3", "COPY 6", + "MOVE 1", "MOVE 3", "MOVE 6", "Graph-specific DELETE DATA 1", "Graph-specific DELETE DATA 2", + "Graph-specific DELETE 1", "Graph-specific DELETE 1 (WITH)", "Graph-specific DELETE 1 (USING)", + "Simple DELETE 1 (USING)", "Simple DELETE 2 (WITH)", "Simple DELETE 4 (WITH)"))); + } + } + + @Override + protected Repository 
newRepository() throws Exception {
+		HDTOptions spec = HDTOptions.of(HDTOptionsKeys.DICTIONARY_TYPE_KEY,
+				HDTOptionsKeys.DICTIONARY_TYPE_VALUE_MULTI_OBJECTS_LANG);
+		ExperimentalQEndpointSail sail = new ExperimentalQEndpointSail(tempDir, spec);
+
+		if (PRINT_CALLS) {
+			return Utility.convertToDumpRepository(new SailRepository(Utility.convertToDumpSail(sail)));
+		}
+		return new SailRepository(sail);
+	}
+}
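
The query and update compliance tests above all follow the same migration: the JUnit 4 parameterized constructor is replaced by a no-arg constructor that registers the ignored tests once, and the per-test parameters now arrive through the testParameterListener hook, where dataset-dependent setup (building the HDT) runs. Below is a minimal consolidated sketch of that shape, assuming the RDF4J 5 snapshot test suite used in this patch; the class name ExampleSPARQL11QueryComplianceTest, the stubbed newRepository() and setUpHDT() bodies, and the import paths of the HDT exceptions are illustrative, not part of the patch.

import java.io.IOException;
import java.util.Arrays;

import org.eclipse.rdf4j.query.Dataset;
import org.eclipse.rdf4j.repository.Repository;
import org.eclipse.rdf4j.testsuite.query.parser.sparql.manifest.SPARQL11QueryComplianceTest;

import com.the_qa_company.qendpoint.core.exceptions.NotFoundException;
import com.the_qa_company.qendpoint.core.exceptions.ParserException;

// Sketch only: consolidated shape of the migrated compliance tests in this patch.
public class ExampleSPARQL11QueryComplianceTest extends SPARQL11QueryComplianceTest {

	public ExampleSPARQL11QueryComplianceTest() {
		super();
		// tests that are known to fail (e.g. named-graph tests) are skipped up front
		setIgnoredTests(Arrays.asList("constructwhere02 - CONSTRUCT WHERE", "(pp35) Named Graph 2"));
	}

	@Override
	protected void testParameterListener(String displayName, String testURI, String name, String queryFileURL,
			String resultFileURL, Dataset dataset, boolean ordered, boolean laxCardinality) {
		// the per-test parameters that used to be constructor arguments arrive here,
		// so dataset-dependent setup runs once per dynamic test
		try {
			setUpHDT(dataset);
		} catch (IOException | ParserException | NotFoundException e) {
			throw new RuntimeException(e);
		}
	}

	@Override
	protected Repository newRepository() throws Exception {
		// build the repository under test here (endpoint store, experimental sail, ...)
		throw new UnsupportedOperationException("sketch only");
	}

	private void setUpHDT(Dataset dataset) throws IOException, ParserException, NotFoundException {
		// convert the manifest dataset into an HDT file, as the real tests do (omitted here)
	}
}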
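
sparqlJoinTest above now commits the inserted triple, calls store.mergeStore() and spin-waits on isMergeTriggered / isMerging() before running the query, so the delta is merged into the HDT and a merge join becomes possible. A sketch of that wait as a reusable helper follows; mergeStore(), isMergeTriggered and isMerging() are taken from the test, while the helper name, the timeout guard and the throws Exception clause are illustrative additions.

package com.the_qa_company.qendpoint.store;

// Sketch: mirrors the spin-wait added to sparqlJoinTest. Placed in the same
// package as the test so the isMergeTriggered field is accessible; the timeout
// guard is an illustrative addition, not part of the patch.
final class MergeWait {
	static void forceMergeAndWait(EndpointStore store, long timeoutMillis) throws Exception {
		store.mergeStore(); // trigger the merge of the delta store into the HDT
		long deadline = System.currentTimeMillis() + timeoutMillis;
		while (store.isMergeTriggered || store.isMerging()) {
			if (System.currentTimeMillis() > deadline) {
				throw new IllegalStateException("merge did not finish within " + timeoutMillis + " ms");
			}
			Thread.onSpinWait(); // busy-wait, as the test does
		}
	}
}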
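
The Utility.writeBigIndex change above only wraps the FileOutputStream in a BufferedOutputStream before handing it to Rio, so the N-Triples writer no longer issues one small unbuffered write per statement. The same pattern in isolation is sketched below; the output file name, the example triples and SimpleValueFactory (the test uses MemValueFactory) are placeholders.

import java.io.BufferedOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;

import org.eclipse.rdf4j.model.ValueFactory;
import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
import org.eclipse.rdf4j.rio.RDFFormat;
import org.eclipse.rdf4j.rio.RDFWriter;
import org.eclipse.rdf4j.rio.Rio;

public class BufferedNTriplesDump {
	public static void main(String[] args) throws IOException {
		ValueFactory vf = SimpleValueFactory.getInstance();
		// buffer the stream so Rio does not hit the file system for every statement
		try (OutputStream out = new BufferedOutputStream(new FileOutputStream("big-index.nt"))) {
			RDFWriter writer = Rio.createWriter(RDFFormat.NTRIPLES, out);
			writer.startRDF();
			for (int i = 1; i <= 1_000; i++) {
				writer.handleStatement(vf.createStatement(vf.createIRI("http://example.org/s" + i),
						vf.createIRI("http://example.org/p"), vf.createLiteral(i)));
			}
			writer.endRDF();
		}
	}
}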
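
Both new experimental compliance tests build their repository the same way: an ExperimentalQEndpointSail over a temporary directory, configured with the multi-object/language dictionary, wrapped in a SailRepository. A small stand-alone usage sketch of that setup follows; the temporary directory, the sample data and the query are placeholders, and it assumes the experimental sail accepts ordinary RDF4J transactions, which is what the update compliance tests exercise.

import java.nio.file.Files;
import java.nio.file.Path;

import org.eclipse.rdf4j.model.ValueFactory;
import org.eclipse.rdf4j.query.QueryLanguage;
import org.eclipse.rdf4j.query.TupleQueryResult;
import org.eclipse.rdf4j.repository.Repository;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.repository.sail.SailRepository;

import com.the_qa_company.qendpoint.core.options.HDTOptions;
import com.the_qa_company.qendpoint.core.options.HDTOptionsKeys;
import com.the_qa_company.qendpoint.store.experimental.ExperimentalQEndpointSail;

public class ExperimentalSailDemo {
	public static void main(String[] args) throws Exception {
		Path dir = Files.createTempDirectory("qep-experimental");
		// same dictionary configuration as the new compliance tests
		HDTOptions spec = HDTOptions.of(HDTOptionsKeys.DICTIONARY_TYPE_KEY,
				HDTOptionsKeys.DICTIONARY_TYPE_VALUE_MULTI_OBJECTS_LANG);
		Repository repo = new SailRepository(new ExperimentalQEndpointSail(dir, spec));
		try {
			try (RepositoryConnection conn = repo.getConnection()) {
				ValueFactory vf = conn.getValueFactory();
				conn.add(vf.createIRI("http://example.org/s"), vf.createIRI("http://example.org/p"),
						vf.createLiteral("hello"));
				try (TupleQueryResult result = conn
						.prepareTupleQuery(QueryLanguage.SPARQL, "SELECT * WHERE { ?s ?p ?o }").evaluate()) {
					while (result.hasNext()) {
						System.out.println(result.next());
					}
				}
			}
		} finally {
			repo.shutDown();
		}
	}
}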