From 8e8ca11deca896bd97f28b5cfc59d522d8aad370 Mon Sep 17 00:00:00 2001 From: Stanislav Malyshev Date: Wed, 12 Dec 2018 16:14:52 -0800 Subject: [PATCH 1/2] Upgrade to Sesame 2.8.11 Bug: T200612 Change-Id: Ic4e05661010dc103d46357778d9e94872285ee2f --- .../remote/AbstractBigdataRemoteQuery.java | 4 +- ...BigdataSailRemoteRepositoryConnection.java | 33 +- .../sail/webapp/client/AST2SPARQLUtil.java | 13 +- .../sail/webapp/client/EncodeDecodeValue.java | 21 +- .../engine/AbstractQueryEngineTestCase.java | 10 +- .../bop/solutions/TestIVComparator.java | 19 +- .../counters/TestHistoryInstrument.java | 2 +- .../com/bigdata/bop/rdf/aggregate/COUNT.java | 4 + .../java/com/bigdata/rdf/internal/DTE.java | 5 + .../rdf/internal/constraints/BNodeBOp.java | 9 +- .../rdf/internal/constraints/ConcatBOp.java | 44 +- .../rdf/internal/constraints/DigestBOp.java | 5 +- .../rdf/internal/constraints/RegexBOp.java | 13 +- .../rdf/internal/constraints/StrAfterBOp.java | 50 +- .../internal/constraints/StrBeforeBOp.java | 50 +- .../rdf/internal/constraints/StrdtBOp.java | 7 +- .../rdf/internal/constraints/StrlangBOp.java | 7 +- .../literal/FullyInlineTypedLiteralIV.java | 9 +- .../BigdataValueCentricFullTextIndex.java | 8 +- .../rdf/lexicon/LexiconKeyBuilder.java | 9 +- .../bigdata/rdf/lexicon/LexiconRelation.java | 5 +- .../rdf/model/BNodeContextFactory.java | 10 + .../bigdata/rdf/model/BigdataLiteralImpl.java | 49 +- .../rdf/model/BigdataValueFactory.java | 9 + .../rdf/model/BigdataValueFactoryImpl.java | 21 +- .../rdf/model/BigdataValueSerializer.java | 6 +- .../rdf/rio/turtle/BigdataTurtleParser.java | 19 +- .../ast/eval/GeoSpatialServiceFactory.java | 5 +- .../optimizers/ASTPropertyPathOptimizer.java | 8 +- .../com/bigdata/rdf/vocab/RDFSVocabulary.java | 1 - .../com/bigdata/rdf/sail/BigdataSail.java | 36 +- .../SPARQLStarUpdateDataBlockParser.java | 19 +- .../bigdata/bop/solutions/IVComparator.java | 81 +-- bigdata-rdf-test/pom.xml | 1 + .../rdf/lexicon/TestFullTextIndex.java | 13 + .../com/bigdata/rdf/model/TestFactory.java | 6 +- .../ast/eval/TestPipelinedHashJoin.java | 35 +- .../rdf/sparql/ast/eval/ticket_832a.trig | 26 +- .../rdf/sparql/ast/eval/ticket_835.trig | 8 +- .../rdf/sparql/ast/eval/ticket_bg911.trig | 8 +- .../bigdata/rdf/store/TestTripleStore.java | 3 +- bigdata-sails-test/pom.xml | 1 - .../com/bigdata/rdf/sail/TestLexJoinOps.java | 22 +- .../com/bigdata/rdf/sail/TestTicket1893.java | 16 +- .../sail/tck/BigdataSPARQLUpdateTxTest.java | 7 + .../rdf/sail/tck/BigdataSparqlTest.java | 17 + .../parser/sparql/ComplexSPARQLQueryTest.java | 615 +++++++++++++++++- .../query/parser/sparql/SPARQLDataSet.java | 59 ++ .../query/parser/sparql/SPARQLUpdateTest.java | 319 ++++++--- .../parser/sparql/manifest/ManifestTest.java | 194 ++++++ .../sparql/manifest/SPARQL11SyntaxTest.java | 325 +++++++++ .../sparql/manifest/SPARQLQueryTest.java | 99 +-- .../sparql/manifest/SPARQLSyntaxTest.java | 277 ++++++++ .../manifest/SPARQLUpdateConformanceTest.java | 124 ++-- bigdata-war/pom.xml | 6 +- blazegraph-deb/pom.xml | 2 +- blazegraph-rpm/pom.xml | 2 +- blazegraph-tgz/pom.xml | 2 +- blazegraph-war/pom.xml | 6 +- pom.xml | 12 +- rdf-properties/pom.xml | 6 +- .../services/org.openrdf.rio.RDFParserFactory | 2 +- .../services/org.openrdf.rio.RDFWriterFactory | 2 +- 63 files changed, 2265 insertions(+), 541 deletions(-) create mode 100644 bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/SPARQLDataSet.java create mode 100644 bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/manifest/ManifestTest.java create mode 100644 bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/manifest/SPARQL11SyntaxTest.java create mode 100644 bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/manifest/SPARQLSyntaxTest.java diff --git a/bigdata-client/src/main/java/com/bigdata/rdf/sail/remote/AbstractBigdataRemoteQuery.java b/bigdata-client/src/main/java/com/bigdata/rdf/sail/remote/AbstractBigdataRemoteQuery.java index 590278fb2c..500175a645 100644 --- a/bigdata-client/src/main/java/com/bigdata/rdf/sail/remote/AbstractBigdataRemoteQuery.java +++ b/bigdata-client/src/main/java/com/bigdata/rdf/sail/remote/AbstractBigdataRemoteQuery.java @@ -27,8 +27,8 @@ protected void configureConnectOptions(IPreparedQuery q) { } q.addRequestParam(RemoteRepositoryDecls.INCLUDE_INFERRED, Boolean.toString(includeInferred)); - if (maxQueryTime > 0) { - q.addRequestParam(RemoteRepositoryDecls.MAX_QUERY_TIME_MILLIS, Long.toString(1000L*maxQueryTime)); + if (getMaxExecutionTime() > 0) { + q.addRequestParam(RemoteRepositoryDecls.MAX_QUERY_TIME_MILLIS, Long.toString(1000L*getMaxExecutionTime())); } if (dataset != null) { diff --git a/bigdata-client/src/main/java/com/bigdata/rdf/sail/remote/BigdataSailRemoteRepositoryConnection.java b/bigdata-client/src/main/java/com/bigdata/rdf/sail/remote/BigdataSailRemoteRepositoryConnection.java index dc80ad4445..58b468e785 100644 --- a/bigdata-client/src/main/java/com/bigdata/rdf/sail/remote/BigdataSailRemoteRepositoryConnection.java +++ b/bigdata-client/src/main/java/com/bigdata/rdf/sail/remote/BigdataSailRemoteRepositoryConnection.java @@ -37,6 +37,7 @@ import java.util.concurrent.atomic.AtomicReference; import org.apache.log4j.Logger; +import org.openrdf.IsolationLevel; import org.openrdf.model.Graph; import org.openrdf.model.Namespace; import org.openrdf.model.Resource; @@ -753,7 +754,7 @@ public Update prepareUpdate(final QueryLanguage ql, final String query) * Only execute() is currently supported. */ return new Update() { - + private int maxExecutionTime; @Override public void execute() throws UpdateExecutionException { try { @@ -797,7 +798,17 @@ public void setDataset(Dataset arg0) { public boolean getIncludeInferred() { throw new UnsupportedOperationException(); } - + + @Override + public void setMaxExecutionTime(int i) { + this.maxExecutionTime = i; + } + + @Override + public int getMaxExecutionTime() { + return maxExecutionTime; + } + @Override public void setIncludeInferred(boolean arg0) { throw new UnsupportedOperationException(); @@ -1014,6 +1025,16 @@ public boolean isActive() throws UnknownTransactionStateException, } } + @Override + public void setIsolationLevel(IsolationLevel isolationLevel) throws IllegalStateException { + // TODO: what do we do here? + } + + @Override + public IsolationLevel getIsolationLevel() { + return null; + } + @Override public void begin() throws RepositoryException { assertOpen(); // non-blocking. @@ -1031,7 +1052,13 @@ public void begin() throws RepositoryException { } } - /** + @Override + public void begin(IsolationLevel isolationLevel) throws RepositoryException { + // There's only one isolation level supported - snapshot isolation + begin(); + } + + /** * Begin a read-only transaction. Since all read operations have snapshot * isolation, this is only necessary when multiple read operations need to * read on the same commit point. diff --git a/bigdata-client/src/main/java/com/bigdata/rdf/sail/webapp/client/AST2SPARQLUtil.java b/bigdata-client/src/main/java/com/bigdata/rdf/sail/webapp/client/AST2SPARQLUtil.java index ede26ed5f1..184cf31575 100644 --- a/bigdata-client/src/main/java/com/bigdata/rdf/sail/webapp/client/AST2SPARQLUtil.java +++ b/bigdata-client/src/main/java/com/bigdata/rdf/sail/webapp/client/AST2SPARQLUtil.java @@ -1,6 +1,6 @@ /** -Copyright (C) SYSTAP, LLC DBA Blazegraph 2006-2016. All rights reserved. +Copyright (C) SYSTAP, LLC DBA Blazegraph 2006-2016. All rights reserved. Contact: SYSTAP, LLC DBA Blazegraph @@ -162,13 +162,12 @@ public String toExternal(final Literal lit) { if (languageCode != null) { sb.append('@'); sb.append(languageCode); + } else { + if (datatypeURI != null && !XMLSchema.STRING.equals(datatypeURI)) { + sb.append("^^"); + sb.append(datatypeStr); + } } - - if (datatypeURI != null) { - sb.append("^^"); - sb.append(datatypeStr); - } - return sb.toString(); } diff --git a/bigdata-client/src/main/java/com/bigdata/rdf/sail/webapp/client/EncodeDecodeValue.java b/bigdata-client/src/main/java/com/bigdata/rdf/sail/webapp/client/EncodeDecodeValue.java index 441668def0..6e76f561e8 100644 --- a/bigdata-client/src/main/java/com/bigdata/rdf/sail/webapp/client/EncodeDecodeValue.java +++ b/bigdata-client/src/main/java/com/bigdata/rdf/sail/webapp/client/EncodeDecodeValue.java @@ -1,6 +1,6 @@ /** -Copyright (C) SYSTAP, LLC DBA Blazegraph 2006-2016. All rights reserved. +Copyright (C) SYSTAP, LLC DBA Blazegraph 2006-2016. All rights reserved. Contact: SYSTAP, LLC DBA Blazegraph @@ -34,6 +34,8 @@ import org.openrdf.model.Value; import org.openrdf.model.impl.LiteralImpl; import org.openrdf.model.impl.URIImpl; +import org.openrdf.model.vocabulary.RDF; +import org.openrdf.model.vocabulary.XMLSchema; /** * Utility class to encode/decode RDF {@link Value}s for interchange with the @@ -413,16 +415,29 @@ public static String encodeValue(final Value v) { if (v instanceof Literal) { final Literal lit = (Literal) v; final StringBuilder sb = new StringBuilder(); + URI datatype = lit.getDatatype(); sb.append("\""); sb.append(lit.getLabel()); sb.append("\""); if (lit.getLanguage() != null) { sb.append("@"); sb.append(lit.getLanguage()); + if (RDF.LANGSTRING.equals(datatype)) { + datatype = null; + } else { + if (datatype != null) { + // This violates RDF 1.1, language literals should have LangString type. + throw new IllegalArgumentException("Language literals must be rdf:langString"); + } + } + } else { + if (XMLSchema.STRING.equals(datatype)) { + datatype = null; + } } - if (lit.getDatatype() != null) { + if (datatype != null) { sb.append("^^"); - sb.append(encodeValue(lit.getDatatype())); + sb.append(encodeValue(datatype)); } return sb.toString(); } diff --git a/bigdata-core-test/bigdata/src/test/com/bigdata/bop/engine/AbstractQueryEngineTestCase.java b/bigdata-core-test/bigdata/src/test/com/bigdata/bop/engine/AbstractQueryEngineTestCase.java index d3cf24522c..03e40d4746 100644 --- a/bigdata-core-test/bigdata/src/test/com/bigdata/bop/engine/AbstractQueryEngineTestCase.java +++ b/bigdata-core-test/bigdata/src/test/com/bigdata/bop/engine/AbstractQueryEngineTestCase.java @@ -772,7 +772,13 @@ static public void compareTupleQueryResults( message.append("\n============ "); message.append(name); message.append(" =======================\n"); - message.append("Expected result: \n"); + message.append("Expected result [") + .append(expectedResultTable.size()) + .append("] not equal to query result [") + .append(queryResultTable.size()) + .append("] \n"); + message.append(" =======================\n"); + message.append("Expected result [").append(expectedResultTable.size()).append("]: \n"); while (expectedResultTable.hasNext()) { message.append(expectedResultTable.next()); message.append("\n"); @@ -780,7 +786,7 @@ static public void compareTupleQueryResults( message.append("============="); StringUtil.appendN('=', name.length(), message); message.append("========================\n"); - message.append("Query result: \n"); + message.append("Query result [").append(queryResultTable.size()).append("]: \n"); while (queryResultTable.hasNext()) { message.append(queryResultTable.next()); message.append("\n"); diff --git a/bigdata-core-test/bigdata/src/test/com/bigdata/bop/solutions/TestIVComparator.java b/bigdata-core-test/bigdata/src/test/com/bigdata/bop/solutions/TestIVComparator.java index 9d1e1bddba..efad2ee049 100644 --- a/bigdata-core-test/bigdata/src/test/com/bigdata/bop/solutions/TestIVComparator.java +++ b/bigdata-core-test/bigdata/src/test/com/bigdata/bop/solutions/TestIVComparator.java @@ -1,6 +1,6 @@ /** -Copyright (C) SYSTAP, LLC DBA Blazegraph 2006-2016. All rights reserved. +Copyright (C) SYSTAP, LLC DBA Blazegraph 2006-2016. All rights reserved. Contact: SYSTAP, LLC DBA Blazegraph @@ -147,8 +147,8 @@ public BigdataURI resolve(final URI uri) { noninline_languageCode_en_lit2.setValue(f.createLiteral("systap","en")); noninline_languageCode_de_lit1.setValue(f.createLiteral("bigdata","de")); noninline_languageCode_de_lit2.setValue(f.createLiteral("systap","de")); - noninline_xsd_string_lit1.setValue(f.createLiteral("bigdata",XSD.STRING)); - noninline_xsd_string_lit2.setValue(f.createLiteral("systap",XSD.STRING)); + noninline_xsd_string_lit1.setValue(f.createLiteral("bigdata", XSD.STRING)); + noninline_xsd_string_lit2.setValue(f.createLiteral("systap", XSD.STRING)); noninline_uri1.setValue(f.createURI("http://www.bigdata.com/")); noninline_uri2.setValue(f.createURI("http://www.bigdata.com/blog/")); @@ -287,8 +287,7 @@ public void test_uri_ordering() { } /** - * Unit test of the broad ordering of literals (plain LT language code LT - * datatype). + * Unit test of the broad ordering of literals (plain LT language code). */ public void test_literal_ordering_plain_languageCode_datatype() { @@ -299,9 +298,6 @@ public void test_literal_ordering_plain_languageCode_datatype() { // plain LT languageCode assertLT(c.compare(v.noninline_plain_lit1, v.noninline_languageCode_de_lit1)); - // languageCode LT datatype - assertLT(c.compare(v.noninline_plain_lit1, v.noninline_xsd_string_lit1)); - } /** @@ -351,6 +347,7 @@ public void test_languageCode_ordering() { * - xsd:string * - RDF term (equal and unequal only) * + * However, since in RDF 1.1 simple strings are xsd:string, all strings are sorted first. */ public void test_datatype_ordering() { @@ -359,14 +356,18 @@ public void test_datatype_ordering() { final IVComparator c = new IVComparator(); // plain literal LT numeric + assertLT(c.compare(v.noninline_languageCode_en_lit1, v.inline_xsd_int1)); assertLT(c.compare(v.noninline_plain_lit1, v.inline_xsd_int1)); // numeric LT boolean assertLT(c.compare(v.inline_xsd_int1, v.inline_xsd_boolean_true)); +// assertLT(c.compare(v.inline_xsd_int1, v.inline_xsd_dateTime1)); // assertLT(c.compare(v.inline_xsd_boolean_true, v.inline_xsd_dateTime1)); - assertLT(c.compare(v.inline_xsd_dateTime1, v.noninline_xsd_string_lit1)); + // In RDF 1.1, xsd:string is simple string + assertLT(c.compare(v.noninline_xsd_string_lit1, v.inline_xsd_dateTime1)); + assertLT(c.compare(v.noninline_plain_lit1, v.inline_xsd_dateTime1)); } diff --git a/bigdata-core-test/bigdata/src/test/com/bigdata/counters/TestHistoryInstrument.java b/bigdata-core-test/bigdata/src/test/com/bigdata/counters/TestHistoryInstrument.java index 25f00f4bcc..380bec2c72 100644 --- a/bigdata-core-test/bigdata/src/test/com/bigdata/counters/TestHistoryInstrument.java +++ b/bigdata-core-test/bigdata/src/test/com/bigdata/counters/TestHistoryInstrument.java @@ -83,7 +83,7 @@ public void test_history01() { if(log.isInfoEnabled()) log.info("\n"+h.toString()); // add the first sample. - h.add(t0,12d); + h.add(t0+1,12d); assertEquals(1,h.size()); assertEquals(60,h.capacity()); diff --git a/bigdata-core/bigdata-rdf/src/java/com/bigdata/bop/rdf/aggregate/COUNT.java b/bigdata-core/bigdata-rdf/src/java/com/bigdata/bop/rdf/aggregate/COUNT.java index 8feff08728..82c2b5d6f2 100644 --- a/bigdata-core/bigdata-rdf/src/java/com/bigdata/bop/rdf/aggregate/COUNT.java +++ b/bigdata-core/bigdata-rdf/src/java/com/bigdata/bop/rdf/aggregate/COUNT.java @@ -110,6 +110,10 @@ private IV doGet(final IBindingSet bindingSet) { final IValueExpression expr = (IValueExpression) get(0); if (expr instanceof IVariable && ((IVariable) expr).isWildcard()) { + // Do not count empty binding sets + if (bindingSet.isEmpty()) { + return null; + } // Do not attempt to evaluate "*". aggregated++; return null; diff --git a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/DTE.java b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/DTE.java index c8efe736be..c57974a54b 100644 --- a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/DTE.java +++ b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/DTE.java @@ -33,6 +33,7 @@ import org.openrdf.model.URI; import org.openrdf.model.Value; +import org.openrdf.model.vocabulary.RDF; import com.bigdata.rdf.internal.impl.BlobIV; import com.bigdata.rdf.lexicon.LexiconRelation; @@ -379,6 +380,10 @@ static final public DTE valueOf(final URI datatype) { return UUID; if (datatype.equals(XSD.STRING)) return XSDString; + if (datatype.equals(RDF.LANGSTRING)) + // Sesame 2.8 upgrade: RDF.LANGSTRING is converted to DTE.XSDString with termCode TERM_CODE_LCL + // so DTE and lang tag is stored in the key and decoded back in IVUtility.decodeInlineUnicodeLiteral + return XSDString; /* * Not a known DTE datatype. */ diff --git a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/BNodeBOp.java b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/BNodeBOp.java index 4e2871c095..8eee0739bd 100644 --- a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/BNodeBOp.java +++ b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/BNodeBOp.java @@ -1,6 +1,6 @@ /** -Copyright (C) SYSTAP, LLC DBA Blazegraph 2006-2016. All rights reserved. +Copyright (C) SYSTAP, LLC DBA Blazegraph 2006-2016. All rights reserved. Contact: SYSTAP, LLC DBA Blazegraph @@ -26,14 +26,13 @@ import java.util.Map; import org.openrdf.model.Literal; -import org.openrdf.model.URI; +import org.openrdf.query.algebra.evaluation.util.QueryEvaluationUtil; import com.bigdata.bop.BOp; import com.bigdata.bop.IBindingSet; import com.bigdata.bop.IValueExpression; import com.bigdata.rdf.error.SparqlTypeErrorException; import com.bigdata.rdf.internal.IV; -import com.bigdata.rdf.internal.XSD; import com.bigdata.rdf.model.BigdataBNode; import com.bigdata.rdf.sparql.ast.GlobalAnnotations; @@ -82,9 +81,7 @@ public IV get(final IBindingSet bs) throws SparqlTypeErrorException { final Literal lit = getAndCheckLiteralValue(0, bs); - final URI dt = lit.getDatatype(); - - if (dt != null && !dt.stringValue().equals(XSD.STRING.stringValue())) + if (!QueryEvaluationUtil.isStringLiteral(lit)) throw new SparqlTypeErrorException(); final BigdataBNode bnode = getValueFactory().createBNode( diff --git a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/ConcatBOp.java b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/ConcatBOp.java index e7a3416c4d..cac00f8bb3 100644 --- a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/ConcatBOp.java +++ b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/ConcatBOp.java @@ -88,27 +88,37 @@ public IV get(final IBindingSet bs) { label = lit.getLabel(); if (lit.getDatatype() != null) { if (lang != null) { - allSame = false; - } else if (datatype == null) { + allSame = lit.getDatatype().equals(datatype) && lang.equals(lit.getLanguage()); + } else { if (i == 0) { - datatype = lit.getDatatype(); - } else { - allSame = false; + lang = lit.getLanguage(); } - } else if (!datatype.equals(lit.getDatatype())) { - allSame = false; + if (datatype == null) { + if (i == 0) { + datatype = lit.getDatatype(); + } else { + allSame = false; + } + } else if (!datatype.equals(lit.getDatatype())) { + allSame = false; + } } } else if (lit.getLanguage() != null) { if (datatype != null) { - allSame = false; - } else if (lang == null) { + allSame = lit.getLanguage().equals(lang) && datatype.equals(lit.getDatatype()); + } else { if (i == 0) { - lang = lit.getLanguage(); - } else { - allSame = false; + datatype = lit.getDatatype(); } - } else if (!lang.equals(lit.getLanguage())) { - allSame = false; + if (lang == null) { + if (i == 0) { + lang = lit.getLanguage(); + } else { + allSame = false; + } + } else if (!lang.equals(lit.getLanguage())) { + allSame = false; + } } } else { allSame = false; @@ -119,10 +129,10 @@ public IV get(final IBindingSet bs) { sb.append(label); } if (allSame) { - if (datatype != null) { - return super.asIV(getValueFactory().createLiteral(sb.toString(),datatype), bs); - } else if (lang != null) { + if (lang != null) { return super.asIV(getValueFactory().createLiteral(sb.toString(),lang), bs); + } else if (datatype != null) { + return super.asIV(getValueFactory().createLiteral(sb.toString(),datatype), bs); } } return super.asIV(getValueFactory().createLiteral(sb.toString()), bs); diff --git a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/DigestBOp.java b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/DigestBOp.java index bfaf945423..577e761d7a 100644 --- a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/DigestBOp.java +++ b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/DigestBOp.java @@ -27,6 +27,7 @@ import java.util.Map; import org.openrdf.model.Literal; +import org.openrdf.query.algebra.evaluation.util.QueryEvaluationUtil; import com.bigdata.bop.BOp; import com.bigdata.bop.IBindingSet; @@ -146,9 +147,7 @@ public IV get(final IBindingSet bs) throws SparqlTypeErrorException { //Recreate since they are not thread safe MessageDigest md = null; final Literal lit = asLiteral(iv); - if (lit.getLanguage() == null && - (lit.getDatatype() == null || lit.getDatatype().equals(XSD.STRING))) { - + if (QueryEvaluationUtil.isStringLiteral(lit)) { try { String label = lit.getLabel(); switch (op()) { diff --git a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/RegexBOp.java b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/RegexBOp.java index 94441e4979..fe6f0601c3 100644 --- a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/RegexBOp.java +++ b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/RegexBOp.java @@ -191,7 +191,7 @@ public boolean accept(final IBindingSet bs) { : null); } - + /** * Lifted directly from Sesame's EvaluationStrategyImpl. * @@ -216,13 +216,13 @@ private boolean accept(final Value arg, final Value parg, final Value farg) { } //BLZG-1200 changed to isPlainLiteral - if (QueryEvaluationUtil.isPlainLiteral(arg) + if (QueryEvaluationUtil.isStringLiteral(arg) // BLZG-1780: Query Hint to cast to string || matchNonString ) { final String text; - if(QueryEvaluationUtil.isPlainLiteral(arg)) { + if(QueryEvaluationUtil.isStringLiteral(arg)) { text = ((Literal) arg).getLabel(); } else { //Query Hint Override with explicit conversion text = arg.stringValue(); @@ -291,8 +291,8 @@ private static Pattern getPattern(final Value parg, final Value farg) } //BLZG-1200 Literals with language types are not included in REGEX - if (QueryEvaluationUtil.isPlainLiteral(parg) - && (farg == null || QueryEvaluationUtil.isPlainLiteral(farg))) { + if (QueryEvaluationUtil.isStringLiteral(parg) + && (farg == null || QueryEvaluationUtil.isStringLiteral(farg))) { final String ptn = ((Literal) parg).getLabel(); String flags = ""; @@ -341,8 +341,7 @@ private static Pattern getPattern(final Value parg, final Value farg) throw new IllegalArgumentException(); } } - final Pattern pattern = Pattern.compile(ptn, f); - return pattern; + return Pattern.compile(ptn, f); } throw new IllegalArgumentException(); diff --git a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/StrAfterBOp.java b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/StrAfterBOp.java index 8b0f33a350..cb1f83bf76 100644 --- a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/StrAfterBOp.java +++ b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/StrAfterBOp.java @@ -1,6 +1,6 @@ /* -Copyright (C) SYSTAP, LLC DBA Blazegraph 2006-2016. All rights reserved. +Copyright (C) SYSTAP, LLC DBA Blazegraph 2006-2016. All rights reserved. Contact: SYSTAP, LLC DBA Blazegraph @@ -28,14 +28,13 @@ import org.openrdf.model.Literal; import org.openrdf.model.URI; +import org.openrdf.query.algebra.evaluation.util.QueryEvaluationUtil; import com.bigdata.bop.BOp; import com.bigdata.bop.IBindingSet; import com.bigdata.bop.IValueExpression; -import com.bigdata.bop.NV; import com.bigdata.rdf.error.SparqlTypeErrorException; import com.bigdata.rdf.internal.IV; -import com.bigdata.rdf.internal.XSD; import com.bigdata.rdf.model.BigdataLiteral; import com.bigdata.rdf.sparql.ast.GlobalAnnotations; @@ -161,48 +160,9 @@ private IV ret(final Literal arg1, final String label, final IBindingSet bs) { private void checkCompatibility(final Literal arg1, final Literal arg2) throws SparqlTypeErrorException { - checkLanguage(arg1, arg2); - - checkDatatype(arg1, arg2); - - } - - private void checkLanguage(final Literal arg1, final Literal arg2) - throws SparqlTypeErrorException { - - final String lang1 = arg1.getLanguage(); - - final String lang2 = arg2.getLanguage(); - - if (lang1 == null && lang2 == null) - return; - - if (lang1 != null && lang2 == null) - return; - - if (lang1 == null && lang2 != null) - throw new SparqlTypeErrorException(); - - // both non-null, must be the same - if (!lang1.equals(lang2)) - throw new SparqlTypeErrorException(); - - } - - private void checkDatatype(final Literal arg1, final Literal arg2) - throws SparqlTypeErrorException { - - final URI dt1 = arg1.getDatatype(); - - final URI dt2 = arg2.getDatatype(); - - if (dt1 != null && !dt1.stringValue().equals(XSD.STRING.stringValue())) - throw new SparqlTypeErrorException(); - - if (dt2 != null && !dt2.stringValue().equals(XSD.STRING.stringValue())) - throw new SparqlTypeErrorException(); - - + if (!QueryEvaluationUtil.compatibleArguments(arg1, arg2)) { + throw new SparqlTypeErrorException(); + } } } diff --git a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/StrBeforeBOp.java b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/StrBeforeBOp.java index 31dccc6e49..8f25d37110 100644 --- a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/StrBeforeBOp.java +++ b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/StrBeforeBOp.java @@ -1,6 +1,6 @@ /* -Copyright (C) SYSTAP, LLC DBA Blazegraph 2006-2016. All rights reserved. +Copyright (C) SYSTAP, LLC DBA Blazegraph 2006-2016. All rights reserved. Contact: SYSTAP, LLC DBA Blazegraph @@ -28,14 +28,13 @@ import org.openrdf.model.Literal; import org.openrdf.model.URI; +import org.openrdf.query.algebra.evaluation.util.QueryEvaluationUtil; import com.bigdata.bop.BOp; import com.bigdata.bop.IBindingSet; import com.bigdata.bop.IValueExpression; -import com.bigdata.bop.NV; import com.bigdata.rdf.error.SparqlTypeErrorException; import com.bigdata.rdf.internal.IV; -import com.bigdata.rdf.internal.XSD; import com.bigdata.rdf.model.BigdataLiteral; import com.bigdata.rdf.sparql.ast.GlobalAnnotations; @@ -158,48 +157,9 @@ private IV ret(final Literal arg1, final String label, final IBindingSet bs) { private void checkCompatibility(final Literal arg1, final Literal arg2) throws SparqlTypeErrorException { - checkLanguage(arg1, arg2); - - checkDatatype(arg1, arg2); - - } - - private void checkLanguage(final Literal arg1, final Literal arg2) - throws SparqlTypeErrorException { - - final String lang1 = arg1.getLanguage(); - - final String lang2 = arg2.getLanguage(); - - if (lang1 == null && lang2 == null) - return; - - if (lang1 != null && lang2 == null) - return; - - if (lang1 == null && lang2 != null) - throw new SparqlTypeErrorException(); - - // both non-null, must be the same - if (!lang1.equals(lang2)) - throw new SparqlTypeErrorException(); - + if (!QueryEvaluationUtil.compatibleArguments(arg1, arg2)) { + throw new SparqlTypeErrorException(); + } } - private void checkDatatype(final Literal arg1, final Literal arg2) - throws SparqlTypeErrorException { - - final URI dt1 = arg1.getDatatype(); - - final URI dt2 = arg2.getDatatype(); - - if (dt1 != null && !dt1.stringValue().equals(XSD.STRING.stringValue())) - throw new SparqlTypeErrorException(); - - if (dt2 != null && !dt2.stringValue().equals(XSD.STRING.stringValue())) - throw new SparqlTypeErrorException(); - - - } - } diff --git a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/StrdtBOp.java b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/StrdtBOp.java index 5e8a3b1557..c98cdecb21 100644 --- a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/StrdtBOp.java +++ b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/StrdtBOp.java @@ -26,6 +26,8 @@ import java.util.Map; import org.openrdf.model.Literal; +import org.openrdf.model.vocabulary.RDF; +import org.openrdf.query.algebra.evaluation.util.QueryEvaluationUtil; import com.bigdata.bop.BOp; import com.bigdata.bop.IBindingSet; @@ -33,6 +35,7 @@ import com.bigdata.rdf.error.SparqlTypeErrorException; import com.bigdata.rdf.internal.IV; import com.bigdata.rdf.internal.NotMaterializedException; +import com.bigdata.rdf.internal.XSD; import com.bigdata.rdf.model.BigdataLiteral; import com.bigdata.rdf.model.BigdataURI; import com.bigdata.rdf.sparql.ast.GlobalAnnotations; @@ -77,8 +80,8 @@ public IV get(final IBindingSet bs) throws SparqlTypeErrorException { final BigdataURI dt = (BigdataURI) asValue(datatype); final Literal lit = asLiteral(iv); - - if (lit.getDatatype() != null || lit.getLanguage() != null) { + + if (!QueryEvaluationUtil.isSimpleLiteral(lit)) { throw new SparqlTypeErrorException(); } diff --git a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/StrlangBOp.java b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/StrlangBOp.java index 18181f5c62..33df100cbe 100644 --- a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/StrlangBOp.java +++ b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/StrlangBOp.java @@ -26,6 +26,7 @@ import java.util.Map; import org.openrdf.model.Literal; +import org.openrdf.query.algebra.evaluation.util.QueryEvaluationUtil; import com.bigdata.bop.BOp; import com.bigdata.bop.IBindingSet; @@ -33,6 +34,7 @@ import com.bigdata.bop.NV; import com.bigdata.rdf.error.SparqlTypeErrorException; import com.bigdata.rdf.internal.IV; +import com.bigdata.rdf.internal.XSD; import com.bigdata.rdf.model.BigdataLiteral; import com.bigdata.rdf.sparql.ast.GlobalAnnotations; @@ -68,7 +70,10 @@ public IV get(final IBindingSet bs) throws SparqlTypeErrorException { final Literal lit = getAndCheckLiteralValue(0, bs); - if (lit.getDatatype() != null || lit.getLanguage() != null) { + if (!QueryEvaluationUtil.isSimpleLiteral(lit)) { +// if (lit.getDatatype() != null +// && !XSD.STRING.equals(lit.getDatatype()) +// || lit.getLanguage() != null) { throw new SparqlTypeErrorException(); } diff --git a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/impl/literal/FullyInlineTypedLiteralIV.java b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/impl/literal/FullyInlineTypedLiteralIV.java index 25abd6bc75..1c021924ef 100644 --- a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/impl/literal/FullyInlineTypedLiteralIV.java +++ b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/internal/impl/literal/FullyInlineTypedLiteralIV.java @@ -26,6 +26,7 @@ import org.openrdf.model.Literal; import org.openrdf.model.URI; +import org.openrdf.model.vocabulary.RDF; import com.bigdata.rdf.internal.DTE; import com.bigdata.rdf.internal.IInlineUnicode; @@ -155,16 +156,18 @@ public FullyInlineTypedLiteralIV(final String label, final String languageCode, if (label == null) throw new IllegalArgumentException(); - if (languageCode != null && datatypeURI != null) + boolean isLangString = RDF.LANGSTRING.equals(datatypeURI); + + if (languageCode != null && datatypeURI != null && !isLangString) throw new IllegalArgumentException(); this.label = label; this.language = languageCode; - this.datatype = datatypeURI; + this.datatype = isLangString ? null : datatypeURI; - if (datatypeURI != null) { + if (this.datatype != null) { this.termCode = ITermIndexCodes.TERM_CODE_DTL; } else if (languageCode != null) { this.termCode = ITermIndexCodes.TERM_CODE_LCL; diff --git a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/lexicon/BigdataValueCentricFullTextIndex.java b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/lexicon/BigdataValueCentricFullTextIndex.java index 255577a115..c9317698d2 100644 --- a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/lexicon/BigdataValueCentricFullTextIndex.java +++ b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/lexicon/BigdataValueCentricFullTextIndex.java @@ -38,6 +38,8 @@ import org.apache.log4j.Logger; import org.openrdf.model.Literal; +import org.openrdf.model.vocabulary.RDF; +import org.openrdf.query.algebra.evaluation.util.QueryEvaluationUtil; import com.bigdata.btree.DefaultTupleSerializer; import com.bigdata.btree.IndexMetadata; @@ -50,6 +52,7 @@ import com.bigdata.journal.ITx; import com.bigdata.journal.TimestampUtility; import com.bigdata.rdf.internal.IV; +import com.bigdata.rdf.internal.XSD; import com.bigdata.rdf.model.BigdataValue; import com.bigdata.rdf.store.AbstractTripleStore; import com.bigdata.search.FullTextIndex; @@ -267,7 +270,10 @@ public void index(final int capacity, final Literal lit = (Literal) val; - if (!indexDatatypeLiterals && lit.getDatatype() != null) { + if (!indexDatatypeLiterals && lit.getDatatype() != null + // Since Sesame 2.8 upgrade, xsd:string and rdf:langString literals are processed + // the same as plain literals, so we are not considering them as datatyped for the FTS + && !QueryEvaluationUtil.isStringLiteral(lit)) { // do not index datatype literals in this manner. continue; diff --git a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/lexicon/LexiconKeyBuilder.java b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/lexicon/LexiconKeyBuilder.java index fb55c450d6..35cf454898 100644 --- a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/lexicon/LexiconKeyBuilder.java +++ b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/lexicon/LexiconKeyBuilder.java @@ -116,15 +116,8 @@ public byte[] datatypeLiteral2key(final URI datatype, final String value) { if (value == null) throw new IllegalArgumentException(); - if (false && datatype.equals(XMLSchema.STRING)) { - - /* - * @todo xsd:string is explicitly mapped by RDF Semantics onto plain - * literals (they entail one another). However, it breaks the SPARQL - * unit tests if you map them onto the same key. - */ + if (datatype.equals(XMLSchema.STRING)) { return plainLiteral2key(value); - } /* diff --git a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/lexicon/LexiconRelation.java b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/lexicon/LexiconRelation.java index ad6f16b4cc..b24255fd94 100644 --- a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/lexicon/LexiconRelation.java +++ b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/lexicon/LexiconRelation.java @@ -59,6 +59,7 @@ import org.openrdf.model.Statement; import org.openrdf.model.URI; import org.openrdf.model.Value; +import org.openrdf.model.vocabulary.RDF; import com.bigdata.bop.BOp; import com.bigdata.bop.IBindingSet; @@ -1850,8 +1851,8 @@ public long addTerms(final BigdataValue[] values, final int numTerms, * LexiconConfiguration. */ final URI dt = ((BigdataLiteral) v).getDatatype(); - if (dt == null || dt.equals(XSD.STRING)) { - // always text index strings, even inline ones + if (dt == null || dt.equals(XSD.STRING) || dt.equals(RDF.LANGSTRING)) { + // always text index strings, even inline ones, datatyped and langtagged textIndex.add(v); } else if (lexiconConfiguration.isInlineDatatypeToTextIndex(dt)) { textIndex.add(v); diff --git a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/model/BNodeContextFactory.java b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/model/BNodeContextFactory.java index 7c8b351edf..e34c59dc21 100644 --- a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/model/BNodeContextFactory.java +++ b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/model/BNodeContextFactory.java @@ -220,6 +220,16 @@ public BigdataValueSerializer getValueSerializer() { return valueFactory.getValueSerializer(); } + @Override + public BigdataURI getLangStringURI() { + return valueFactory.getLangStringURI(); + } + + @Override + public BigdataURI getXSDStringURI() { + return valueFactory.getXSDStringURI(); + } + /** * Recursive contexts are not available (should not be necessary, right?) */ diff --git a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/model/BigdataLiteralImpl.java b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/model/BigdataLiteralImpl.java index c7c13dadeb..293b6a98bf 100644 --- a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/model/BigdataLiteralImpl.java +++ b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/model/BigdataLiteralImpl.java @@ -54,6 +54,8 @@ Portions Copyright (c) 2002-2003 Bryan Thompson. import org.openrdf.model.Literal; import org.openrdf.model.datatypes.XMLDatatypeUtil; +import org.openrdf.model.vocabulary.RDF; +import org.openrdf.model.vocabulary.XMLSchema; /** * A literal. Use {@link BigdataValueFactory} to create instances of this class. @@ -81,19 +83,24 @@ public class BigdataLiteralImpl extends BigdataValueImpl implements super(valueFactory, null); if (label == null) - throw new IllegalArgumentException(); + throw new IllegalArgumentException("Label cannot be null"); - if (language != null && datatype != null) - throw new IllegalArgumentException(); - this.label = label; - - // force to lowercase (Sesame does this too). - this.language = (language != null ? language.toLowerCase().intern() : null); -// this.language = language; - - this.datatype = datatype; - + + if (language != null) { + if (datatype != null && !datatype.equals(RDF.LANGSTRING)) { + throw new IllegalArgumentException("Language literals cannot have data type other than rdf:langString"); + } + this.datatype = (datatype == null ? valueFactory.getLangStringURI() : datatype); + // force to lowercase (Sesame does this too). + this.language = language.toLowerCase().intern(); + } else { + if (RDF.LANGSTRING.equals(datatype)) { + throw new IllegalArgumentException("Language tagged literals cannot have a null language tag"); + } + this.language = null; + this.datatype = (datatype == null ? valueFactory.getXSDStringURI() : datatype); + } } @Override @@ -113,7 +120,7 @@ public String toString() { sb.append(language); - } else if (datatype != null) { + } else if (datatype != null && !XMLSchema.STRING.equals(datatype)) { sb.append("^^<"); @@ -156,9 +163,15 @@ final public BigdataURI getDatatype() { } final public int hashCode() { - - return label.hashCode(); - + int hashCode = label.hashCode(); + if (language != null) { + hashCode = 31 * hashCode + language.hashCode(); + } + if (datatype != null) { + hashCode = 31 * hashCode + datatype.hashCode(); + + } + return hashCode; } final public boolean equals(Object o) { @@ -208,11 +221,11 @@ && isRealIV() } else if (o.getDatatype() != null) { return false; - + } - + return true; - + } /* diff --git a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/model/BigdataValueFactory.java b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/model/BigdataValueFactory.java index 2503fa83f9..114f10d240 100644 --- a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/model/BigdataValueFactory.java +++ b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/model/BigdataValueFactory.java @@ -240,4 +240,13 @@ BigdataStatement createStatement(Resource s, URI p, Value o, */ BigdataValueSerializer getValueSerializer(); + /** + * Get this factory's implementation of rdf:langString + */ + BigdataURI getLangStringURI(); + + /** + * Get this factory's implementation of xsd:string + */ + BigdataURI getXSDStringURI(); } diff --git a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/model/BigdataValueFactoryImpl.java b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/model/BigdataValueFactoryImpl.java index e11d30e070..59945e919e 100644 --- a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/model/BigdataValueFactoryImpl.java +++ b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/model/BigdataValueFactoryImpl.java @@ -43,6 +43,7 @@ import org.openrdf.model.Value; import org.openrdf.model.datatypes.XMLDatatypeUtil; import org.openrdf.model.impl.BooleanLiteralImpl; +import org.openrdf.model.vocabulary.RDF; import com.bigdata.cache.WeakValueCache; import com.bigdata.rdf.internal.IV; @@ -316,7 +317,7 @@ public BigdataBNodeImpl createBNode(final BigdataStatement stmt) { @Override public BigdataLiteralImpl createLiteral(final String label) { - return new BigdataLiteralImpl(this, label, null, null); + return new BigdataLiteralImpl(this, label, null, xsd_string); } @@ -375,6 +376,8 @@ public BigdataLiteralImpl createLiteral(final String label) { // private final BigdataLiteralImpl FALSE = new BigdataLiteralImpl(this, "false", null, // xsd_boolean); + private final BigdataURIImpl rdf_langstring = new BigdataURIImpl(this, RDF.NAMESPACE + "langString"); + /** * Map for fast resolution of XSD URIs. The keys are the string values of * the URIs. The values are the URIs. @@ -549,7 +552,11 @@ public BigdataLiteralImpl createLiteral(String label, URI datatype, String langu * * See https://sourceforge.net/apps/trac/bigdata/ticket/226 */ - if (datatype != null && !(datatype instanceof BigdataURIImpl)) { + if (datatype == null) { + + datatype = language == null ? xsd_string : rdf_langstring; + + } else if (datatype != null && !(datatype instanceof BigdataURIImpl)) { datatype = createURI(datatype.stringValue()); @@ -709,6 +716,16 @@ public BigdataValueSerializer getValueSerializer() { } + @Override + public BigdataURI getLangStringURI() { + return rdf_langstring; + } + + @Override + public BigdataURI getXSDStringURI() { + return xsd_string; + } + @Override public BigdataResource asValue(Resource v) { diff --git a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/model/BigdataValueSerializer.java b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/model/BigdataValueSerializer.java index 8c2c0788c8..b0195f2fec 100644 --- a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/model/BigdataValueSerializer.java +++ b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/model/BigdataValueSerializer.java @@ -37,6 +37,7 @@ import org.openrdf.model.URI; import org.openrdf.model.Value; import org.openrdf.model.ValueFactory; +import org.openrdf.model.vocabulary.RDF; import com.bigdata.io.ByteArrayBuffer; import com.bigdata.io.DataInputBuffer; @@ -44,6 +45,7 @@ import com.bigdata.io.ShortPacker; import com.bigdata.io.compression.NoCompressor; import com.bigdata.io.compression.UnicodeHelper; +import com.bigdata.rdf.internal.XSD; import com.bigdata.rdf.lexicon.ITermIndexCodes; /** @@ -192,7 +194,7 @@ private byte getTermCode(final Value val) { if (lit.getLanguage() != null) return ITermIndexCodes.TERM_CODE_LCL; - if (lit.getDatatype() != null) + if (lit.getDatatype() != null && !XSD.STRING.equals(lit.getDatatype())) return ITermIndexCodes.TERM_CODE_DTL; return ITermIndexCodes.TERM_CODE_LIT; @@ -706,7 +708,7 @@ static public long getStringLength(final Value v) { final String label = value.getLabel(); - final int datatypeLength = value.getDatatype() == null ? 0 : value + final int datatypeLength = value.getDatatype() == null || RDF.LANGSTRING.equals(value.getDatatype()) ? 0 : value .getDatatype().stringValue().length(); final int languageLength = value.getLanguage() == null ? 0 : value diff --git a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/rio/turtle/BigdataTurtleParser.java b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/rio/turtle/BigdataTurtleParser.java index 019ced9ec0..9b48e1b5c0 100644 --- a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/rio/turtle/BigdataTurtleParser.java +++ b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/rio/turtle/BigdataTurtleParser.java @@ -74,7 +74,11 @@ else if (c == '_') { } else if (c == '"' || c == '\'') { // quoted literal, e.g. "foo" or """foo""" or 'foo' or '''foo''' - return parseQuotedLiteral(); + try { + return parseQuotedLiteral(); + } catch (RDFHandlerException e) { + throw new IOException(e); + } } else if (ASCIIUtil.isNumber(c) || c == '.' || c == '+' || c == '-') { // integer or double, e.g. 123 or 1.2e3 @@ -230,4 +234,17 @@ protected BNode parseNodeID() return createBNode(name.toString()); } + /** + * BC method. + */ + protected int read() throws IOException { + return readCodePoint(); + } + + /** + * BC method. + */ + protected int peek() throws IOException { + return peekCodePoint(); + } } diff --git a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/eval/GeoSpatialServiceFactory.java b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/eval/GeoSpatialServiceFactory.java index 590d6e34fe..483f865c39 100644 --- a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/eval/GeoSpatialServiceFactory.java +++ b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/eval/GeoSpatialServiceFactory.java @@ -1,6 +1,6 @@ /** -Copyright (C) SYSTAP, LLC DBA Blazegraph 2006-2016. All rights reserved. +Copyright (C) SYSTAP, LLC DBA Blazegraph 2006-2016. All rights reserved. Contact: SYSTAP, LLC DBA Blazegraph @@ -46,6 +46,7 @@ import org.apache.log4j.Logger; import org.openrdf.model.Literal; import org.openrdf.model.URI; +import org.openrdf.query.algebra.evaluation.util.QueryEvaluationUtil; import com.bigdata.bop.BOp; import com.bigdata.bop.BOpContextBase; @@ -2459,7 +2460,7 @@ PointLatLon resolveAsPoint(final TermNode termNode, final IBindingSet bs) { IGeoSpatialLiteralSerializer serializer = null; GeoSpatialDatatypeConfiguration pconfig = null; - if (lit.getDatatype() != null) { + if (!QueryEvaluationUtil.isSimpleLiteral(lit)) { // If we have datatype that can extract coordinates, use it to exteract pconfig = geoSpatialConfig.getConfigurationForDatatype(lit.getDatatype()); if (pconfig.hasLat() && pconfig.hasLon()) { diff --git a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/optimizers/ASTPropertyPathOptimizer.java b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/optimizers/ASTPropertyPathOptimizer.java index 146b4774e2..aec9f4b6fa 100644 --- a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/optimizers/ASTPropertyPathOptimizer.java +++ b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/optimizers/ASTPropertyPathOptimizer.java @@ -492,7 +492,9 @@ protected void optimize(final AST2BOpContext ctx, final StaticAnalysis sa, ArrayList back = null; for (BOp child : pathNPS.args()) { - + if (!(child instanceof PathOneInPropertySet)) { + continue; + } final PathOneInPropertySet pathOIPS = (PathOneInPropertySet) child; final ConstantNode iri = (ConstantNode) pathOIPS.get(0); @@ -537,10 +539,12 @@ protected void optimize(final AST2BOpContext ctx, final StaticAnalysis sa, addNegateds(group, forward, ppInfo, alpNode, ppNode); - } else { + } else if (back != null) { addNegateds(group, back, ppInfo.inverse(), alpNode, ppNode); + } else { + // NOP } } diff --git a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/vocab/RDFSVocabulary.java b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/vocab/RDFSVocabulary.java index 605460a088..8e86d726c8 100644 --- a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/vocab/RDFSVocabulary.java +++ b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/vocab/RDFSVocabulary.java @@ -27,7 +27,6 @@ package com.bigdata.rdf.vocab; -import org.openrdf.Sesame; import org.openrdf.model.Value; import org.openrdf.model.vocabulary.OWL; import org.openrdf.model.vocabulary.RDF; diff --git a/bigdata-core/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSail.java b/bigdata-core/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSail.java index 263993e735..d596d0a361 100644 --- a/bigdata-core/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSail.java +++ b/bigdata-core/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSail.java @@ -77,9 +77,8 @@ Copyright Aduna (http://www.aduna-software.com/) 2001-2007 import java.util.concurrent.locks.ReentrantReadWriteLock; import org.apache.log4j.Logger; -//FIXME: Sesame 2.8 not used for 2.1.4 Release -//import org.openrdf.IsolationLevel; -//import org.openrdf.IsolationLevels; +import org.openrdf.IsolationLevel; +import org.openrdf.IsolationLevels; import org.openrdf.OpenRDFUtil; import org.openrdf.model.Namespace; import org.openrdf.model.Resource; @@ -4846,7 +4845,7 @@ public void fireEvent(final SPARQLUpdateEvent e) { */ @Override public void begin() throws SailException { - + } /** @@ -4854,12 +4853,12 @@ public void begin() throws SailException { *

* {@inheritDoc} */ -//FIXME: Sesame 2.8 not used for 2.1.4 Release -// @Override -// public void begin(IsolationLevel level) -// throws UnknownSailTransactionStateException, SailException { -// -// } + @Override + public void begin(IsolationLevel level) + throws UnknownSailTransactionStateException, SailException { + // Only one isolation level is supported - snapshot isolation + begin(); + } /** * Always returns true. @@ -5426,15 +5425,14 @@ public synchronized void close() throws SailException { } // class BigdataSailReadOnlyConnection -//FIXME: Sesame 2.8 not used for 2.1.4 Release -// @Override -// public List getSupportedIsolationLevels() { -// return Arrays.asList(IsolationLevels.READ_UNCOMMITTED, IsolationLevels.SNAPSHOT_READ); -// } + @Override + public List getSupportedIsolationLevels() { + return Arrays.asList(IsolationLevels.SNAPSHOT_READ); + } -// @Override -// public IsolationLevel getDefaultIsolationLevel() { -// return IsolationLevels.READ_UNCOMMITTED; -// } + @Override + public IsolationLevel getDefaultIsolationLevel() { + return IsolationLevels.SNAPSHOT_READ; + } } diff --git a/bigdata-core/bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/SPARQLStarUpdateDataBlockParser.java b/bigdata-core/bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/SPARQLStarUpdateDataBlockParser.java index 171825edcd..e8619f3ee6 100644 --- a/bigdata-core/bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/SPARQLStarUpdateDataBlockParser.java +++ b/bigdata-core/bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/SPARQLStarUpdateDataBlockParser.java @@ -100,7 +100,11 @@ protected Value parseValue() throws IOException, RDFParseException { if (checkSparqlStarSyntax()) { return parseStmtValue(); } - return super.parseValue(); + try { + return super.parseValue(); + } catch (RDFHandlerException e) { + throw new IOException(e); + } } private boolean checkSparqlStarSyntax() throws IOException { @@ -195,4 +199,17 @@ public void setNamespace(String prefix, String namespace) { super.setNamespace(prefix, namespace); } + /** + * BC method. + */ + protected int read() throws IOException { + return readCodePoint(); + } + + /** + * BC method. + */ + protected int peek() throws IOException { + return peekCodePoint(); + } } diff --git a/bigdata-core/bigdata/src/java/com/bigdata/bop/solutions/IVComparator.java b/bigdata-core/bigdata/src/java/com/bigdata/bop/solutions/IVComparator.java index dd063df79d..4d9fbc371f 100644 --- a/bigdata-core/bigdata/src/java/com/bigdata/bop/solutions/IVComparator.java +++ b/bigdata-core/bigdata/src/java/com/bigdata/bop/solutions/IVComparator.java @@ -1,6 +1,6 @@ /** -Copyright (C) SYSTAP, LLC DBA Blazegraph 2006-2016. All rights reserved. +Copyright (C) SYSTAP, LLC DBA Blazegraph 2006-2016. All rights reserved. Contact: SYSTAP, LLC DBA Blazegraph @@ -39,12 +39,15 @@ import org.openrdf.model.Literal; import org.openrdf.model.URI; import org.openrdf.model.datatypes.XMLDatatypeUtil; +import org.openrdf.model.util.Literals; +import org.openrdf.model.vocabulary.RDF; import org.openrdf.query.algebra.Compare.CompareOp; import org.openrdf.query.algebra.evaluation.ValueExprEvaluationException; import org.openrdf.query.algebra.evaluation.util.QueryEvaluationUtil; import org.openrdf.query.algebra.evaluation.util.ValueComparator; import com.bigdata.rdf.internal.IV; +import com.bigdata.rdf.internal.XSD; import com.bigdata.rdf.internal.impl.bnode.SidIV; import com.bigdata.rdf.internal.impl.literal.LiteralExtensionIV; import com.bigdata.rdf.model.BigdataLiteral; @@ -199,16 +202,12 @@ private int compareLiterals( return compareLiterals((Literal) left, (Literal) right); } - + /** - * Taken directly from Sesame's ValueComparator, no modification. Handles - * inlines nicely since they now implement the Literal interface. - */ + * Taken directly from Sesame's ValueComparator, no modification. Handles + * inlines nicely since they now implement the Literal interface. + */ private int compareLiterals(final Literal leftLit, final Literal rightLit) { - // Additional constraint for ORDER BY: "A plain literal is lower - // than an RDF literal with type xsd:string of the same lexical - // form." - if (!QueryEvaluationUtil.isStringLiteral(leftLit) || !QueryEvaluationUtil.isStringLiteral(rightLit)) { try { boolean isSmaller = QueryEvaluationUtil.compareLiterals(leftLit, rightLit, CompareOp.LT); @@ -226,42 +225,46 @@ private int compareLiterals(final Literal leftLit, final Literal rightLit) { } } - int result = 0; - - // Sort by datatype first, plain literals come before datatyped literals - URI leftDatatype = leftLit.getDatatype(); - URI rightDatatype = rightLit.getDatatype(); + boolean leftIsString = QueryEvaluationUtil.isSimpleLiteral(leftLit); + boolean rightIsString = QueryEvaluationUtil.isSimpleLiteral(rightLit); - if (leftDatatype != null) { - if (rightDatatype != null) { - // Both literals have datatypes - result = compareDatatypes(leftDatatype, rightDatatype); + // If we're here, we have either string literals or types unsupported by Sesame + // Simple string literals go before non-string ones + if (leftIsString) { + if (rightIsString) { + return leftLit.getLabel().compareTo(rightLit.getLabel()); } - else { - result = 1; - } - } - else if (rightDatatype != null) { - result = -1; + return -1; + } else if (rightIsString) { + return 1; } - if (result == 0) { - // datatypes are equal or both literals are untyped; sort by language - // tags, simple literals come before literals with language tags - String leftLanguage = leftLit.getLanguage(); - String rightLanguage = rightLit.getLanguage(); - - if (leftLanguage != null) { - if (rightLanguage != null) { - result = leftLanguage.compareTo(rightLanguage); - } - else { - result = 1; + // From here, one of the literals is not a simple string + int result = 0; + + // If we have language tags, sort by language + // tags. Language string goes before non-language literals. + String leftLanguage = leftLit.getLanguage(); + String rightLanguage = rightLit.getLanguage(); + + if (leftLanguage != null) { + if (rightLanguage != null) { + result = leftLanguage.compareTo(rightLanguage); + if (result == 0) { + // If the languages are equal, we can just compare labels + return leftLit.getLabel().compareTo(rightLit.getLabel()); } - } - else if (rightLanguage != null) { + } else { + // Language string before non-language types result = -1; } + } else if (rightLanguage != null) { + result = 1; + } + + if (result == 0) { + // If we didn't get any resolution with languages, try simply comparing data types + result = compareDatatypes(leftLit.getDatatype(), rightLit.getDatatype()); } if (result == 0) { @@ -307,4 +310,4 @@ else if (XMLDatatypeUtil.isCalendarDatatype(rightDatatype)) { } } -} +} diff --git a/bigdata-rdf-test/pom.xml b/bigdata-rdf-test/pom.xml index 1389dcce02..089b69b16c 100644 --- a/bigdata-rdf-test/pom.xml +++ b/bigdata-rdf-test/pom.xml @@ -381,6 +381,7 @@ Copyright 2010 by TalkingTrends (Amsterdam, The Netherlands) com.blazegraph bigdata-core-test ${project.version} + tests com.blazegraph diff --git a/bigdata-rdf-test/src/test/java/com/bigdata/rdf/lexicon/TestFullTextIndex.java b/bigdata-rdf-test/src/test/java/com/bigdata/rdf/lexicon/TestFullTextIndex.java index 815ed85c6a..a7acb7bb4b 100644 --- a/bigdata-rdf-test/src/test/java/com/bigdata/rdf/lexicon/TestFullTextIndex.java +++ b/bigdata-rdf-test/src/test/java/com/bigdata/rdf/lexicon/TestFullTextIndex.java @@ -27,11 +27,13 @@ package com.bigdata.rdf.lexicon; +import java.lang.reflect.Field; import java.util.Arrays; import java.util.Iterator; import java.util.Properties; import java.util.UUID; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; import junit.framework.AssertionFailedError; @@ -735,6 +737,17 @@ public void test_rebuildIndex() { store = reopenStore(store); + } else { + // we need to manually recreate SearchEngine, as reopening is not supported for temp store + store.getLexiconRelation().getSearchEngine().destroy(); + try { + Field field = store.getLexiconRelation().getClass().getDeclaredField("viewRef"); + field.setAccessible(true); + ((AtomicReference)field.get(store.getLexiconRelation())).set(null); + store.getLexiconRelation().getSearchEngine().create(); + } catch (NoSuchFieldException | SecurityException | IllegalArgumentException | IllegalAccessException e) { + log.error("Error recreating SearchEngine for temp store", e); + } } // Ruin full text index diff --git a/bigdata-rdf-test/src/test/java/com/bigdata/rdf/model/TestFactory.java b/bigdata-rdf-test/src/test/java/com/bigdata/rdf/model/TestFactory.java index 241d0ed89f..d4b8ce898d 100644 --- a/bigdata-rdf-test/src/test/java/com/bigdata/rdf/model/TestFactory.java +++ b/bigdata-rdf-test/src/test/java/com/bigdata/rdf/model/TestFactory.java @@ -1,6 +1,6 @@ /** -Copyright (C) SYSTAP, LLC DBA Blazegraph 2006-2016. All rights reserved. +Copyright (C) SYSTAP, LLC DBA Blazegraph 2006-2016. All rights reserved. Contact: SYSTAP, LLC DBA Blazegraph @@ -36,6 +36,7 @@ import org.openrdf.model.URI; import org.openrdf.model.Value; import org.openrdf.model.ValueFactory; +import org.openrdf.model.vocabulary.RDF; import com.bigdata.rdf.internal.IV; import com.bigdata.rdf.internal.VTE; @@ -112,6 +113,7 @@ public void test_create_xsdInt_canonical() { /** * Unit test for {@link ValueFactory#createLiteral(String, URI)} when the * datatype URI is null. + * In RDF 1.1, literals always have type, which is XSD.STRING by default. * * @see https://sourceforge.net/apps/trac/bigdata/ticket/226 */ @@ -119,7 +121,7 @@ public void test_create_literal_datatypeIsNull() { final BigdataLiteral l1 = vf.createLiteral("12", (URI) null); - assertEquals(null, l1.getDatatype()); + assertEquals(XSD.STRING, l1.getDatatype()); assertEquals(12, l1.intValue()); diff --git a/bigdata-rdf-test/src/test/java/com/bigdata/rdf/sparql/ast/eval/TestPipelinedHashJoin.java b/bigdata-rdf-test/src/test/java/com/bigdata/rdf/sparql/ast/eval/TestPipelinedHashJoin.java index 836e45f383..6c387c52a1 100644 --- a/bigdata-rdf-test/src/test/java/com/bigdata/rdf/sparql/ast/eval/TestPipelinedHashJoin.java +++ b/bigdata-rdf-test/src/test/java/com/bigdata/rdf/sparql/ast/eval/TestPipelinedHashJoin.java @@ -1,6 +1,6 @@ /** -Copyright (C) SYSTAP, LLC DBA Blazegraph 2006-2016. All rights reserved. +Copyright (C) SYSTAP, LLC DBA Blazegraph 2006-2016. All rights reserved. Contact: SYSTAP, LLC DBA Blazegraph @@ -27,6 +27,8 @@ package com.bigdata.rdf.sparql.ast.eval; +import org.junit.Ignore; + import com.bigdata.bop.BOpUtility; import com.bigdata.bop.PipelineOp; import com.bigdata.bop.join.HTreePipelinedHashJoinUtility; @@ -868,7 +870,10 @@ public void testPipelinedHashDisabledByQueryHintOptionalAnalyticMode() throws Ex * Test query affected by * PipelinedHashIndexAndSolutionSetOp.INCOMING_BINDINGS_BUFFER_THRESHOLD. */ - public void testPipelinedHashIncomingBindingsBufferThreshold() throws Exception { + @Ignore("Flaky test failing due to concurrent execution of the pipelined result overflowing PipelinedHashIndexAndSolutionSetJoinOp.DEFAULT_INCOMING_BINDINGS_BUFFER_THRESHOLD") + // TODO: Fix partial duplication of results due to releasing of incoming bindings and distinct projection buffers + // Note that fail depends on Xmx provided for java runtime, for -Xmx4G it most probably fail, for -Xmx8G it passes + public void _testPipelinedHashIncomingBindingsBufferThreshold() throws Exception { final ASTContainer astContainer = new TestHelper( "pipelined-hashjoin-threshold-incoming-bindings-buffer",// testURI @@ -1015,7 +1020,7 @@ public void testPipelinedHashJoinNotExistsMultiplicityAnalyticMode() throws Exce /** * Bug reporting MINUS pipelined hash join being broken, test case for analytic mode. * - * See https://github.com/blazegraph/database/issues/107 + * See https://github.com/blazegraph/database/issues/107 */ public void testPipelinedHashJoinMinusBug01aAnalyticMode() throws Exception { @@ -1034,7 +1039,7 @@ public void testPipelinedHashJoinMinusBug01aAnalyticMode() throws Exception { /** * Bug reporting MINUS pipelined hash join being broken, test case for non-analytic mode. * - * See https://github.com/blazegraph/database/issues/107 + * See https://github.com/blazegraph/database/issues/107 */ public void testPipelinedHashJoinMinusBug01aNonAnalyticMode() throws Exception { @@ -1054,7 +1059,7 @@ public void testPipelinedHashJoinMinusBug01aNonAnalyticMode() throws Exception { * Bug reporting MINUS pipelined hash join being broken, just making sure that the non-pipelined version * for the test case (i.e., query without LIMIT) is working as expected in analytic mode. * - * See https://github.com/blazegraph/database/issues/107 + * See https://github.com/blazegraph/database/issues/107 */ public void testPipelinedHashJoinDisabledMinusBug01aAnalyticMode() throws Exception { @@ -1074,7 +1079,7 @@ public void testPipelinedHashJoinDisabledMinusBug01aAnalyticMode() throws Except * Bug reporting MINUS pipelined hash join being broken, just making sure that the non-pipelined version * for the test case (i.e., query without LIMIT) is working as expected in non-analytic mode. * - * See https://github.com/blazegraph/database/issues/107 + * See https://github.com/blazegraph/database/issues/107 */ public void testPipelinedHashJoinDisabledMinusBug01aNonAnalyticMode() throws Exception { @@ -1096,7 +1101,7 @@ public void testPipelinedHashJoinDisabledMinusBug01aNonAnalyticMode() throws Exc * Bug reporting MINUS pipelined hash join being broken, test case for analytic mode. * Variant of 01a that is a little more challenging, as it contains more solutions and duplicates. * - * See https://github.com/blazegraph/database/issues/107 + * See https://github.com/blazegraph/database/issues/107 */ public void testPipelinedHashJoinMinusBug01bAnalyticMode() throws Exception { @@ -1116,7 +1121,7 @@ public void testPipelinedHashJoinMinusBug01bAnalyticMode() throws Exception { * Bug reporting MINUS pipelined hash join being broken, test case for non-analytic mode. * Variant of 01a that is a little more challenging, as it contains more solutions and duplicates. * - * See https://github.com/blazegraph/database/issues/107 + * See https://github.com/blazegraph/database/issues/107 */ public void testPipelinedHashJoinMinusBug01bNonAnalyticMode() throws Exception { @@ -1138,7 +1143,7 @@ public void testPipelinedHashJoinMinusBug01bNonAnalyticMode() throws Exception { * * Variant of 01a that is a little more challenging, as it contains more solutions and duplicates. * - * See https://github.com/blazegraph/database/issues/107 + * See https://github.com/blazegraph/database/issues/107 */ public void testPipelinedHashJoinDisabledMinusBug01bAnalyticMode() throws Exception { @@ -1160,7 +1165,7 @@ public void testPipelinedHashJoinDisabledMinusBug01bAnalyticMode() throws Except * * Variant of 01a that is a little more challenging, as it contains more solutions and duplicates. * - * See https://github.com/blazegraph/database/issues/107 + * See https://github.com/blazegraph/database/issues/107 */ public void testPipelinedHashJoinDisabledMinusBug01bNonAnalyticMode() throws Exception { @@ -1180,7 +1185,7 @@ public void testPipelinedHashJoinDisabledMinusBug01bNonAnalyticMode() throws Exc /** * Bug reporting MINUS pipelined hash join being broken, test case for analytic mode. * - * See https://github.com/blazegraph/database/issues/107 + * See https://github.com/blazegraph/database/issues/107 */ public void testPipelinedHashJoinMinusBug02AnalyticMode() throws Exception { @@ -1203,7 +1208,7 @@ public void testPipelinedHashJoinMinusBug02AnalyticMode() throws Exception { * testing code paths that skip subquery re-evaluation for binding sets that have been evaluated in * previous passes. * - * See https://github.com/blazegraph/database/issues/107 + * See https://github.com/blazegraph/database/issues/107 */ public void testPipelinedHashJoinMinusBug02NonAnalyticMode() throws Exception { @@ -1227,7 +1232,7 @@ public void testPipelinedHashJoinMinusBug02NonAnalyticMode() throws Exception { * testing code paths that skip subquery re-evaluation for binding sets that have been evaluated in * previous passes. * - * See https://github.com/blazegraph/database/issues/107 + * See https://github.com/blazegraph/database/issues/107 */ public void testPipelinedHashJoinDisabledMinusBug02AnalyticMode() throws Exception { @@ -1251,7 +1256,7 @@ public void testPipelinedHashJoinDisabledMinusBug02AnalyticMode() throws Excepti * testing code paths that skip subquery re-evaluation for binding sets that have been evaluated in * previous passes. * - * See https://github.com/blazegraph/database/issues/107 + * See https://github.com/blazegraph/database/issues/107 */ public void testPipelinedHashJoinDisabledMinusBug02NonAnalyticMode() throws Exception { @@ -1325,4 +1330,4 @@ protected void assertPipelinedPlanOrNot(final PipelineOp queryPlan, } } -} +} diff --git a/bigdata-rdf-test/src/test/java/com/bigdata/rdf/sparql/ast/eval/ticket_832a.trig b/bigdata-rdf-test/src/test/java/com/bigdata/rdf/sparql/ast/eval/ticket_832a.trig index b3e766d613..12b91f97dd 100644 --- a/bigdata-rdf-test/src/test/java/com/bigdata/rdf/sparql/ast/eval/ticket_832a.trig +++ b/bigdata-rdf-test/src/test/java/com/bigdata/rdf/sparql/ast/eval/ticket_832a.trig @@ -1,20 +1,20 @@ -@prefix : . -@prefix base: . +@prefix : . +@prefix gbase: . -base:graph1 +gbase:graph1 { -:person3 - a :Person ; - :age 3; +:person3 + a :Person ; + :age 3; :name "Person 3 - graph1". - - + + } -base:graph2 +gbase:graph2 { -:person3 - a :Person ; - :age 13; +:person3 + a :Person ; + :age 13; :name "Person 3". -} \ No newline at end of file +} diff --git a/bigdata-rdf-test/src/test/java/com/bigdata/rdf/sparql/ast/eval/ticket_835.trig b/bigdata-rdf-test/src/test/java/com/bigdata/rdf/sparql/ast/eval/ticket_835.trig index b3e766d613..eb705d4fc0 100644 --- a/bigdata-rdf-test/src/test/java/com/bigdata/rdf/sparql/ast/eval/ticket_835.trig +++ b/bigdata-rdf-test/src/test/java/com/bigdata/rdf/sparql/ast/eval/ticket_835.trig @@ -1,7 +1,7 @@ @prefix : . -@prefix base: . +@prefix gbase: . -base:graph1 +gbase:graph1 { :person3 a :Person ; @@ -11,10 +11,10 @@ base:graph1 } -base:graph2 +gbase:graph2 { :person3 a :Person ; :age 13; :name "Person 3". -} \ No newline at end of file +} diff --git a/bigdata-rdf-test/src/test/java/com/bigdata/rdf/sparql/ast/eval/ticket_bg911.trig b/bigdata-rdf-test/src/test/java/com/bigdata/rdf/sparql/ast/eval/ticket_bg911.trig index b3e766d613..eb705d4fc0 100644 --- a/bigdata-rdf-test/src/test/java/com/bigdata/rdf/sparql/ast/eval/ticket_bg911.trig +++ b/bigdata-rdf-test/src/test/java/com/bigdata/rdf/sparql/ast/eval/ticket_bg911.trig @@ -1,7 +1,7 @@ @prefix : . -@prefix base: . +@prefix gbase: . -base:graph1 +gbase:graph1 { :person3 a :Person ; @@ -11,10 +11,10 @@ base:graph1 } -base:graph2 +gbase:graph2 { :person3 a :Person ; :age 13; :name "Person 3". -} \ No newline at end of file +} diff --git a/bigdata-rdf-test/src/test/java/com/bigdata/rdf/store/TestTripleStore.java b/bigdata-rdf-test/src/test/java/com/bigdata/rdf/store/TestTripleStore.java index 8e35f1a115..35069d5993 100644 --- a/bigdata-rdf-test/src/test/java/com/bigdata/rdf/store/TestTripleStore.java +++ b/bigdata-rdf-test/src/test/java/com/bigdata/rdf/store/TestTripleStore.java @@ -211,7 +211,8 @@ public void test_addTerm() { try { doAddTermTest(store, new LiteralImpl("abc")); - doAddTermTest(store, new LiteralImpl("abc", XMLSchema.STRING)); + // Under RDF 1.1, this one is the same as previous one + // doAddTermTest(store, new LiteralImpl("abc", XMLSchema.STRING)); doAddTermTest(store, new LiteralImpl("abc", "en")); doAddTermTest(store, new URIImpl("http://www.bigdata.com")); diff --git a/bigdata-sails-test/pom.xml b/bigdata-sails-test/pom.xml index c7e1183a57..b0d0a889e9 100644 --- a/bigdata-sails-test/pom.xml +++ b/bigdata-sails-test/pom.xml @@ -380,7 +380,6 @@ Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA bigdata-core-test ${project.version} test-jar - test com.blazegraph diff --git a/bigdata-sails-test/src/test/java/com/bigdata/rdf/sail/TestLexJoinOps.java b/bigdata-sails-test/src/test/java/com/bigdata/rdf/sail/TestLexJoinOps.java index 9284305ce0..e020fac52a 100644 --- a/bigdata-sails-test/src/test/java/com/bigdata/rdf/sail/TestLexJoinOps.java +++ b/bigdata-sails-test/src/test/java/com/bigdata/rdf/sail/TestLexJoinOps.java @@ -104,7 +104,8 @@ public void testStr() throws Exception { final URI X = vf.createURI(BD.NAMESPACE + "X"); final URI dt = vf.createURI(BD.NAMESPACE + "myDatatype"); final Literal _1 = vf.createLiteral("foo"); - final Literal _2 = vf.createLiteral("foo", XSD.STRING); + // Since Sesame 2.8 upgrade, xsd:string is the same as plain literal +// final Literal _2 = vf.createLiteral("foo", XSD.STRING); final Literal _3 = vf.createLiteral("foo", dt); final Literal _4 = vf.createLiteral("foo", "EN"); final Literal _5 = vf.createLiteral(true); @@ -115,7 +116,7 @@ public void testStr() throws Exception { */ cxn.add(X, RDF.TYPE, RDFS.RESOURCE); cxn.add(X, RDFS.LABEL, _1); - cxn.add(X, RDFS.LABEL, _2); +// cxn.add(X, RDFS.LABEL, _2); cxn.add(X, RDFS.LABEL, _3); cxn.add(X, RDFS.LABEL, _4); cxn.add(X, RDFS.LABEL, _5); @@ -167,9 +168,9 @@ public void testStr() throws Exception { answer.add(createBindingSet( new BindingImpl("o", _1) )); - answer.add(createBindingSet( - new BindingImpl("o", _2) - )); +// answer.add(createBindingSet( +// new BindingImpl("o", _2) +// )); answer.add(createBindingSet( new BindingImpl("o", _3) )); @@ -217,7 +218,8 @@ public void testRegex() throws Exception { final URI X = vf.createURI(BD.NAMESPACE + "X"); final URI dt = vf.createURI(BD.NAMESPACE + "myDatatype"); final Literal _1 = vf.createLiteral("foo"); - final Literal _2 = vf.createLiteral("foo", XSD.STRING); + // Since Sesame 2.8 upgrade, xsd:string is the same as plain literal +// final Literal _2 = vf.createLiteral("foo", XSD.STRING); final Literal _3 = vf.createLiteral("foo", dt); final Literal _4 = vf.createLiteral("foo", "EN"); final Literal _5 = vf.createLiteral(true); @@ -228,7 +230,7 @@ public void testRegex() throws Exception { */ cxn.add(X, RDF.TYPE, RDFS.RESOURCE); cxn.add(X, RDFS.LABEL, _1); - cxn.add(X, RDFS.LABEL, _2); +// cxn.add(X, RDFS.LABEL, _2); cxn.add(X, RDFS.LABEL, _3); cxn.add(X, RDFS.LABEL, _4); cxn.add(X, RDFS.LABEL, _5); @@ -281,9 +283,9 @@ public void testRegex() throws Exception { answer.add(createBindingSet( new BindingImpl("o", _1) )); - answer.add(createBindingSet( - new BindingImpl("o", _2) - )); +// answer.add(createBindingSet( +// new BindingImpl("o", _2) +// )); answer.add(createBindingSet( new BindingImpl("o", _3) )); diff --git a/bigdata-sails-test/src/test/java/com/bigdata/rdf/sail/TestTicket1893.java b/bigdata-sails-test/src/test/java/com/bigdata/rdf/sail/TestTicket1893.java index 71aea0dda0..3ffa2cbc58 100644 --- a/bigdata-sails-test/src/test/java/com/bigdata/rdf/sail/TestTicket1893.java +++ b/bigdata-sails-test/src/test/java/com/bigdata/rdf/sail/TestTicket1893.java @@ -355,7 +355,7 @@ public void test_9() throws Exception { assertEquals(0, searchEngine.count(query("true"))); assertEquals(0, searchEngine.count(query("false"))); assertEquals(1, searchEngine.count(query("plain"))); - assertEquals(0, searchEngine.count(query("datatyped"))); + assertEquals(1, searchEngine.count(query("datatyped"))); assertEquals(1, searchEngine.count(query("english"))); endTest(cxn); @@ -386,7 +386,7 @@ public void test_10() throws Exception { assertEquals(0, searchEngine.count(query("true"))); assertEquals(0, searchEngine.count(query("false"))); assertEquals(1, searchEngine.count(query("plain"))); - assertEquals(0, searchEngine.count(query("datatyped"))); + assertEquals(1, searchEngine.count(query("datatyped"))); assertEquals(1, searchEngine.count(query("english"))); endTest(cxn); @@ -419,7 +419,7 @@ public void test_11() throws Exception { assertEquals(0, searchEngine.count(query("true"))); assertEquals(0, searchEngine.count(query("false"))); assertEquals(1, searchEngine.count(query("plain"))); - assertEquals(0, searchEngine.count(query("datatyped"))); + assertEquals(1, searchEngine.count(query("datatyped"))); assertEquals(1, searchEngine.count(query("english"))); endTest(cxn); @@ -451,7 +451,7 @@ public void test_12() throws Exception { assertEquals(0, searchEngine.count(query("true"))); assertEquals(0, searchEngine.count(query("false"))); assertEquals(1, searchEngine.count(query("plain"))); - assertEquals(0, searchEngine.count(query("datatyped"))); + assertEquals(1, searchEngine.count(query("datatyped"))); assertEquals(1, searchEngine.count(query("english"))); endTest(cxn); @@ -482,7 +482,7 @@ public void test_13() throws Exception { assertEquals(0, searchEngine.count(query("true"))); assertEquals(0, searchEngine.count(query("false"))); assertEquals(1, searchEngine.count(query("plain"))); - assertEquals(0, searchEngine.count(query("datatyped"))); + assertEquals(1, searchEngine.count(query("datatyped"))); assertEquals(1, searchEngine.count(query("english"))); endTest(cxn); @@ -513,7 +513,7 @@ public void test_14() throws Exception { assertEquals(0, searchEngine.count(query("true"))); assertEquals(0, searchEngine.count(query("false"))); assertEquals(1, searchEngine.count(query("plain"))); - assertEquals(0, searchEngine.count(query("datatyped"))); + assertEquals(1, searchEngine.count(query("datatyped"))); assertEquals(1, searchEngine.count(query("english"))); endTest(cxn); @@ -544,7 +544,7 @@ public void test_15() throws Exception { assertEquals(0, searchEngine.count(query("true"))); assertEquals(0, searchEngine.count(query("false"))); assertEquals(1, searchEngine.count(query("plain"))); - assertEquals(0, searchEngine.count(query("datatyped"))); + assertEquals(1, searchEngine.count(query("datatyped"))); assertEquals(1, searchEngine.count(query("english"))); endTest(cxn); @@ -575,7 +575,7 @@ public void test_16() throws Exception { assertEquals(0, searchEngine.count(query("true"))); assertEquals(0, searchEngine.count(query("false"))); assertEquals(1, searchEngine.count(query("plain"))); - assertEquals(0, searchEngine.count(query("datatyped"))); + assertEquals(1, searchEngine.count(query("datatyped"))); assertEquals(1, searchEngine.count(query("english"))); endTest(cxn); diff --git a/bigdata-sails-test/src/test/java/com/bigdata/rdf/sail/tck/BigdataSPARQLUpdateTxTest.java b/bigdata-sails-test/src/test/java/com/bigdata/rdf/sail/tck/BigdataSPARQLUpdateTxTest.java index fbbc7f079c..592d6874ec 100644 --- a/bigdata-sails-test/src/test/java/com/bigdata/rdf/sail/tck/BigdataSPARQLUpdateTxTest.java +++ b/bigdata-sails-test/src/test/java/com/bigdata/rdf/sail/tck/BigdataSPARQLUpdateTxTest.java @@ -58,5 +58,12 @@ protected Properties getProperties() { return props; } + + @Override + public void testAutoCommitHandling() throws Exception { + // NOP + // disabling autocommit test for BigdataSailRWTxConnection, which is backed + // by a read/write transaction and does not support begin/end nested semantics + } } diff --git a/bigdata-sails-test/src/test/java/com/bigdata/rdf/sail/tck/BigdataSparqlTest.java b/bigdata-sails-test/src/test/java/com/bigdata/rdf/sail/tck/BigdataSparqlTest.java index cb89082c17..5745ab6e22 100644 --- a/bigdata-sails-test/src/test/java/com/bigdata/rdf/sail/tck/BigdataSparqlTest.java +++ b/bigdata-sails-test/src/test/java/com/bigdata/rdf/sail/tck/BigdataSparqlTest.java @@ -432,6 +432,23 @@ public class BigdataSparqlTest */ //"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/aggregates/manifest#agg03", //"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/aggregates/manifest#agg07", + + // Disabled while upgrading to Sesame 2.8: +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/syntax-sparql1/manifest#sparql11-strdt-01", +// "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/functions/manifest#strdt02", + // failing due to strdt and strlang functions expect simple literals according to + // https://www.w3.org/TR/sparql11-query/#func-strdt + // https://www.w3.org/TR/sparql11-query/#simple_literal + // with respect to Sesame 2.8 approach, xsd:string literals considered to be equal to plain literals + // But strdt-03 test expects that xsd:string literals would not produce results if used in strdt, strlang. + // Thus disabling the tests as they do not conform with SPARQL 1.1 specifications. + "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/functions/manifest#strdt03", + "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/functions/manifest#strlang03", + // This fails due to "no discount"^^xsd:string potential solution does not get materialized yet + // when called com.bigdata.rdf.internal.constraints.CompareBOp.compareLiterals(IV, IV, CompareOp) + // This producing NotMaterializedException while comparing to provided numerics, and get removed from resultset + // Does not seem to be specific to any Sesame 2.8 changes, should be fixed as a separate issue. + "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/syntax-sparql1/manifest#sparql11-not-in-02", }); /** diff --git a/bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/ComplexSPARQLQueryTest.java b/bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/ComplexSPARQLQueryTest.java index 9ca9f044f0..34f3e662b5 100644 --- a/bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/ComplexSPARQLQueryTest.java +++ b/bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/ComplexSPARQLQueryTest.java @@ -21,14 +21,20 @@ import java.io.IOException; import java.io.InputStream; +import java.io.StringReader; import java.util.List; import java.util.Set; -import junit.framework.TestCase; - import org.junit.After; import org.junit.Before; +import org.junit.Ignore; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import junit.framework.ComparisonFailure; +import junit.framework.TestCase; + import org.openrdf.model.BNode; import org.openrdf.model.Literal; import org.openrdf.model.Model; @@ -39,9 +45,11 @@ import org.openrdf.model.vocabulary.DCTERMS; import org.openrdf.model.vocabulary.FOAF; import org.openrdf.model.vocabulary.OWL; +import org.openrdf.model.vocabulary.RDF; import org.openrdf.model.vocabulary.RDFS; import org.openrdf.model.vocabulary.SESAME; import org.openrdf.model.vocabulary.XMLSchema; +import org.openrdf.query.Binding; import org.openrdf.query.BindingSet; import org.openrdf.query.GraphQuery; import org.openrdf.query.QueryEvaluationException; @@ -50,15 +58,15 @@ import org.openrdf.query.TupleQuery; import org.openrdf.query.TupleQueryResult; import org.openrdf.query.TupleQueryResultHandlerBase; +import org.openrdf.query.impl.BindingImpl; import org.openrdf.query.impl.DatasetImpl; +import org.openrdf.query.impl.MapBindingSet; import org.openrdf.query.parser.sparql.manifest.SPARQL11ManifestTest; import org.openrdf.repository.Repository; import org.openrdf.repository.RepositoryConnection; import org.openrdf.repository.RepositoryException; import org.openrdf.rio.RDFFormat; import org.openrdf.rio.RDFParseException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * A set of compliance tests on SPARQL query functionality which can not be @@ -149,9 +157,10 @@ public void testNullContext1() assertNotNull(s); assertFalse(bob.equals(s)); // should not be present in default - // graph - assertFalse(alice.equals(s)); // should not be present in default - // graph + // graph + assertFalse(alice.equals(s)); // should not be present in + // default + // graph } result.close(); } @@ -161,6 +170,91 @@ public void testNullContext1() } } + @Test + public void testSES2373SubselectOptional() + throws Exception + { + conn.prepareUpdate(QueryLanguage.SPARQL, + "insert data {" + " ." + " 1 ." + " ." + + " ." + " 2 ." + " ." + + " ." + " 3 ." + " ." + + " ." + " 4 ." + " ." + + " ." + " 5 ." + " ." + + "}").execute(); + + StringBuilder qb = new StringBuilder(); + qb.append("select ?x { \n"); + qb.append(" { select ?v { ?v filter (?v = ) } }.\n"); + qb.append(" optional { select ?val { ?v ?val .} }\n"); + qb.append(" ?v ?x \n"); + qb.append("}\n"); + + TupleQueryResult res = conn.prepareTupleQuery(QueryLanguage.SPARQL, qb.toString()).evaluate(); + assertTrue("The query should return a result", res.hasNext()); + BindingSet b = res.next(); + assertTrue("?x is from the mandatory part of the query and should be bound", b.hasBinding("x")); + } + + @Test + public void testSES2154SubselectOptional() + throws Exception + { + StringBuilder ub = new StringBuilder(); + ub.append("insert data { \n"); + ub.append(" a . \n"); + ub.append(" a . \n"); + ub.append(" a . \n"); + ub.append(" a . \n"); + ub.append(" a . \n"); + ub.append(" a . \n"); + ub.append(" a . \n"); + ub.append(" a . \n"); + ub.append(" a . \n"); + ub.append(" a . \n"); + ub.append(" a . \n"); + ub.append(" a . \n"); + + ub.append(" \"01\" . \n"); + ub.append(" \"02\" . \n"); + ub.append(" \"03\" . \n"); + ub.append(" \"04\" . \n"); + ub.append(" \"05\" . \n"); + ub.append(" \"06\" . \n"); + ub.append(" \"07\" . \n"); + ub.append(" \"08\" . \n"); + ub.append(" \"09\" . \n"); + ub.append(" \"10\" . \n"); + ub.append(" \"11\" . \n"); + ub.append(" \"12\" . \n"); + ub.append("} \n"); + + conn.prepareUpdate(QueryLanguage.SPARQL, ub.toString()).execute(); + + StringBuilder qb = new StringBuilder(); + qb.append("SELECT ?s ?label\n"); + qb.append("WHERE { \n"); + qb.append(" ?s a \n .\n"); + qb.append(" OPTIONAL { {SELECT ?label WHERE { \n"); + qb.append(" ?s ?label . \n"); + qb.append(" } ORDER BY ?label LIMIT 2 \n"); + qb.append(" }\n"); + qb.append(" }\n"); + qb.append("}\n"); + qb.append("ORDER BY ?s\n"); + qb.append("LIMIT 10 \n"); + + TupleQueryResult res = conn.prepareTupleQuery(QueryLanguage.SPARQL, qb.toString()).evaluate(); + assertTrue("The query should return a result", res.hasNext()); + + List result = QueryResults.asList(res); + assertEquals(10, result.size()); + for (BindingSet bs : result) { + Literal label = (Literal)bs.getValue("label"); + assertTrue("wrong label value (expected '01' or '02', but got '" + label.stringValue() + "')", + label.stringValue().equals("01") || label.stringValue().equals("02")); + } + } + @Test public void testNullContext2() throws Exception @@ -186,9 +280,10 @@ public void testNullContext2() assertNotNull(s); assertFalse(bob.equals(s)); // should not be present in default - // graph - assertFalse(alice.equals(s)); // should not be present in default - // graph + // graph + assertFalse(alice.equals(s)); // should not be present in + // default + // graph } result.close(); } @@ -377,6 +472,114 @@ public void testDescribeF() } } + @Test + public void testDescribeMultipleA() + throws Exception + { + String update = "insert data { . [] . . } "; + conn.prepareUpdate(QueryLanguage.SPARQL, update).execute(); + + StringBuilder query = new StringBuilder(); + query.append(getNamespaceDeclarations()); + query.append("DESCRIBE "); + + GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query.toString()); + + ValueFactory vf = conn.getValueFactory(); + URI urn1 = vf.createURI("urn:1"); + URI p1 = vf.createURI("urn:p1"); + URI p2 = vf.createURI("urn:p2"); + URI urn2 = vf.createURI("urn:2"); + URI blank = vf.createURI("urn:blank"); + + Model result = QueryResults.asModel(gq.evaluate()); + assertTrue(result.contains(urn1, p1, null)); + assertTrue(result.contains(null, blank, urn1)); + assertTrue(result.contains(urn2, p2, null)); + } + + @Test + public void testDescribeMultipleB() + throws Exception + { + String update = "insert data { . [] . . } "; + conn.prepareUpdate(QueryLanguage.SPARQL, update).execute(); + + StringBuilder query = new StringBuilder(); + query.append(getNamespaceDeclarations()); + query.append("DESCRIBE "); + + GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query.toString()); + + ValueFactory vf = conn.getValueFactory(); + URI urn1 = vf.createURI("urn:1"); + URI p1 = vf.createURI("urn:p1"); + URI p2 = vf.createURI("urn:p2"); + URI urn2 = vf.createURI("urn:2"); + URI blank = vf.createURI("urn:blank"); + Model result = QueryResults.asModel(gq.evaluate()); + + assertTrue(result.contains(urn1, p1, null)); + assertTrue(result.contains(urn1, blank, null)); + assertTrue(result.contains(urn2, p2, null)); + } + + @Test + public void testDescribeMultipleC() + throws Exception + { + String update = "insert data { . [] . [] . . } "; + conn.prepareUpdate(QueryLanguage.SPARQL, update).execute(); + + StringBuilder query = new StringBuilder(); + query.append(getNamespaceDeclarations()); + query.append("DESCRIBE "); + + GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query.toString()); + + ValueFactory vf = conn.getValueFactory(); + URI urn1 = vf.createURI("urn:1"); + URI p1 = vf.createURI("urn:p1"); + URI p2 = vf.createURI("urn:p2"); + URI urn2 = vf.createURI("urn:2"); + URI blank = vf.createURI("urn:blank"); + Model result = QueryResults.asModel(gq.evaluate()); + + assertTrue(result.contains(urn1, p1, null)); + assertTrue(result.contains(urn1, blank, null)); + assertTrue(result.contains(null, blank, urn1)); + assertTrue(result.contains(urn2, p2, null)); + } + + @Test + public void testDescribeMultipleD() + throws Exception + { + String update = "insert data { . [] . . [] . . [] .} "; + conn.prepareUpdate(QueryLanguage.SPARQL, update).execute(); + + StringBuilder query = new StringBuilder(); + query.append(getNamespaceDeclarations()); + query.append("DESCRIBE "); + + GraphQuery gq = conn.prepareGraphQuery(QueryLanguage.SPARQL, query.toString()); + + ValueFactory vf = conn.getValueFactory(); + URI urn1 = vf.createURI("urn:1"); + URI p1 = vf.createURI("urn:p1"); + URI p2 = vf.createURI("urn:p2"); + URI urn2 = vf.createURI("urn:2"); + URI urn4 = vf.createURI("urn:4"); + URI blank = vf.createURI("urn:blank"); + Model result = QueryResults.asModel(gq.evaluate()); + + assertTrue(result.contains(urn1, p1, null)); + assertTrue(result.contains(null, blank, urn1)); + assertTrue(result.contains(urn2, p2, null)); + assertTrue(result.contains(urn4, p2, null)); + assertTrue(result.contains(urn4, blank, null)); + } + @Test public void testGroupConcatDistinct() throws Exception @@ -680,6 +883,48 @@ public void testSES1970CountDistinctWildcard() } } + @Test + public void testSES1685propPathSameVar() + throws Exception + { + final String queryStr = "PREFIX : SELECT ?x WHERE {?x :p+ ?x}"; + + conn.add(new StringReader("@prefix : . :a :p :b . :b :p :a ."), "", RDFFormat.TURTLE); + + TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryStr); + TupleQueryResult result = query.evaluate(); + + assertNotNull(result); + + int count = 0; + while (result.hasNext()) { + result.next(); + count++; + } + // result should be both a and b. + assertEquals(2, count); + } + + @Test + public void testSES2104ConstructBGPSameURI() + throws Exception + { + final String queryStr = "PREFIX : CONSTRUCT {:x :p :x } WHERE {} "; + + conn.add(new StringReader("@prefix : . :a :p :b . "), "", RDFFormat.TURTLE); + + final URI x = conn.getValueFactory().createURI("urn:x"); + final URI p = conn.getValueFactory().createURI("urn:p"); + + GraphQuery query = conn.prepareGraphQuery(QueryLanguage.SPARQL, queryStr); + Model result = QueryResults.asModel(query.evaluate()); + + assertNotNull(result); + assertFalse(result.isEmpty()); + assertTrue(result.contains(x, p, x)); + + } + @Test public void testSES1898LeftJoinSemantics2() throws Exception @@ -723,7 +968,8 @@ public void testIdenticalVariablesInStatementPattern() queryBuilder.append("{ ?publisher ?publisher }"); conn.prepareTupleQuery(QueryLanguage.SPARQL, queryBuilder.toString()).evaluate( - new TupleQueryResultHandlerBase() { + new TupleQueryResultHandlerBase() + { public void handleSolution(BindingSet bindingSet) { fail("nobody is self published"); @@ -793,6 +1039,82 @@ public void testInComparison3() assertEquals(f.createLiteral("1", XMLSchema.INTEGER), y); } + @Test + public void testSES2121URIFunction() + throws Exception + { + String query = "SELECT (URI(\"foo bar\") as ?uri) WHERE {}"; + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQueryResult result = tq.evaluate(); + assertNotNull(result); + assertTrue(result.hasNext()); + BindingSet bs = result.next(); + URI uri = (URI)bs.getValue("uri"); + assertTrue("uri result for invalid URI should be unbound", uri == null); + + query = "BASE SELECT (URI(\"foo bar\") as ?uri) WHERE {}"; + tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + result = tq.evaluate(); + assertNotNull(result); + assertTrue(result.hasNext()); + bs = result.next(); + uri = (URI)bs.getValue("uri"); + assertTrue("uri result for valid URI reference should be bound", uri != null); + } + + @Test + @Ignore("Fails due to several NOW function executions produce different results") + // It is a new test added during update to Sesame 2.8, but fail is not introduced with the Sesame update, + // it was rather not supported functionality already. Should be fixed as a separate commit. + public void _testSES869ValueOfNow() + throws Exception + { + StringBuilder query = new StringBuilder(); + query.append("SELECT ?p ( NOW() as ?n ) { BIND (NOW() as ?p ) }"); + + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query.toString()); + + TupleQueryResult result = tq.evaluate(); + assertNotNull(result); + assertTrue(result.hasNext()); + + BindingSet bs = result.next(); + Value p = bs.getValue("p"); + Value n = bs.getValue("n"); + + assertNotNull(p); + assertNotNull(n); + assertEquals(p, n); + + } + + @Test + public void testSES2136() + throws Exception + { + loadTestData("/testcases-sparql-1.1-w3c/bindings/data02.ttl"); + StringBuilder query = new StringBuilder(); + query.append("PREFIX : \n"); + query.append("SELECT ?s ?o { \n"); + query.append(" { SELECT * WHERE { ?s ?p ?o . } }\n"); + query.append(" VALUES (?o) { (:b) }\n"); + query.append("}\n"); + + ValueFactory vf = conn.getValueFactory(); + final URI a = vf.createURI("http://example.org/a"); + final URI b = vf.createURI("http://example.org/b"); + + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query.toString()); + + TupleQueryResult result = tq.evaluate(); + assertNotNull(result); + assertTrue(result.hasNext()); + BindingSet bs = result.next(); + assertFalse("only one result expected", result.hasNext()); + assertEquals(a, bs.getValue("s")); + assertEquals(b, bs.getValue("o")); + } + @Test public void testValuesInOptional() throws Exception @@ -800,7 +1122,8 @@ public void testValuesInOptional() loadTestData("/testdata-query/dataset-ses1692.trig"); StringBuilder query = new StringBuilder(); query.append(" PREFIX : \n"); - query.append(" SELECT DISTINCT ?a ?name ?isX WHERE { ?b :p1 ?a . ?a :name ?name. OPTIONAL { ?a a :X . VALUES(?isX) { (:X) } } } "); + query.append( + " SELECT DISTINCT ?a ?name ?isX WHERE { ?b :p1 ?a . ?a :name ?name. OPTIONAL { ?a a :X . VALUES(?isX) { (:X) } } } "); TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query.toString()); @@ -828,6 +1151,70 @@ else if (a.stringValue().endsWith(("a2"))) { assertEquals(2, count); } + @Test + public void testSES2052If1() + throws Exception + { + loadTestData("/testdata-query/dataset-query.trig"); + StringBuilder query = new StringBuilder(); + query.append("SELECT ?p \n"); + query.append("WHERE { \n"); + query.append(" ?s ?p ?o . \n"); + query.append( + " FILTER(IF(BOUND(?p), ?p = , false)) \n"); + query.append("}"); + + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query.toString()); + try { + TupleQueryResult result = tq.evaluate(); + assertNotNull(result); + while (result.hasNext()) { + BindingSet bs = result.next(); + + URI p = (URI)bs.getValue("p"); + assertNotNull(p); + assertEquals(RDF.TYPE, p); + } + } + catch (Exception e) { + e.printStackTrace(); + fail(e.getMessage()); + } + + } + + @Test + public void testSES2052If2() + throws Exception + { + loadTestData("/testdata-query/dataset-query.trig"); + StringBuilder query = new StringBuilder(); + query.append("SELECT ?p \n"); + query.append("WHERE { \n"); + query.append(" ?s ?p ?o . \n"); + query.append( + " FILTER(IF(!BOUND(?p), false , ?p = )) \n"); + query.append("}"); + + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query.toString()); + try { + TupleQueryResult result = tq.evaluate(); + assertNotNull(result); + while (result.hasNext()) { + BindingSet bs = result.next(); + + URI p = (URI)bs.getValue("p"); + assertNotNull(p); + assertEquals(RDF.TYPE, p); + } + } + catch (Exception e) { + e.printStackTrace(); + fail(e.getMessage()); + } + + } + @Test public void testSameTermRepeatInUnion() throws Exception @@ -854,16 +1241,16 @@ public void testSameTermRepeatInUnion() int count = 0; while (result.hasNext()) { - BindingSet bs = result.next(); + BindingSet bs = result.next(); count++; assertNotNull(bs); - + System.out.println(bs); - + Value mbox = bs.getValue("mbox"); Value x = bs.getValue("x"); - - assertTrue(mbox instanceof Literal || x instanceof URI); + + assertTrue(mbox instanceof Literal || x instanceof URI); } result.close(); @@ -993,7 +1380,7 @@ public void testFilterRegexBoolean() query.append(" foaf:mbox ?mbox . "); query.append(" FILTER(EXISTS { "); query.append(" FILTER(REGEX(?name, \"Bo\") && REGEX(?mbox, \"bob\")) "); - // query.append(" FILTER(REGEX(?mbox, \"bob\")) "); + // query.append(" FILTER(REGEX(?mbox, \"bob\")) "); query.append(" } )"); query.append(" } "); @@ -1531,7 +1918,8 @@ public void testArbitraryLengthPathWithFilter1() StringBuilder query = new StringBuilder(); query.append(getNamespaceDeclarations()); query.append("SELECT ?parent ?child "); - query.append("WHERE { ?child a owl:Class . ?child rdfs:subClassOf+ ?parent . FILTER (?parent = owl:Thing) }"); + query.append( + "WHERE { ?child a owl:Class . ?child rdfs:subClassOf+ ?parent . FILTER (?parent = owl:Thing) }"); TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query.toString()); @@ -1627,6 +2015,47 @@ public void testArbitraryLengthPathWithFilter3() } + @Test + public void testSES2147PropertyPathsWithIdenticalSubsPreds() + throws Exception + { + + StringBuilder data = new StringBuilder(); + data.append(" .\n"); + data.append(" .\n"); + data.append(" .\n"); + data.append(" .\n"); + data.append(" .\n"); + + conn.begin(); + conn.add(new StringReader(data.toString()), "", RDFFormat.NTRIPLES); + conn.commit(); + + StringBuilder query = new StringBuilder(); + query.append(getNamespaceDeclarations()); + query.append("SELECT ?x \n"); + query.append("WHERE { ?x */ . \n"); + query.append(" ?x */ . \n"); + query.append("} \n"); + + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query.toString()); + + try { + TupleQueryResult result = tq.evaluate(); + assertNotNull(result); + assertTrue(result.hasNext()); + + Value x = result.next().getValue("x"); + assertNotNull(x); + assertTrue(x instanceof URI); + assertEquals("urn:s1", x.stringValue()); + } + catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); + } + } + @Test public void testSES1991UUIDEvaluation() throws Exception @@ -1702,7 +2131,8 @@ public void testSES1991RANDEvaluation() assertNotNull(r1); // there is a small chance that two successive calls to the random - // number generator will generate the exact same value, so we check for + // number generator will generate the exact same value, so we check + // for // three successive calls (still theoretically possible to be // identical, but phenomenally unlikely). assertFalse(r1.equals(r2) && r1.equals(r3)); @@ -1743,9 +2173,11 @@ public void testSES1991NOWEvaluation() } @Test - public void testSES2024PropertyPathAnonVarSharing() throws Exception { + public void testSES2024PropertyPathAnonVarSharing() + throws Exception + { loadTestData("/testdata-query/dataset-ses2024.trig"); - String query = "PREFIX : SELECT * WHERE { ?x1 :p/:lit ?l1 . ?x1 :diff ?x2 . ?x2 :p/:lit ?l2 . }" ; + String query = "PREFIX : SELECT * WHERE { ?x1 :p/:lit ?l1 . ?x1 :diff ?x2 . ?x2 :p/:lit ?l2 . }"; TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); @@ -1768,6 +2200,145 @@ public void testSES2024PropertyPathAnonVarSharing() throws Exception { } } + @Test + public void testSES2361UndefMin() + throws Exception + { + String query = "SELECT (MIN(?v) as ?min) WHERE { VALUES ?v { 1 2 undef 3 4 }}"; + TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate(); + try { + assertNotNull(result); + assertTrue(result.hasNext()); + assertEquals("1", result.next().getValue("min").stringValue()); + assertFalse(result.hasNext()); + } + finally { + result.close(); + } + } + + @Test + public void testSES2361UndefMax() + throws Exception + { + String query = "SELECT (MAX(?v) as ?max) WHERE { VALUES ?v { 1 2 7 undef 3 4 }}"; + TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate(); + try { + assertNotNull(result); + assertTrue(result.hasNext()); + assertEquals("7", result.next().getValue("max").stringValue()); + assertFalse(result.hasNext()); + } + finally { + result.close(); + } + } + + @Test + public void testSES2361UndefCount() + throws Exception + { + String query = "SELECT (COUNT(?v) as ?c) WHERE { VALUES ?v { 1 2 undef 3 4 }}"; + TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate(); + try { + assertNotNull(result); + assertTrue(result.hasNext()); + assertEquals("4", result.next().getValue("c").stringValue()); + assertFalse(result.hasNext()); + } + finally { + result.close(); + } + } + + @Test + public void testSES2361UndefCountWildcard() + throws Exception + { + String query = "SELECT (COUNT(*) as ?c) WHERE { VALUES ?v { 1 2 undef 3 4 }}"; + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQueryResult result = tq.evaluate(); + try { + assertNotNull(result); + assertTrue(result.hasNext()); + assertEquals("4", result.next().getValue("c").stringValue()); + assertFalse(result.hasNext()); + } catch (ComparisonFailure e) { + System.out.println(tq); + e.printStackTrace(); + fail(e.getMessage()); + } + finally { + result.close(); + } + } + + @Test + public void testSES2361UndefSum() + throws Exception + { + String query = "SELECT (SUM(?v) as ?s) WHERE { VALUES ?v { 1 2 undef 3 4 }}"; + TupleQueryResult result = conn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate(); + try { + assertNotNull(result); + assertTrue(result.hasNext()); + assertEquals("10", result.next().getValue("s").stringValue()); + assertFalse(result.hasNext()); + } + finally { + result.close(); + } + } + + @Test + @Ignore("Failing due to unsupported negated property path modifiers") + // It is a new test added during update to Sesame 2.8, but fail is not introduced with the Sesame update, + // it was rather not supported functionality already. Should be fixed as a separate commit. + public void _testSES2336NegatedPropertyPathMod() + throws Exception + { + loadTestData("/testdata-query/dataset-ses2336.trig"); + String query = "prefix : select * where { ?s a :Test ; ! :p ? ?o . }"; + + ValueFactory vf = conn.getValueFactory(); + TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); + + try { + List result = QueryResults.asList(tq.evaluate()); + assertNotNull(result); + + URI a = vf.createURI(EX_NS, "a"); + URI b = vf.createURI(EX_NS, "b"); + URI c = vf.createURI(EX_NS, "c"); + URI d = vf.createURI(EX_NS, "d"); + URI e = vf.createURI(EX_NS, "e"); + URI test = vf.createURI(EX_NS, "Test"); + + assertTrue(containsSolution(result, new BindingImpl("s", a), new BindingImpl("o", a))); + assertTrue(containsSolution(result, new BindingImpl("s", a), new BindingImpl("o", test))); + assertTrue(containsSolution(result, new BindingImpl("s", a), new BindingImpl("o", c))); + assertTrue(containsSolution(result, new BindingImpl("s", d), new BindingImpl("o", d))); + assertTrue(containsSolution(result, new BindingImpl("s", d), new BindingImpl("o", e))); + assertTrue(containsSolution(result, new BindingImpl("s", d), new BindingImpl("o", test))); + + assertFalse(containsSolution(result, new BindingImpl("s", a), new BindingImpl("o", b))); + + } + catch (QueryEvaluationException e) { + e.printStackTrace(); + fail(e.getMessage()); + } + + } + + private boolean containsSolution(List result, Binding... solution) { + final MapBindingSet bs = new MapBindingSet(); + for (Binding b : solution) { + bs.addBinding(b); + } + return result.contains(bs); + } + /* private / protected methods */ private int countCharOccurrences(String string, char ch) { diff --git a/bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/SPARQLDataSet.java b/bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/SPARQLDataSet.java new file mode 100644 index 0000000000..bf31d0debd --- /dev/null +++ b/bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/SPARQLDataSet.java @@ -0,0 +1,59 @@ +/* + * Licensed to Aduna under one or more contributor license agreements. + * See the NOTICE.txt file distributed with this work for additional + * information regarding copyright ownership. + * + * Aduna licenses this file to you under the terms of the Aduna BSD + * License (the "License"); you may not use this file except in compliance + * with the License. See the LICENSE.txt file distributed with this work + * for the full License. + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + * implied. See the License for the specific language governing permissions + * and limitations under the License. + */ +package org.openrdf.query.parser.sparql; + +import java.util.HashMap; +import java.util.Set; + +public class SPARQLDataSet { + + private HashMap namedGraphs = new HashMap(); + + private String defaultGraph; + + public SPARQLDataSet() { + } + + public SPARQLDataSet(String defaultGraph) { + this(); + setDefaultGraph(defaultGraph); + } + + public void setDefaultGraph(String defaultGraph) { + this.defaultGraph = defaultGraph; + } + + public String getDefaultGraph() { + return defaultGraph; + } + + public void addNamedGraph(String graphName, String graphLocation) { + namedGraphs.put(graphName, graphLocation); + } + + public boolean hasNamedGraphs() { + return (!namedGraphs.isEmpty()); + } + + public Set getGraphNames() { + return namedGraphs.keySet(); + } + + public String getGraphLocation(String graphName) { + return namedGraphs.get(graphName); + } +} diff --git a/bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/SPARQLUpdateTest.java b/bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/SPARQLUpdateTest.java index 2ff419f4fe..f4eafbce82 100644 --- a/bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/SPARQLUpdateTest.java +++ b/bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/SPARQLUpdateTest.java @@ -14,19 +14,31 @@ * implied. See the License for the specific language governing permissions * and limitations under the License. */ -/* - * Pulled in to extend TestCase. - */ package org.openrdf.query.parser.sparql; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + import java.io.IOException; import java.io.InputStream; -import junit.framework.TestCase; - import org.junit.After; import org.junit.Before; +import org.junit.Ignore; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.bigdata.journal.ITx; +import com.bigdata.rdf.internal.XSD; +import com.bigdata.rdf.sail.BigdataSailRepository; +import com.bigdata.rdf.sail.BigdataSailRepositoryConnection; + +import junit.framework.TestCase; + import org.openrdf.model.BNode; import org.openrdf.model.Literal; import org.openrdf.model.Resource; @@ -50,10 +62,6 @@ import org.openrdf.repository.RepositoryResult; import org.openrdf.rio.RDFFormat; import org.openrdf.rio.RDFParseException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.bigdata.rdf.store.BD; /** * Tests for SPARQL 1.1 Update functionality. @@ -512,82 +520,63 @@ public void testDeleteInsertWhereLoopingBehavior() assertFalse(con.hasStatement(bob, age, inCorrectAgeValue, true)); } - /** - * This test fails for two reasons. - * - * (1) It appears that openrdf does not imply a commit() when execute() is - * invoked on a prepared SPARQL UPDATE. However, bigdata does a commit() as - * the last action for the SPARQL UPDATE. - * - * (2) It relies on different transaction semantics. The snapshot isolation - * semantics of bigdata read-only connections mean that con2 will never see - * the mutation from con. - * - * Since we can not "fix" (2) (it is not an error - we have better - * transaction semantics), I am going to comment out this test. - * - * It is an open question whether we want to fix (1). I prefer our - * interpretation that SPARQL UPDATE execute() implies a commit(). - * - * Bryan 11/11/2014 - */ -// @Test -// public void testAutoCommitHandling() -// throws Exception -// { -// logger.debug("executing test testAutoCommitHandling"); -// -// StringBuilder update = new StringBuilder(); -// update.append(getNamespaceDeclarations()); -// update.append("DELETE { ?x foaf:name ?y } INSERT {?x rdfs:label ?y . } WHERE {?x foaf:name ?y }"); -// -// try { -// con.begin(); -// Update operation = con.prepareUpdate(QueryLanguage.SPARQL, update.toString()); -// -// assertFalse(con.hasStatement(bob, RDFS.LABEL, f.createLiteral("Bob"), true)); -// assertFalse(con.hasStatement(alice, RDFS.LABEL, f.createLiteral("Alice"), true)); -// -// operation.execute(); -// -// // update should be visible to own connection. -// assertTrue(con.hasStatement(bob, RDFS.LABEL, f.createLiteral("Bob"), true)); -// assertTrue(con.hasStatement(alice, RDFS.LABEL, f.createLiteral("Alice"), true)); -// -// assertFalse(con.hasStatement(bob, FOAF.NAME, f.createLiteral("Bob"), true)); -// assertFalse(con.hasStatement(alice, FOAF.NAME, f.createLiteral("Alice"), true)); -// -// RepositoryConnection con2 = rep.getConnection(); -// try { -// // update should not yet be visible to separate connection -// assertFalse(con2.hasStatement(bob, RDFS.LABEL, f.createLiteral("Bob"), true)); -// assertFalse(con2.hasStatement(alice, RDFS.LABEL, f.createLiteral("Alice"), true)); -// -// assertTrue(con2.hasStatement(bob, FOAF.NAME, f.createLiteral("Bob"), true)); -// assertTrue(con2.hasStatement(alice, FOAF.NAME, f.createLiteral("Alice"), true)); -// -// con.commit(); -// -// // after commit, update should be visible to separate connection. -// assertTrue(con2.hasStatement(bob, RDFS.LABEL, f.createLiteral("Bob"), true)); -// assertTrue(con2.hasStatement(alice, RDFS.LABEL, f.createLiteral("Alice"), true)); -// -// assertFalse(con2.hasStatement(bob, FOAF.NAME, f.createLiteral("Bob"), true)); -// assertFalse(con2.hasStatement(alice, FOAF.NAME, f.createLiteral("Alice"), true)); -// } -// finally { -// con2.close(); -// } -// } -// catch (Exception e) { -// if (con.isActive()) { -// con.rollback(); -// } -// } -// finally { -// con.close(); -// } -// } + @Test + public void testAutoCommitHandling() + throws Exception + { + logger.debug("executing test testAutoCommitHandling"); + + StringBuilder update = new StringBuilder(); + update.append(getNamespaceDeclarations()); + update.append("DELETE { ?x foaf:name ?y } INSERT {?x rdfs:label ?y . } WHERE {?x foaf:name ?y }"); + + try { + con.begin(); + Update operation = con.prepareUpdate(QueryLanguage.SPARQL, update.toString()); + + assertFalse(con.hasStatement(bob, RDFS.LABEL, f.createLiteral("Bob"), true)); + assertFalse(con.hasStatement(alice, RDFS.LABEL, f.createLiteral("Alice"), true)); + + operation.execute(); + + // update should be visible to own connection. + assertTrue(con.hasStatement(bob, RDFS.LABEL, f.createLiteral("Bob"), true)); + assertTrue(con.hasStatement(alice, RDFS.LABEL, f.createLiteral("Alice"), true)); + + assertFalse(con.hasStatement(bob, FOAF.NAME, f.createLiteral("Bob"), true)); + assertFalse(con.hasStatement(alice, FOAF.NAME, f.createLiteral("Alice"), true)); + + RepositoryConnection con2 = rep.getConnection(); + try { + // update should not yet be visible to separate connection + assertFalse(con2.hasStatement(bob, RDFS.LABEL, f.createLiteral("Bob"), true)); + assertFalse(con2.hasStatement(alice, RDFS.LABEL, f.createLiteral("Alice"), true)); + + assertTrue(con2.hasStatement(bob, FOAF.NAME, f.createLiteral("Bob"), true)); + assertTrue(con2.hasStatement(alice, FOAF.NAME, f.createLiteral("Alice"), true)); + + con.commit(); + + // after commit, update should be visible to separate connection. + assertTrue(con2.hasStatement(bob, RDFS.LABEL, f.createLiteral("Bob"), true)); + assertTrue(con2.hasStatement(alice, RDFS.LABEL, f.createLiteral("Alice"), true)); + + assertFalse(con2.hasStatement(bob, FOAF.NAME, f.createLiteral("Bob"), true)); + assertFalse(con2.hasStatement(alice, FOAF.NAME, f.createLiteral("Alice"), true)); + } + finally { + con2.close(); + } + } + catch (Exception e) { + if (con.isActive()) { + con.rollback(); + } + } + finally { + con.close(); + } + } @Test public void testConsecutiveUpdatesInSameTransaction() @@ -697,6 +686,35 @@ public void testInsertWhereUsing() assertFalse(message, con.hasStatement(bob, RDFS.LABEL, f.createLiteral("Bob"), true, graph2)); assertFalse(message, con.hasStatement(alice, RDFS.LABEL, f.createLiteral("Alice"), true)); } + + @Test + @Ignore("Fails due to WITH graph is used to evaluate WHERE clause instead of USING graph") + // It is a new test added during update to Sesame 2.8, but fail is not introduced with the Sesame update, + // it was rather not supported functionality already. Should be fixed as a separate commit. + public void _testInsertWhereUsingWith() + throws Exception + { + + logger.debug("executing testInsertWhereUsingWith"); + StringBuilder update = new StringBuilder(); + update.append(getNamespaceDeclarations()); + update.append("WITH ex:graph2 INSERT {?x rdfs:label ?y . } USING ex:graph1 WHERE {?x foaf:name ?y }"); + + Update operation = con.prepareUpdate(QueryLanguage.SPARQL, update.toString()); + + operation.execute(); + con.commit(); + + System.out.println( + ((BigdataSailRepositoryConnection)con).getTripleStore().dumpStore() + ); +// .getSail().getIndexManager()) +// .getJournal(ITx.UNISOLATED).get..getIn.dump(); + String message = "label should have been inserted in graph2, for ex:bob only"; + assertTrue(message, con.hasStatement(bob, RDFS.LABEL, f.createLiteral("Bob", XSD.STRING), true, graph2)); + assertFalse(message, con.hasStatement(bob, RDFS.LABEL, f.createLiteral("Bob"), true, graph1)); + assertFalse(message, con.hasStatement(alice, RDFS.LABEL, f.createLiteral("Alice"), true)); + } @Test public void testInsertWhereWith() @@ -833,15 +851,112 @@ public void testInsertData2() URI book1 = f.createURI(EX_NS, "book1"); - assertFalse(con.hasStatement(book1, DC.TITLE, f.createLiteral("the number four", XMLSchema.INTEGER), true)); + assertFalse(con.hasStatement(book1, DC.TITLE, f.createLiteral("the number four", XMLSchema.INTEGER), + true)); operation.execute(); String msg = "new statement about ex:book1 should have been inserted"; - assertTrue(msg, con.hasStatement(book1, DC.TITLE, f.createLiteral("the number four", XMLSchema.INTEGER), true)); + assertTrue(msg, + con.hasStatement(book1, DC.TITLE, f.createLiteral("the number four", XMLSchema.INTEGER), true)); } - + + @Test + public void testInsertDataLangTaggedLiteral() + throws Exception + { + logger.debug("executing testInsertDataLangTaggedLiteral"); + + StringBuilder update = new StringBuilder(); + update.append(getNamespaceDeclarations()); + update.append("INSERT DATA { ex:book1 dc:title \"book 1\"@en . } "); + + Update operation = con.prepareUpdate(QueryLanguage.SPARQL, update.toString()); + + URI book1 = f.createURI(EX_NS, "book1"); + + assertFalse(con.hasStatement(book1, DC.TITLE, f.createLiteral("book 1", "en"), true)); + + operation.execute(); + + String msg = "new statement about ex:book1 should have been inserted"; + assertTrue(msg, con.hasStatement(book1, DC.TITLE, f.createLiteral("book 1", "en"), true)); + } + + @Test + public void testInsertDataGraph1() + throws Exception + { + logger.debug("executing testInsertDataGraph1"); + + StringBuilder update = new StringBuilder(); + update.append("INSERT DATA { \n"); + update.append("GRAPH { . } \n"); + update.append(" a . \n"); + update.append("}"); + + Update operation = con.prepareUpdate(QueryLanguage.SPARQL, update.toString()); + assertFalse(con.hasStatement(f.createURI("urn:s1"), RDF.TYPE, null, true, (Resource)null)); + assertFalse(con.hasStatement(f.createURI("urn:s1"), f.createURI("urn:p1"), f.createURI("urn:o1"), true, + f.createURI("urn:g1"))); + operation.execute(); + assertTrue(con.hasStatement(f.createURI("urn:s1"), RDF.TYPE, null, true, (Resource)null)); + assertTrue(con.hasStatement(f.createURI("urn:s1"), f.createURI("urn:p1"), f.createURI("urn:o1"), true, + f.createURI("urn:g1"))); + } + + @Test + public void testInsertDataGraph2() + throws Exception + { + logger.debug("executing testInsertDataGraph2"); + + StringBuilder update = new StringBuilder(); + update.append("INSERT DATA { \n"); + update.append(" a . \n"); + update.append("GRAPH { . } \n"); + update.append("}"); + + Update operation = con.prepareUpdate(QueryLanguage.SPARQL, update.toString()); + assertFalse(con.hasStatement(f.createURI("urn:s1"), RDF.TYPE, null, true, (Resource)null)); + assertFalse(con.hasStatement(f.createURI("urn:s1"), f.createURI("urn:p1"), f.createURI("urn:o1"), true, + f.createURI("urn:g1"))); + operation.execute(); + assertTrue(con.hasStatement(f.createURI("urn:s1"), RDF.TYPE, null, true, (Resource)null)); + assertTrue(con.hasStatement(f.createURI("urn:s1"), f.createURI("urn:p1"), f.createURI("urn:o1"), true, + f.createURI("urn:g1"))); + } + + @Test + public void testInsertDataGraph3() + throws Exception + { + logger.debug("executing testInsertDataGraph3"); + + StringBuilder update = new StringBuilder(); + update.append("INSERT DATA { \n"); + update.append(" a . \n"); + update.append("GRAPH { . } \n"); + update.append(" a \n"); + update.append("}"); + + Update operation = con.prepareUpdate(QueryLanguage.SPARQL, update.toString()); + assertFalse(con.hasStatement(f.createURI("urn:s1"), RDF.TYPE, null, true, (Resource)null)); + assertFalse(con.hasStatement(f.createURI("urn:s1"), f.createURI("urn:p1"), f.createURI("urn:o1"), true, + f.createURI("urn:g1"))); + + assertFalse(con.hasStatement(f.createURI("urn:s2"), f.createURI("urn:p2"), f.createURI("urn:o2"), true, + f.createURI("urn:g1"))); + operation.execute(); + assertTrue(con.hasStatement(f.createURI("urn:s1"), RDF.TYPE, null, true, (Resource)null)); + assertTrue(con.hasStatement(f.createURI("urn:s2"), RDF.TYPE, null, true, (Resource)null)); + assertTrue(con.hasStatement(f.createURI("urn:s1"), f.createURI("urn:p1"), f.createURI("urn:o1"), true, + f.createURI("urn:g1"))); + assertTrue(con.hasStatement(f.createURI("urn:s2"), f.createURI("urn:p2"), f.createURI("urn:o2"), true, + f.createURI("urn:g1"))); + } + @Test public void testInsertDataBlankNode() throws Exception @@ -986,6 +1101,28 @@ public void testDeleteData() assertFalse(msg, con.hasStatement(alice, FOAF.KNOWS, bob, true)); } + @Test + public void testDeleteDataUnicode() + throws Exception + { + URI i18n = con.getValueFactory().createURI(EX_NS, "東京"); + + con.add(i18n, FOAF.KNOWS, bob); + + logger.debug("executing testDeleteData"); + StringBuilder update = new StringBuilder(); + update.append(getNamespaceDeclarations()); + update.append("DELETE DATA { ex:東京 foaf:knows ex:bob. } "); + + Update operation = con.prepareUpdate(QueryLanguage.SPARQL, update.toString()); + + assertTrue(con.hasStatement(i18n, FOAF.KNOWS, bob, true)); + operation.execute(); + + String msg = "statement should have been deleted."; + assertFalse(msg, con.hasStatement(i18n, FOAF.KNOWS, bob, true)); + } + @Test public void testDeleteDataMultiplePatterns() throws Exception @@ -1088,7 +1225,9 @@ public void testCreateExistingGraph() } catch (UpdateExecutionException e) { // expected behavior - con.rollback(); + if (con.isActive()) { + con.rollback(); + } } } @@ -1597,7 +1736,6 @@ public void testUpdateSequenceInsertDeleteExample9() // replace the standard dataset with one specific to this case. con.clear(); - con.commit(); loadDataset("/testdata-update/dataset-update-example9.trig"); URI book1 = f.createURI("http://example/book1"); @@ -1701,7 +1839,6 @@ protected String getNamespaceDeclarations() { declarations.append("PREFIX foaf: <" + FOAF.NAMESPACE + "> \n"); declarations.append("PREFIX ex: <" + EX_NS + "> \n"); declarations.append("PREFIX xsd: <" + XMLSchema.NAMESPACE + "> \n"); - declarations.append("PREFIX bd: <" + BD.NAMESPACE + "> \n"); declarations.append("\n"); return declarations.toString(); diff --git a/bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/manifest/ManifestTest.java b/bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/manifest/ManifestTest.java new file mode 100644 index 0000000000..e003b83ef9 --- /dev/null +++ b/bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/manifest/ManifestTest.java @@ -0,0 +1,194 @@ +/* + * Licensed to Aduna under one or more contributor license agreements. + * See the NOTICE.txt file distributed with this work for additional + * information regarding copyright ownership. + * + * Aduna licenses this file to you under the terms of the Aduna BSD + * License (the "License"); you may not use this file except in compliance + * with the License. See the LICENSE.txt file distributed with this work + * for the full License. + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + * implied. See the License for the specific language governing permissions + * and limitations under the License. + */ +package org.openrdf.query.parser.sparql.manifest; + +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.net.JarURLConnection; +import java.net.URL; +import java.util.jar.JarFile; + +import junit.framework.TestResult; +import junit.framework.TestSuite; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import info.aduna.io.ZipUtil; +import info.aduna.io.FileUtil; + +import org.openrdf.OpenRDFUtil; +import org.openrdf.model.Resource; +import org.openrdf.model.ValueFactory; +import org.openrdf.query.BindingSet; +import org.openrdf.query.QueryLanguage; +import org.openrdf.query.TupleQueryResult; +import org.openrdf.repository.Repository; +import org.openrdf.repository.RepositoryConnection; +import org.openrdf.repository.RepositoryException; +import org.openrdf.repository.sail.SailRepository; +import org.openrdf.repository.util.RDFInserter; +import org.openrdf.rio.RDFHandlerException; +import org.openrdf.rio.RDFParseException; +import org.openrdf.rio.RDFParser; +import org.openrdf.rio.turtle.TurtleParser; +import org.openrdf.sail.memory.MemoryStore; + +public class ManifestTest { + + static final Logger logger = LoggerFactory.getLogger(ManifestTest.class); + + private static final boolean REMOTE = false; + + public static TestSuite suite(SPARQLQueryTest.Factory factory) + throws Exception + { + final String manifestFile; + final File tmpDir; + + if (REMOTE) { + manifestFile = "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/manifest-evaluation.ttl"; + tmpDir = null; + } + else { + URL url = ManifestTest.class.getResource("/testcases-dawg/data-r2/manifest-evaluation.ttl"); + + if ("jar".equals(url.getProtocol())) { + // Extract manifest files to a temporary directory + try { + tmpDir = FileUtil.createTempDir("sparql-evaluation"); + + JarURLConnection con = (JarURLConnection)url.openConnection(); + JarFile jar = con.getJarFile(); + + ZipUtil.extract(jar, tmpDir); + + File localFile = new File(tmpDir, con.getEntryName()); + manifestFile = localFile.toURI().toURL().toString(); + } + catch (IOException e) { + throw new AssertionError(e); + } + } + else { + manifestFile = url.toString(); + tmpDir = null; + } + } + + TestSuite suite = new TestSuite(factory.getClass().getName()) { + + @Override + public void run(TestResult result) { + try { + super.run(result); + } + finally { + if (tmpDir != null) { + try { + FileUtil.deleteDir(tmpDir); + } + catch (IOException e) { + System.err.println("Unable to clean up temporary directory '" + tmpDir + "': " + + e.getMessage()); + } + } + } + } + }; + + Repository manifestRep = new SailRepository(new MemoryStore()); + manifestRep.initialize(); + RepositoryConnection con = manifestRep.getConnection(); + + addTurtle(con, new URL(manifestFile), manifestFile); + + String query = "SELECT DISTINCT manifestFile FROM {x} rdf:first {manifestFile} " + + "USING NAMESPACE mf = , " + + " qt = "; + + TupleQueryResult manifestResults = con.prepareTupleQuery(QueryLanguage.SERQL, query, manifestFile).evaluate(); + + while (manifestResults.hasNext()) { + BindingSet bindingSet = manifestResults.next(); + String subManifestFile = bindingSet.getValue("manifestFile").stringValue(); + suite.addTest(SPARQLQueryTest.suite(subManifestFile, factory)); + } + + manifestResults.close(); + con.close(); + manifestRep.shutDown(); + + logger.info("Created aggregated test suite with " + suite.countTestCases() + " test cases."); + return suite; + } + + static void addTurtle(RepositoryConnection con, URL url, String baseURI, Resource... contexts) + throws IOException, RepositoryException, RDFParseException, RDFHandlerException + { + if (baseURI == null) { + baseURI = url.toExternalForm(); + } + + InputStream in = url.openStream(); + + try { + OpenRDFUtil.verifyContextNotNull(contexts); + final ValueFactory vf = con.getRepository().getValueFactory(); + RDFParser rdfParser = new TurtleParser(); + rdfParser.setValueFactory(vf); + + rdfParser.setVerifyData(false); + rdfParser.setStopAtFirstError(true); + rdfParser.setDatatypeHandling(RDFParser.DatatypeHandling.IGNORE); + + RDFInserter rdfInserter = new RDFInserter(con); + rdfInserter.enforceContext(contexts); + rdfParser.setRDFHandler(rdfInserter); + + con.begin(); + + try { + rdfParser.parse(in, baseURI); + con.commit(); + } + catch (RDFHandlerException e) { + if (con.isActive()) { + con.rollback(); + } + if (e.getCause() != null && e.getCause() instanceof RepositoryException) { + // RDFInserter only throws wrapped RepositoryExceptions + throw (RepositoryException)e.getCause(); + } + else { + throw e; + } + + } + catch (RuntimeException e) { + if (con.isActive()) { + con.rollback(); + } + throw e; + } + } + finally { + in.close(); + } + } +} diff --git a/bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/manifest/SPARQL11SyntaxTest.java b/bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/manifest/SPARQL11SyntaxTest.java new file mode 100644 index 0000000000..2ed485bc91 --- /dev/null +++ b/bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/manifest/SPARQL11SyntaxTest.java @@ -0,0 +1,325 @@ +/* + * Licensed to Aduna under one or more contributor license agreements. + * See the NOTICE.txt file distributed with this work for additional + * information regarding copyright ownership. + * + * Aduna licenses this file to you under the terms of the Aduna BSD + * License (the "License"); you may not use this file except in compliance + * with the License. See the LICENSE.txt file distributed with this work + * for the full License. + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + * implied. See the License for the specific language governing permissions + * and limitations under the License. + */ +package org.openrdf.query.parser.sparql.manifest; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.net.JarURLConnection; +import java.net.URL; +import java.util.ArrayList; +import java.util.Enumeration; +import java.util.List; +import java.util.jar.JarEntry; +import java.util.jar.JarFile; + +import junit.framework.Test; +import junit.framework.TestCase; +import junit.framework.TestResult; +import junit.framework.TestSuite; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import info.aduna.io.FileUtil; +import info.aduna.io.IOUtil; + +import org.openrdf.query.BindingSet; +import org.openrdf.query.MalformedQueryException; +import org.openrdf.query.QueryLanguage; +import org.openrdf.query.TupleQueryResult; +import org.openrdf.query.algebra.DeleteData; +import org.openrdf.query.algebra.InsertData; +import org.openrdf.query.algebra.UpdateExpr; +import org.openrdf.query.parser.ParsedOperation; +import org.openrdf.query.parser.ParsedUpdate; +import org.openrdf.repository.Repository; +import org.openrdf.repository.RepositoryConnection; +import org.openrdf.repository.sail.SailRepository; +import org.openrdf.repository.sail.helpers.SailUpdateExecutor; +import org.openrdf.rio.RDFParseException; +import org.openrdf.sail.NotifyingSailConnection; +import org.openrdf.sail.SailException; +import org.openrdf.sail.memory.MemoryStore; + +/** + * A SPARQL 1.1 syntax test, created by reading in a W3C working-group style + * manifest. + * + * @author Jeen Broekstra + */ +public abstract class SPARQL11SyntaxTest extends TestCase { + + /*-----------* + * Constants * + *-----------*/ + + private static final Logger logger = LoggerFactory.getLogger(SPARQL11SyntaxTest.class); + + private static final String SUBMANIFEST_QUERY, TESTCASE_QUERY; + + static { + StringBuilder sb = new StringBuilder(512); + + sb.append("PREFIX mf: "); + sb.append("PREFIX qt: "); + sb.append("SELECT ?subManifest "); + sb.append("WHERE { [] mf:include [ rdf:rest*/rdf:first ?subManifest ] . } "); + SUBMANIFEST_QUERY = sb.toString(); + + sb.setLength(0); + sb.append("PREFIX mf: "); + sb.append("PREFIX qt: "); + sb.append("PREFIX dawgt: "); + sb.append("SELECT ?TestURI ?Name ?Action ?Type "); + sb.append("WHERE { [] rdf:first ?TestURI. "); + sb.append(" ?TestURI a ?Type ; "); + sb.append(" mf:name ?Name ;"); + sb.append(" mf:action ?Action ;"); + sb.append(" dawgt:approval dawgt:Approved . "); + sb.append(" FILTER(?Type IN (mf:PositiveSyntaxTest11, mf:NegativeSyntaxTest11, mf:PositiveUpdateSyntaxTest11, mf:NegativeUpdateSyntaxTest11)) "); + sb.append(" } "); + TESTCASE_QUERY = sb.toString(); + } + + /*-----------* + * Variables * + *-----------*/ + + protected final String testURI; + + protected final String queryFileURL; + + protected final boolean positiveTest; + + /*--------------* + * Constructors * + *--------------*/ + + public SPARQL11SyntaxTest(String testURI, String name, String queryFileURL, boolean positiveTest) { + super(name); + this.testURI = testURI; + this.queryFileURL = queryFileURL; + this.positiveTest = positiveTest; + } + + /*---------* + * Methods * + *---------*/ + + @Override + protected void runTest() + throws Exception + { + InputStream stream = new URL(queryFileURL).openStream(); + String query = IOUtil.readString(new InputStreamReader(stream, "UTF-8")); + stream.close(); + + try { + ParsedOperation operation = parseOperation(query, queryFileURL); + + if (!positiveTest) { + boolean dataBlockUpdate = false; + if (operation instanceof ParsedUpdate) { + for (UpdateExpr updateExpr : ((ParsedUpdate)operation).getUpdateExprs()) { + if (updateExpr instanceof InsertData || updateExpr instanceof DeleteData) { + // parsing for these operation happens during actual + // execution, so try and execute. + dataBlockUpdate = true; + + MemoryStore store = new MemoryStore(); + store.initialize(); + NotifyingSailConnection conn = store.getConnection(); + try { + conn.begin(); + SailUpdateExecutor exec = new SailUpdateExecutor(conn, store.getValueFactory(), null); + exec.executeUpdate(updateExpr, null, null, true, -1); + conn.rollback(); + fail("Negative test case should have failed to parse"); + } + catch (SailException e) { + if (!(e.getCause() instanceof RDFParseException)) { + logger.error("unexpected error in negative test case", e); + fail("unexpected error in negative test case"); + } + // fall through - a parse exception is expected for a + // negative test case + conn.rollback(); + } + finally { + conn.close(); + } + } + } + } + if (!dataBlockUpdate) { + fail("Negative test case should have failed to parse"); + } + } + } + catch (MalformedQueryException e) { + if (positiveTest) { + e.printStackTrace(); + fail("Positive test case failed: " + e.getMessage()); + } + } + } + + protected abstract ParsedOperation parseOperation(String operation, String fileURL) + throws MalformedQueryException; + + public static Test suite() + throws Exception + { + return new TestSuite(); + } + + public interface Factory { + + SPARQL11SyntaxTest createSPARQLSyntaxTest(String testURI, String testName, String testAction, + boolean positiveTest); + } + + public static Test suite(Factory factory, boolean useRemote) + throws Exception + { + // manifest of W3C Data Access Working Group SPARQL syntax tests + final File tmpDir; + String host; + if (useRemote) { + host = "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/"; + tmpDir = null; + } + else { + URL url = SPARQL11SyntaxTest.class.getResource("/testcases-sparql-1.1-w3c/"); + if ("jar".equals(url.getProtocol())) { + try { + tmpDir = FileUtil.createTempDir("sparql-syntax"); + JarURLConnection con = (JarURLConnection)url.openConnection(); + JarFile jar = con.getJarFile(); + Enumeration entries = jar.entries(); + while (entries.hasMoreElements()) { + JarEntry file = entries.nextElement(); + File f = new File(tmpDir + File.separator + file.getName()); + if (file.isDirectory()) { + f.mkdir(); + continue; + } + InputStream is = jar.getInputStream(file); + FileOutputStream fos = new FileOutputStream(f); + while (is.available() > 0) { + fos.write(is.read()); + } + fos.close(); + is.close(); + } + File localFile = new File(tmpDir, con.getEntryName()); + host = localFile.toURI().toURL().toString(); + } + catch (IOException e) { + throw new AssertionError(e); + } + } + else { + host = url.toString(); + tmpDir = null; + } + } + + String manifestFile = host + "manifest-all.ttl"; + + TestSuite suite = new TestSuite() { + + @Override + public void run(TestResult result) { + try { + super.run(result); + } + finally { + if (tmpDir != null) { + try { + FileUtil.deleteDir(tmpDir); + } + catch (IOException e) { + System.err.println("Unable to clean up temporary directory '" + tmpDir + "': " + + e.getMessage()); + } + } + } + } + }; + + // Read manifest and create declared test cases + Repository manifestRep = new SailRepository(new MemoryStore()); + manifestRep.initialize(); + + RepositoryConnection con = manifestRep.getConnection(); + + logger.debug("Loading manifest data"); + URL manifest = new URL(manifestFile); + ManifestTest.addTurtle(con, manifest, manifestFile); + + logger.info("Searching for sub-manifests"); + List subManifestList = new ArrayList(); + + TupleQueryResult subManifests = con.prepareTupleQuery(QueryLanguage.SPARQL, SUBMANIFEST_QUERY).evaluate(); + while (subManifests.hasNext()) { + BindingSet bindings = subManifests.next(); + subManifestList.add(bindings.getValue("subManifest").toString()); + } + subManifests.close(); + + logger.info("Found {} sub-manifests", subManifestList.size()); + + for (String subManifest : subManifestList) { + logger.info("Loading sub manifest {}", subManifest); + con.clear(); + + URL subManifestURL = new URL(subManifest); + ManifestTest.addTurtle(con, subManifestURL, subManifest); + + TestSuite subSuite = new TestSuite(subManifest.substring(host.length())); + + logger.info("Creating test cases for {}", subManifest); + TupleQueryResult tests = con.prepareTupleQuery(QueryLanguage.SPARQL, TESTCASE_QUERY).evaluate(); + while (tests.hasNext()) { + BindingSet bindingSet = tests.next(); + + String testURI = bindingSet.getValue("TestURI").toString(); + String testName = bindingSet.getValue("Name").toString(); + String testAction = bindingSet.getValue("Action").toString(); + + String type = bindingSet.getValue("Type").toString(); + boolean positiveTest = type.equals("http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#PositiveSyntaxTest11") + || type.equals("http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#PositiveUpdateSyntaxTest11"); + + subSuite.addTest(factory.createSPARQLSyntaxTest(testURI, testName, testAction, positiveTest)); + } + tests.close(); + + suite.addTest(subSuite); + } + + con.close(); + manifestRep.shutDown(); + + logger.info("Added {} tests to suite ", suite.countTestCases()); + return suite; + } +} diff --git a/bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/manifest/SPARQLQueryTest.java b/bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/manifest/SPARQLQueryTest.java index fc43bff7e9..ae54f59826 100644 --- a/bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/manifest/SPARQLQueryTest.java +++ b/bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/manifest/SPARQLQueryTest.java @@ -14,20 +14,15 @@ * implied. See the License for the specific language governing permissions * and limitations under the License. */ -/* - * Pulled in to extend TestCase. - */ package org.openrdf.query.parser.sparql.manifest; -import info.aduna.io.IOUtil; -import info.aduna.iteration.Iterations; -import info.aduna.text.StringUtil; - import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.net.URL; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; @@ -36,11 +31,18 @@ import junit.framework.TestCase; import junit.framework.TestSuite; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import info.aduna.io.IOUtil; +import info.aduna.iteration.Iterations; +import info.aduna.text.StringUtil; + import org.openrdf.model.Resource; import org.openrdf.model.Statement; import org.openrdf.model.URI; import org.openrdf.model.Value; -import org.openrdf.model.util.ModelUtil; +import org.openrdf.model.util.Models; import org.openrdf.query.BindingSet; import org.openrdf.query.BooleanQuery; import org.openrdf.query.Dataset; @@ -74,8 +76,6 @@ import org.openrdf.rio.helpers.BasicParserSettings; import org.openrdf.rio.helpers.StatementCollector; import org.openrdf.sail.memory.MemoryStore; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * A SPARQL query test suite, created by reading in a W3C working-group style @@ -108,6 +108,7 @@ public abstract class SPARQLQueryTest extends TestCase { protected final boolean checkOrder; + protected final String[] ignoredTests; /*-----------* * Variables * *-----------*/ @@ -119,13 +120,13 @@ public abstract class SPARQLQueryTest extends TestCase { *--------------*/ public SPARQLQueryTest(String testURI, String name, String queryFileURL, String resultFileURL, - Dataset dataSet, boolean laxCardinality) + Dataset dataSet, boolean laxCardinality, String... ignoredTests) { this(testURI, name, queryFileURL, resultFileURL, dataSet, laxCardinality, false); } public SPARQLQueryTest(String testURI, String name, String queryFileURL, String resultFileURL, - Dataset dataSet, boolean laxCardinality, boolean checkOrder) + Dataset dataSet, boolean laxCardinality, boolean checkOrder, String... ignoredTests) { super(name.replaceAll("\\(", " ").replaceAll("\\)", " ")); @@ -135,6 +136,7 @@ public SPARQLQueryTest(String testURI, String name, String queryFileURL, String this.dataset = dataSet; this.laxCardinality = laxCardinality; this.checkOrder = checkOrder; + this.ignoredTests = ignoredTests; } /*---------* @@ -197,6 +199,14 @@ protected void tearDown() protected void runTest() throws Exception { + // FIXME this reports a test error because we still rely on JUnit 3 here. + //org.junit.Assume.assumeFalse(Arrays.asList(ignoredTests).contains(this.getName())); + // FIXME temporary fix is to report as succeeded and just ignore. + if (Arrays.asList(ignoredTests).contains(this.getName())) { + logger.warn("Query test ignored: " + this.getName()); + return; + } + RepositoryConnection con = dataRep.getConnection(); // Some SPARQL Tests have non-XSD datatypes that must pass for the test // suite to complete successfully @@ -323,8 +333,7 @@ protected void compareTupleQueryResults(TupleQueryResult queryResult, TupleQuery message.append("Missing bindings: \n"); for (BindingSet bs : missingBindings) { - message.append(bs); - message.append("\n"); + printBindingSet(bs, message); } message.append("============="); @@ -335,8 +344,7 @@ protected void compareTupleQueryResults(TupleQueryResult queryResult, TupleQuery if (!unexpectedBindings.isEmpty()) { message.append("Unexpected bindings: \n"); for (BindingSet bs : unexpectedBindings) { - message.append(bs); - message.append("\n"); + printBindingSet(bs, message); } message.append("============="); @@ -349,14 +357,12 @@ protected void compareTupleQueryResults(TupleQueryResult queryResult, TupleQuery message.append(" =======================\n"); message.append("query result: \n"); for (BindingSet bs : queryBindings) { - message.append(bs); - message.append("\n"); + printBindingSet(bs, message); } message.append(" =======================\n"); message.append("expected result: \n"); for (BindingSet bs : expectedBindings) { - message.append(bs); - message.append("\n"); + printBindingSet(bs, message); } message.append(" =======================\n"); @@ -367,14 +373,12 @@ else if (missingBindings.isEmpty() && unexpectedBindings.isEmpty()) { message.append(" =======================\n"); message.append("query result: \n"); for (BindingSet bs : queryBindings) { - message.append(bs); - message.append("\n"); + printBindingSet(bs, message); } message.append(" =======================\n"); message.append("expected result: \n"); for (BindingSet bs : expectedBindings) { - message.append(bs); - message.append("\n"); + printBindingSet(bs, message); } message.append(" =======================\n"); @@ -407,13 +411,26 @@ else if (missingBindings.isEmpty() && unexpectedBindings.isEmpty()) { */ } + protected void printBindingSet(BindingSet bs, StringBuilder appendable) { + List names = new ArrayList(bs.getBindingNames()); + Collections.sort(names); + + for (String name : names) { + if (bs.hasBinding(name)) { + appendable.append(bs.getBinding(name)); + appendable.append(' '); + } + } + appendable.append("\n"); + } + /* * MRP: Made !final. */ protected void compareGraphs(Set queryResult, Set expectedResult) throws Exception { - if (!ModelUtil.equals(expectedResult, queryResult)) { + if (!Models.isomorphic(expectedResult, queryResult)) { // Don't use RepositoryUtil.difference, it reports incorrect diffs /* * Collection unexpectedStatements = @@ -463,9 +480,9 @@ protected void compareGraphs(Set queryResult, Set expected } } - /* - * MRP: Made !final. - */ + /* + * MRP: Made !final. + */ protected void uploadDataset(Dataset dataset) throws Exception { @@ -667,7 +684,7 @@ public static TestSuite suite(String manifestFileURL, Factory factory, boolean a query.append(" WHERE NOT resultFile LIKE \"*.csv\" "); // skip tests involving JSON, sesame currently does not have a SPARQL/JSON // parser. -// query.append(" AND NOT resultFile LIKE \"*.srj\" "); + query.append(" AND NOT resultFile LIKE \"*.srj\" "); // skip tests involving entailment regimes query.append(" AND NOT BOUND(Regime) "); // skip test involving basic federation, these are tested separately. @@ -699,13 +716,9 @@ public static TestSuite suite(String manifestFileURL, Factory factory, boolean a BindingSet bindingSet = testCases.next(); URI testURI = (URI)bindingSet.getValue("testURI"); - - System.err.println(testURI); - - - String testName = bindingSet.getValue("testName").toString(); - String resultFile = bindingSet.getValue("resultFile").toString(); - String queryFile = bindingSet.getValue("queryFile").toString(); + String testName = bindingSet.getValue("testName").stringValue(); + String resultFile = bindingSet.getValue("resultFile").stringValue(); + String queryFile = bindingSet.getValue("queryFile").stringValue(); URI defaultGraphURI = (URI)bindingSet.getValue("defaultGraph"); Value action = bindingSet.getValue("action"); Value ordered = bindingSet.getValue("ordered"); @@ -756,13 +769,25 @@ public static TestSuite suite(String manifestFileURL, Factory factory, boolean a } */ + // Two SPARQL distinctness tests fail in RDF-1.1 if the only difference + // is in the number of results + if (!laxCardinality) { + if (testURI.stringValue().contains("distinct/manifest#distinct-2") + || testURI.stringValue().contains("distinct/manifest#distinct-9")) + { + laxCardinality = true; + } + } + + LOGGER.debug("testURI={} name={} queryFile={}", testURI.stringValue(), testName, queryFile); + // check if we should test for query result ordering boolean checkOrder = false; if (ordered != null) { checkOrder = Boolean.parseBoolean(ordered.stringValue()); } - SPARQLQueryTest test = factory.createSPARQLQueryTest(testURI.toString(), testName, queryFile, + SPARQLQueryTest test = factory.createSPARQLQueryTest(testURI.stringValue(), testName, queryFile, resultFile, dataset, laxCardinality, checkOrder); if (test != null) { suite.addTest(test); diff --git a/bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/manifest/SPARQLSyntaxTest.java b/bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/manifest/SPARQLSyntaxTest.java new file mode 100644 index 0000000000..438331dc10 --- /dev/null +++ b/bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/manifest/SPARQLSyntaxTest.java @@ -0,0 +1,277 @@ +/* + * Licensed to Aduna under one or more contributor license agreements. + * See the NOTICE.txt file distributed with this work for additional + * information regarding copyright ownership. + * + * Aduna licenses this file to you under the terms of the Aduna BSD + * License (the "License"); you may not use this file except in compliance + * with the License. See the LICENSE.txt file distributed with this work + * for the full License. + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + * implied. See the License for the specific language governing permissions + * and limitations under the License. + */ +package org.openrdf.query.parser.sparql.manifest; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.net.JarURLConnection; +import java.net.URL; +import java.util.ArrayList; +import java.util.Enumeration; +import java.util.List; +import java.util.jar.JarEntry; +import java.util.jar.JarFile; + +import junit.framework.Test; +import junit.framework.TestCase; +import junit.framework.TestResult; +import junit.framework.TestSuite; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import info.aduna.io.FileUtil; +import info.aduna.io.IOUtil; + +import org.openrdf.query.BindingSet; +import org.openrdf.query.MalformedQueryException; +import org.openrdf.query.QueryLanguage; +import org.openrdf.query.TupleQueryResult; +import org.openrdf.repository.Repository; +import org.openrdf.repository.RepositoryConnection; +import org.openrdf.repository.sail.SailRepository; +import org.openrdf.sail.memory.MemoryStore; + +/** + * A SPARQL syntax test, created by reading in a W3C working-group style manifest. + * + * @author Jeen Broekstra + */ +public abstract class SPARQLSyntaxTest extends TestCase { + + /*-----------* + * Constants * + *-----------*/ + + private static final Logger logger = LoggerFactory.getLogger(SPARQLSyntaxTest.class); + + private static final boolean REMOTE = false; + + private static final String SUBMANIFEST_QUERY, TESTCASE_QUERY; + + static { + StringBuilder sb = new StringBuilder(512); + + sb.append("SELECT subManifest "); + sb.append("FROM {} rdf:first {subManifest} "); + sb.append("USING NAMESPACE"); + sb.append(" mf = ,"); + sb.append(" qt = "); + SUBMANIFEST_QUERY = sb.toString(); + + sb.setLength(0); + sb.append("SELECT TestURI, Name, Action, Type "); + sb.append("FROM {TestURI} rdf:type {Type};"); + sb.append(" mf:name {Name};"); + sb.append(" mf:action {Action} "); + sb.append("WHERE Type = mf:PositiveSyntaxTest or Type = mf:NegativeSyntaxTest "); + sb.append("USING NAMESPACE"); + sb.append(" mf = ,"); + sb.append(" qt = "); + TESTCASE_QUERY = sb.toString(); + } + + /*-----------* + * Variables * + *-----------*/ + + protected final String testURI; + + protected final String queryFileURL; + + protected final boolean positiveTest; + + /*--------------* + * Constructors * + *--------------*/ + + public SPARQLSyntaxTest(String testURI, String name, String queryFileURL, boolean positiveTest) { + super(name); + this.testURI = testURI; + this.queryFileURL = queryFileURL; + this.positiveTest = positiveTest; + } + + /*---------* + * Methods * + *---------*/ + + @Override + protected void runTest() + throws Exception + { + InputStream stream = new URL(queryFileURL).openStream(); + String query = IOUtil.readString(new InputStreamReader(stream, "UTF-8")); + stream.close(); + + try { + parseQuery(query, queryFileURL); + + if (!positiveTest) { + fail("Negative test case should have failed to parse"); + } + } + catch (MalformedQueryException e) { + if (positiveTest) { + e.printStackTrace(); + fail("Positive test case failed: " + e.getMessage()); + } + } + } + + protected abstract void parseQuery(String query, String queryFileURL) + throws MalformedQueryException; + + public static Test suite() + throws Exception + { + return new TestSuite(); + } + + public interface Factory { + + SPARQLSyntaxTest createSPARQLSyntaxTest(String testURI, String testName, String testAction, + boolean positiveTest); + } + + public static Test suite(Factory factory) + throws Exception + { + // manifest of W3C Data Access Working Group SPARQL syntax tests + final File tmpDir; + String host; + if (REMOTE) { + host = "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/"; + tmpDir = null; + } + else { + URL url = SPARQLSyntaxTest.class.getResource("/testcases-dawg/data-r2/"); + if ("jar".equals(url.getProtocol())) { + try { + tmpDir = FileUtil.createTempDir("sparql-syntax"); + JarURLConnection con = (JarURLConnection)url.openConnection(); + JarFile jar = con.getJarFile(); + Enumeration entries = jar.entries(); + while (entries.hasMoreElements()) { + JarEntry file = entries.nextElement(); + File f = new File(tmpDir + File.separator + file.getName()); + if (file.isDirectory()) { + f.mkdir(); + continue; + } + InputStream is = jar.getInputStream(file); + FileOutputStream fos = new FileOutputStream(f); + while (is.available() > 0) { + fos.write(is.read()); + } + fos.close(); + is.close(); + } + File localFile = new File(tmpDir, con.getEntryName()); + host = localFile.toURI().toURL().toString(); + } + catch (IOException e) { + throw new AssertionError(e); + } + } + else { + host = url.toString(); + tmpDir = null; + } + } + + String manifestFile = host + "manifest-syntax.ttl"; + + TestSuite suite = new TestSuite() { + + @Override + public void run(TestResult result) { + try { + super.run(result); + } + finally { + if (tmpDir != null) { + try { + FileUtil.deleteDir(tmpDir); + } + catch (IOException e) { + System.err.println("Unable to clean up temporary directory '" + tmpDir + "': " + e.getMessage()); + } + } + } + } + }; + + // Read manifest and create declared test cases + Repository manifestRep = new SailRepository(new MemoryStore()); + manifestRep.initialize(); + + RepositoryConnection con = manifestRep.getConnection(); + + logger.debug("Loading manifest data"); + URL manifest = new URL(manifestFile); + ManifestTest.addTurtle(con, manifest, manifestFile); + + logger.info("Searching for sub-manifests"); + List subManifestList = new ArrayList(); + + TupleQueryResult subManifests = con.prepareTupleQuery(QueryLanguage.SERQL, SUBMANIFEST_QUERY).evaluate(); + while (subManifests.hasNext()) { + BindingSet bindings = subManifests.next(); + subManifestList.add(bindings.getValue("subManifest").toString()); + } + subManifests.close(); + + logger.info("Found {} sub-manifests", subManifestList.size()); + + for (String subManifest : subManifestList) { + logger.info("Loading sub manifest {}", subManifest); + con.clear(); + + URL subManifestURL = new URL(subManifest); + ManifestTest.addTurtle(con, subManifestURL, subManifest); + + TestSuite subSuite = new TestSuite(subManifest.substring(host.length())); + + logger.info("Creating test cases for {}", subManifest); + TupleQueryResult tests = con.prepareTupleQuery(QueryLanguage.SERQL, TESTCASE_QUERY).evaluate(); + while (tests.hasNext()) { + BindingSet bindingSet = tests.next(); + + String testURI = bindingSet.getValue("TestURI").toString(); + String testName = bindingSet.getValue("Name").toString(); + String testAction = bindingSet.getValue("Action").toString(); + boolean positiveTest = bindingSet.getValue("Type").toString().equals( + "http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#PositiveSyntaxTest"); + + subSuite.addTest(factory.createSPARQLSyntaxTest(testURI, testName, testAction, positiveTest)); + } + tests.close(); + + suite.addTest(subSuite); + } + + con.close(); + manifestRep.shutDown(); + + logger.info("Added {} tests to suite ", suite.countTestCases()); + return suite; + } +} diff --git a/bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/manifest/SPARQLUpdateConformanceTest.java b/bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/manifest/SPARQLUpdateConformanceTest.java index e2af0fe715..eb2cc8df66 100644 --- a/bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/manifest/SPARQLUpdateConformanceTest.java +++ b/bigdata-sails-test/src/test/java/org/openrdf/query/parser/sparql/manifest/SPARQLUpdateConformanceTest.java @@ -22,9 +22,7 @@ import java.net.URL; import java.util.HashMap; import java.util.Map; - -import junit.framework.TestCase; -import junit.framework.TestSuite; +import java.util.Set; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -39,7 +37,7 @@ import org.openrdf.model.URI; import org.openrdf.model.Value; import org.openrdf.model.impl.URIImpl; -import org.openrdf.model.util.ModelUtil; +import org.openrdf.model.util.Models; import org.openrdf.query.BindingSet; import org.openrdf.query.Dataset; import org.openrdf.query.MalformedQueryException; @@ -52,14 +50,16 @@ import org.openrdf.repository.Repository; import org.openrdf.repository.RepositoryConnection; import org.openrdf.repository.RepositoryException; -import org.openrdf.repository.contextaware.ContextAwareConnection; -import org.openrdf.repository.contextaware.ContextAwareRepository; import org.openrdf.repository.sail.SailRepository; import org.openrdf.rio.RDFFormat; import org.openrdf.sail.memory.MemoryStore; +import junit.framework.TestCase; +import junit.framework.TestSuite; + /** - * A SPARQL 1.1 Update test, created by reading in a W3C working-group style manifest. + * A SPARQL 1.1 Update test, created by reading in a W3C working-group style + * manifest. * * @author Jeen Broekstra */ @@ -98,7 +98,8 @@ public abstract class SPARQLUpdateConformanceTest extends TestCase { *--------------*/ public SPARQLUpdateConformanceTest(String testURI, String name, String requestFile, URI defaultGraphURI, - Map inputNamedGraphs, URI resultDefaultGraphURI, Map resultNamedGraphs) + Map inputNamedGraphs, URI resultDefaultGraphURI, + Map resultNamedGraphs) { super(name); @@ -109,21 +110,22 @@ public SPARQLUpdateConformanceTest(String testURI, String name, String requestFi this.resultDefaultGraph = resultDefaultGraphURI; this.resultNamedGraphs = resultNamedGraphs; - if (this.inputNamedGraphs.size() > 0) { - DatasetImpl ds = new DatasetImpl(); - ds.addDefaultGraph(null); - ds.addDefaultRemoveGraph(null); - ds.setDefaultInsertGraph(null); + final DatasetImpl ds = new DatasetImpl(); + // This ensures that the repository operates in 'exclusive + // mode': the default graph _only_ consists of the null-context (instead + // of the entire repository). + ds.addDefaultGraph(null); + ds.addDefaultRemoveGraph(null); + ds.setDefaultInsertGraph(null); + + if (this.inputNamedGraphs.size() > 0) { for (String ng : inputNamedGraphs.keySet()) { URI namedGraph = new URIImpl(ng); ds.addNamedGraph(namedGraph); } - this.dataset = ds; - } - else { - this.dataset = null; } + this.dataset = ds; } /*---------* @@ -207,11 +209,6 @@ protected void tearDown() dataRep.shutDown(); dataRep = null; } - - if (expectedResultRepo != null) { - expectedResultRepo.shutDown(); - expectedResultRepo = null; - } } @Override @@ -224,16 +221,13 @@ protected void runTest() String updateString = readUpdateString(); con.begin(); -// con.setReadContexts((URI)null); - + Update update = con.prepareUpdate(QueryLanguage.SPARQL, updateString, requestFileURL); - if (this.dataset != null) { - update.setDataset(this.dataset); - } + update.setDataset(dataset); update.execute(); con.commit(); - + // check default graph logger.info("checking default graph"); compareGraphs(Iterations.asList(con.getStatements(null, null, null, true, (Resource)null)), @@ -246,8 +240,8 @@ protected void runTest() Iterations.asList(erCon.getStatements(null, null, null, true, contextURI))); } } - catch(Exception e) { - if(con.isActive()) { + catch (Exception e) { + if (con.isActive()) { con.rollback(); } throw e; @@ -258,40 +252,34 @@ protected void runTest() } } - protected void compareGraphs(Iterable actual, Iterable expected) + private void compareGraphs(Iterable actual, Iterable expected) throws Exception { - if (!ModelUtil.equals(expected, actual)) { - StringBuilder message = new StringBuilder(128); - message.append("\n=========================================\n"); - message.append(getName()); - message.append("\n"); - message.append(testURI); - message.append("\n=========================================\n"); - - message.append("Expected results: \n"); - for (Statement bs : expected) { - message.append(bs); - message.append("\n"); - } - message.append("=========================================\n"); - - message.append("Bigdata results: \n"); - for (Statement bs : actual) { - message.append(bs); - message.append("\n"); - } - message.append("=========================================\n"); - - final String queryStr = readUpdateString(); - message.append("Query:\n"+queryStr); - message.append("\n=========================================\n"); - -// message.append("Data:\n"+readInputData(dataset)); -// message.append("\n=========================================\n"); - - logger.error(message.toString()); - fail(message.toString()); + if (!Models.isomorphic(expected, actual)) { + StringBuilder message = new StringBuilder(128); + message.append("\n============ "); + message.append(getName()); + message.append(" =======================\n"); + message.append("Expected result: \n"); + for (Statement st : expected) { + message.append(st.toString()); + message.append("\n"); + } + message.append("============="); + StringUtil.appendN('=', getName().length(), message); + message.append("========================\n"); + + message.append("Actual result: \n"); + for (Statement st : actual) { + message.append(st.toString()); + message.append("\n"); + } + message.append("============="); + StringUtil.appendN('=', getName().length(), message); + message.append("========================\n"); + + logger.error(message.toString()); + fail(message.toString()); } } @@ -337,10 +325,12 @@ public static TestSuite suite(String manifestFileURL, Factory factory, boolean a suite.setName(getManifestName(manifestRep, con, manifestFileURL)); - // Extract test case information from the manifest file. Note that we only + // Extract test case information from the manifest file. Note that we + // only // select those test cases that are mentioned in the list. StringBuilder query = new StringBuilder(512); - query.append(" SELECT DISTINCT testURI, testName, result, action, requestFile, defaultGraph, resultDefaultGraph "); + query.append( + " SELECT DISTINCT testURI, testName, result, action, requestFile, defaultGraph, resultDefaultGraph "); query.append(" FROM {} rdf:first {testURI} rdf:type {mf:UpdateEvaluationTest}; "); if (approvedOnly) { query.append(" dawgt:approval {dawgt:Approved}; "); @@ -416,8 +406,8 @@ public static TestSuite suite(String manifestFileURL, Factory factory, boolean a } SPARQLUpdateConformanceTest test = factory.createSPARQLUpdateConformanceTest(testURI.toString(), - testName, requestFile.toString(), defaultGraphURI, inputNamedGraphs, resultDefaultGraphURI, - resultNamedGraphs); + testName, requestFile.toString(), defaultGraphURI, inputNamedGraphs, + resultDefaultGraphURI, resultNamedGraphs); if (test != null) { suite.addTest(test); @@ -434,7 +424,7 @@ public static TestSuite suite(String manifestFileURL, Factory factory, boolean a protected static String getManifestName(Repository manifestRep, RepositoryConnection con, String manifestFileURL) - throws QueryEvaluationException, RepositoryException, MalformedQueryException + throws QueryEvaluationException, RepositoryException, MalformedQueryException { // Try to extract suite name from manifest file TupleQuery manifestNameQuery = con.prepareTupleQuery(QueryLanguage.SERQL, diff --git a/bigdata-war/pom.xml b/bigdata-war/pom.xml index 47ffa73b63..8d95ccd640 100644 --- a/bigdata-war/pom.xml +++ b/bigdata-war/pom.xml @@ -357,13 +357,17 @@ ga('send', 'pageview'); com.github.jsonld-java - jsonld-java-sesame + jsonld-java ${jsonld.version} org.openrdf.sesame * + + org.apache.httpcomponents + * + diff --git a/blazegraph-deb/pom.xml b/blazegraph-deb/pom.xml index a62299ad98..3d499925e6 100644 --- a/blazegraph-deb/pom.xml +++ b/blazegraph-deb/pom.xml @@ -561,7 +561,7 @@ ga('send', 'pageview'); com.github.jsonld-java - jsonld-java-sesame + jsonld-java ${jsonld.version} diff --git a/blazegraph-rpm/pom.xml b/blazegraph-rpm/pom.xml index d2593a802b..a6142ad970 100644 --- a/blazegraph-rpm/pom.xml +++ b/blazegraph-rpm/pom.xml @@ -563,7 +563,7 @@ ga('send', 'pageview'); com.github.jsonld-java - jsonld-java-sesame + jsonld-java ${jsonld.version} diff --git a/blazegraph-tgz/pom.xml b/blazegraph-tgz/pom.xml index 49d8f7b24a..180a7ad8b1 100644 --- a/blazegraph-tgz/pom.xml +++ b/blazegraph-tgz/pom.xml @@ -426,7 +426,7 @@ ga('send', 'pageview'); com.github.jsonld-java - jsonld-java-sesame + jsonld-java ${jsonld.version} diff --git a/blazegraph-war/pom.xml b/blazegraph-war/pom.xml index a229d7b0c9..0aadad1986 100644 --- a/blazegraph-war/pom.xml +++ b/blazegraph-war/pom.xml @@ -357,13 +357,17 @@ ga('send', 'pageview'); com.github.jsonld-java - jsonld-java-sesame + jsonld-java ${jsonld.version} org.openrdf.sesame * + + org.apache.httpcomponents + * + diff --git a/pom.xml b/pom.xml index 7ce07e0615..e763268aac 100644 --- a/pom.xml +++ b/pom.xml @@ -203,9 +203,9 @@ Copyright 2010 by TalkingTrends (Amsterdam, The Netherlands) 4.8 - 3.4.5 - 2.7.12 - 0.5.1 + 3.4.14 + 2.8.11 + 0.12.1 0.6.1 1.7.25 9.4.12.v20180830 @@ -226,7 +226,11 @@ Copyright 2010 by TalkingTrends (Amsterdam, The Netherlands) 6.5.16 1.0.7-270114 1.0.4 - 2.2.3 + + 2.6.7 2.5.0 1.3.3 2.5.0 diff --git a/rdf-properties/pom.xml b/rdf-properties/pom.xml index 804814b87c..33804ff842 100644 --- a/rdf-properties/pom.xml +++ b/rdf-properties/pom.xml @@ -93,13 +93,17 @@ ga('send', 'pageview'); com.github.jsonld-java - jsonld-java-sesame + jsonld-java ${jsonld.version} org.openrdf.sesame * + + org.apache.httpcomponents + * + diff --git a/rdf-properties/src/main/resources/META-INF/services/org.openrdf.rio.RDFParserFactory b/rdf-properties/src/main/resources/META-INF/services/org.openrdf.rio.RDFParserFactory index 381e849e53..573fd7ae78 100644 --- a/rdf-properties/src/main/resources/META-INF/services/org.openrdf.rio.RDFParserFactory +++ b/rdf-properties/src/main/resources/META-INF/services/org.openrdf.rio.RDFParserFactory @@ -11,4 +11,4 @@ org.openrdf.rio.trig.TriGParserFactory org.openrdf.rio.trix.TriXParserFactory org.openrdf.rio.turtle.TurtleParserFactory org.semarglproject.sesame.rdf.rdfa.SesameRDFaParserFactory -com.github.jsonldjava.sesame.SesameJSONLDParserFactory +org.openrdf.rio.jsonld.JSONLDParserFactory \ No newline at end of file diff --git a/rdf-properties/src/main/resources/META-INF/services/org.openrdf.rio.RDFWriterFactory b/rdf-properties/src/main/resources/META-INF/services/org.openrdf.rio.RDFWriterFactory index c7b50fcfb5..024c26bcf7 100644 --- a/rdf-properties/src/main/resources/META-INF/services/org.openrdf.rio.RDFWriterFactory +++ b/rdf-properties/src/main/resources/META-INF/services/org.openrdf.rio.RDFWriterFactory @@ -9,4 +9,4 @@ org.openrdf.rio.rdfxml.RDFXMLWriterFactory org.openrdf.rio.trig.TriGWriterFactory org.openrdf.rio.trix.TriXWriterFactory org.openrdf.rio.turtle.TurtleWriterFactory -com.github.jsonldjava.sesame.SesameJSONLDWriterFactory +org.openrdf.rio.jsonld.JSONLDWriterFactory \ No newline at end of file From e0cf93055fc1d81f4b706bfe9615a3de35d379c9 Mon Sep 17 00:00:00 2001 From: Stanislav Malyshev Date: Fri, 21 Jun 2019 11:44:18 -0700 Subject: [PATCH 2/2] Fix writing JSON result literals Bug: T225996 Change-Id: I57d489fe6726f29c16291618d13f4d486dfc6994 --- .../bigdata/rdf/rio/json/SPARQLJSONWriterBase.java | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/SPARQLJSONWriterBase.java b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/SPARQLJSONWriterBase.java index ee3b124c0a..1abafbfd87 100644 --- a/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/SPARQLJSONWriterBase.java +++ b/bigdata-core/bigdata-rdf/src/java/com/bigdata/rdf/rio/json/SPARQLJSONWriterBase.java @@ -41,6 +41,7 @@ import org.openrdf.rio.RioSetting; import org.openrdf.rio.helpers.BasicWriterSettings; +import com.bigdata.rdf.internal.XSD; import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; @@ -352,11 +353,12 @@ else if (value instanceof Literal) { // BasicWriterSettings.RDF_LANGSTRING_TO_LANG_LITERAL here if (lit.getLanguage() != null) { jg.writeObjectField("xml:lang", lit.getLanguage()); - } - // TODO: Implement support for - // BasicWriterSettings.XSD_STRING_TO_PLAIN_LITERAL here - if (lit.getDatatype() != null) { - jg.writeObjectField("datatype", lit.getDatatype().stringValue()); + } else { + // TODO: Implement support for + // BasicWriterSettings.XSD_STRING_TO_PLAIN_LITERAL here + if (lit.getDatatype() != null && !lit.getDatatype().equals(XSD.STRING)) { + jg.writeObjectField("datatype", lit.getDatatype().stringValue()); + } } jg.writeObjectField("type", "literal");