Java Code Examples for org.eclipse.rdf4j.repository.RepositoryConnection#add()
The following examples show how to use org.eclipse.rdf4j.repository.RepositoryConnection#add().
Each example is taken from an open source project; the source file and license are noted above the code.
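Before the project-specific examples, here is a minimal, self-contained sketch of the basic add() pattern against an in-memory store: open a connection, stage a statement inside a transaction, and roll back if anything fails. The class name and the http://example.org/alice IRI are purely illustrative; the API calls themselves (SailRepository, MemoryStore, begin/commit/rollback) are standard RDF4J.

import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.ValueFactory;
import org.eclipse.rdf4j.model.vocabulary.FOAF;
import org.eclipse.rdf4j.model.vocabulary.RDF;
import org.eclipse.rdf4j.repository.Repository;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.repository.sail.SailRepository;
import org.eclipse.rdf4j.sail.memory.MemoryStore;

public class RepositoryConnectionAddSketch {

    public static void main(String[] args) {
        // In-memory repository so the sketch has no external dependencies
        Repository repo = new SailRepository(new MemoryStore());
        repo.initialize();
        try (RepositoryConnection con = repo.getConnection()) {
            ValueFactory vf = con.getValueFactory();
            IRI alice = vf.createIRI("http://example.org/alice"); // illustrative IRI
            con.begin();
            try {
                // add(subject, predicate, object, contexts...) stages the statement in the transaction
                con.add(alice, RDF.TYPE, FOAF.PERSON);
                con.commit();
            } catch (Exception e) {
                if (con.isActive()) {
                    con.rollback();
                }
                throw e;
            }
        } finally {
            repo.shutDown();
        }
    }
}

The same add() method is overloaded to accept a Statement, an Iterable of statements, a Model, or an InputStream/Reader plus a base URI and RDFFormat, as the examples below illustrate.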
Example 1
Source File: AbstractSHACLTest.java From rdf4j with BSD 3-Clause "New" or "Revised" License | 6 votes |
protected void upload(Repository rep, Model dataGraph) {
    RepositoryConnection con = rep.getConnection();
    try {
        con.begin();
        con.add(dataGraph);
        con.commit();
    } catch (Exception e) {
        if (con.isActive()) {
            con.rollback();
        }
        throw e;
    } finally {
        con.close();
    }
}
Example 2
Source File: RdfCloudTripleStoreConnectionTest.java From rya with Apache License 2.0 | 6 votes |
public void testDuplicateLiterals() throws Exception {
    RepositoryConnection conn = repository.getConnection();

    IRI loadPerc = VF.createIRI(litdupsNS, "loadPerc");
    Literal lit1 = VF.createLiteral(0.0);
    Literal lit2 = VF.createLiteral(0.0);
    Literal lit3 = VF.createLiteral(0.0);

    conn.add(cpu, loadPerc, lit1);
    conn.add(cpu, loadPerc, lit2);
    conn.add(cpu, loadPerc, lit3);
    conn.commit();

    RepositoryResult<Statement> result = conn.getStatements(cpu, loadPerc, null, true);
    int count = 0;
    while (result.hasNext()) {
        count++;
        result.next();
    }
    result.close();
    assertEquals(1, count);

    // clean up
    conn.remove(cpu, loadPerc, lit1);
    conn.close();
}
Example 3
Source File: RyaAccumuloSailFactoryTest.java From rya with Apache License 2.0 | 6 votes |
@Ignore
@Test
public void testAddStatement() throws Exception {
    SailRepositoryFactory f = new SailRepositoryFactory();
    Repository r = f.getRepository(getConfig());
    r.initialize();
    RepositoryConnection rc = r.getConnection();
    ValueFactory vf = rc.getValueFactory();
    Statement s = vf.createStatement(vf.createIRI("u:a"), vf.createIRI("u:b"), vf.createIRI("u:c"));
    assertFalse(rc.hasStatement(s, false));
    rc.add(s);
    Assert.assertTrue(rc.hasStatement(s, false));
    rc.close();
}
Example 4
Source File: RdfCloudTripleStoreConnectionTest.java From rya with Apache License 2.0 | 6 votes |
public void testRegexFilter() throws Exception {
    RepositoryConnection conn = repository.getConnection();
    IRI loadPerc = VF.createIRI(litdupsNS, "loadPerc");
    IRI testClass = VF.createIRI(litdupsNS, "test");
    Literal six = VF.createLiteral("6");
    Literal sev = VF.createLiteral("7");
    Literal ten = VF.createLiteral("10");
    conn.add(cpu, loadPerc, six);
    conn.add(cpu, loadPerc, sev);
    conn.add(cpu, loadPerc, ten);
    conn.add(cpu, RDF.TYPE, testClass);
    conn.commit();

    String query = "PREFIX org.apache: <" + NAMESPACE + ">\n"
            + "select * where {"
            + String.format("<%s> ?p ?o.\n", cpu.stringValue())
            + "FILTER(regex(?o, '^1'))."
            + "}";
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    CountTupleHandler cth = new CountTupleHandler();
    tupleQuery.evaluate(cth);
    conn.close();
    assertEquals(cth.getCount(), 1);
}
Example 5
Source File: TriGParserTestCase.java From rdf4j with BSD 3-Clause "New" or "Revised" License | 6 votes |
public TestSuite createTestSuite() throws Exception {
    // Create test suite
    TestSuite suite = new TestSuite(TriGParserTestCase.class.getName());

    // Add the manifest for W3C test cases to a repository and query it
    Repository w3cRepository = new SailRepository(new MemoryStore());
    w3cRepository.initialize();
    RepositoryConnection w3cCon = w3cRepository.getConnection();

    InputStream inputStream = this.getClass().getResourceAsStream(TEST_W3C_MANIFEST_URL);
    w3cCon.add(inputStream, TEST_W3C_MANIFEST_URI_BASE, RDFFormat.TURTLE);

    parsePositiveTriGSyntaxTests(suite, TEST_W3C_FILE_BASE_PATH, TESTS_W3C_BASE_URL, TEST_W3C_TEST_URI_BASE, w3cCon);
    parseNegativeTriGSyntaxTests(suite, TEST_W3C_FILE_BASE_PATH, TESTS_W3C_BASE_URL, TEST_W3C_TEST_URI_BASE, w3cCon);
    parsePositiveTriGEvalTests(suite, TEST_W3C_FILE_BASE_PATH, TESTS_W3C_BASE_URL, TEST_W3C_TEST_URI_BASE, w3cCon);
    parseNegativeTriGEvalTests(suite, TEST_W3C_FILE_BASE_PATH, TESTS_W3C_BASE_URL, TEST_W3C_TEST_URI_BASE, w3cCon);

    w3cCon.close();
    w3cRepository.shutDown();

    return suite;
}
Example 6
Source File: RdfCloudTripleStoreConnectionTest.java From rya with Apache License 2.0 | 6 votes |
public void testSPOObjRange() throws Exception {
    RepositoryConnection conn = repository.getConnection();
    IRI loadPerc = VF.createIRI(litdupsNS, "loadPerc");
    Literal six = VF.createLiteral("6");
    Literal sev = VF.createLiteral("7");
    Literal ten = VF.createLiteral("10");
    conn.add(cpu, loadPerc, six);
    conn.add(cpu, loadPerc, sev);
    conn.add(cpu, loadPerc, ten);
    conn.commit();

    String query = "PREFIX org.apache: <" + NAMESPACE + ">\n"
            + "select * where {"
            + "<" + cpu.stringValue() + "> <" + loadPerc.stringValue() + "> ?o.\n"
            + "FILTER(org.apache:range(?o, '6', '8'))."
            + "}";
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    CountTupleHandler cth = new CountTupleHandler();
    tupleQuery.evaluate(cth);
    conn.close();
    assertEquals(cth.getCount(), 2);
}
Example 7
Source File: RdfCloudTripleStoreConnectionTest.java From rya with Apache License 2.0 | 6 votes |
public void testEvaluate() throws Exception {
    RepositoryConnection conn = repository.getConnection();
    IRI loadPerc = VF.createIRI(litdupsNS, "loadPerc");
    IRI uri1 = VF.createIRI(litdupsNS, "uri1");
    conn.add(cpu, loadPerc, uri1);
    conn.commit();

    String query = "select * where {"
            + "?x <" + loadPerc.stringValue() + "> ?o1."
            + "}";
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    CountTupleHandler cth = new CountTupleHandler();
    tupleQuery.evaluate(cth);
    assertEquals(cth.getCount(), 1);
    conn.close();
}
Example 8
Source File: SPARQLServiceEvaluationTest.java From rdf4j with BSD 3-Clause "New" or "Revised" License | 6 votes |
/**
 * Load a dataset. Note: the repositories are cleared before loading data
 *
 * @param rep
 * @param datasetFile
 * @throws RDFParseException
 * @throws RepositoryException
 * @throws IOException
 */
protected void loadDataSet(Repository rep, String datasetFile)
        throws RDFParseException, RepositoryException, IOException {
    logger.debug("loading dataset...");
    InputStream dataset = SPARQLServiceEvaluationTest.class.getResourceAsStream(datasetFile);
    if (dataset == null) {
        throw new IllegalArgumentException("Datasetfile " + datasetFile + " not found.");
    }
    RepositoryConnection con = rep.getConnection();
    try {
        con.clear();
        con.add(dataset, "",
                Rio.getParserFormatForFileName(datasetFile).orElseThrow(Rio.unsupportedFormat(datasetFile)));
    } finally {
        dataset.close();
        con.close();
    }
    logger.debug("dataset loaded.");
}
Example 9
Source File: RdfCloudTripleStoreConnectionTest.java From rya with Apache License 2.0 | 6 votes |
public void testPOObjRange() throws Exception {
    RepositoryConnection conn = repository.getConnection();
    IRI loadPerc = VF.createIRI(litdupsNS, "loadPerc");
    Literal six = VF.createLiteral("6");
    Literal sev = VF.createLiteral("7");
    Literal ten = VF.createLiteral("10");
    conn.add(cpu, loadPerc, six);
    conn.add(cpu, loadPerc, sev);
    conn.add(cpu, loadPerc, ten);
    conn.commit();

    String query = "PREFIX org.apache: <" + NAMESPACE + ">\n"
            + "select * where {"
            + "?x <" + loadPerc.stringValue() + "> ?o.\n"
            + "FILTER(org.apache:range(?o, '6', '8'))."
            + "}";
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    CountTupleHandler cth = new CountTupleHandler();
    tupleQuery.evaluate(cth);
    conn.close();
    assertEquals(2, cth.getCount());
}
Example 10
Source File: PowsyblWriterSequenceFixTest.java From powsybl-core with Mozilla Public License 2.0 | 5 votes |
private void addObjectTypeStatement(RepositoryConnection cnx, IRI subject, String objectType) {
    IRI objectTypeIRI = cnx.getValueFactory().createIRI(objectType);
    Statement subjectTypeStatement = cnx.getValueFactory().createStatement(
            subject,
            RDF.TYPE,
            objectTypeIRI);
    Resource context = cnx.getValueFactory().createIRI(qualifiedContextName);
    cnx.add(subjectTypeStatement, context);
}
Example 11
Source File: ReificationStrategy.java From inception with Apache License 2.0 | 5 votes |
default void upsert(RepositoryConnection aConnection, Collection<Statement> aOriginalTriples,
        Collection<Statement> aNewTriples)
{
    // Delete all original triples except the ones which we would re-create anyway
    Set<Statement> triplesToDelete = new HashSet<>();
    aOriginalTriples.forEach(triplesToDelete::add);
    triplesToDelete.removeAll(aNewTriples);
    aConnection.remove(triplesToDelete);

    // Store the new triples
    aConnection.add(aNewTriples);
}
Example 12
Source File: RdfControllerAccumuloTest.java From rya with Apache License 2.0 | 5 votes |
@Before
public void setup() {
    this.mockMvc = standaloneSetup(controller).build();
    try {
        RepositoryConnection con = repository.getConnection();
        con.add(getClass().getResourceAsStream("/test.nt"), "", RDFFormat.NTRIPLES);
        con.close();
    } catch (Exception e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
}
Example 13
Source File: RdfCloudTripleStoreConnectionTest.java From rya with Apache License 2.0 | 5 votes |
public void testEquivPropOf() throws Exception {
    if (internalInferenceEngine == null) {
        return; // infer not supported
    }

    RepositoryConnection conn = repository.getConnection();
    conn.add(VF.createStatement(VF.createIRI(litdupsNS, "undergradDegreeFrom"), OWL.EQUIVALENTPROPERTY, VF.createIRI(litdupsNS, "ugradDegreeFrom")));
    conn.add(VF.createStatement(VF.createIRI(litdupsNS, "UgradA"), VF.createIRI(litdupsNS, "undergradDegreeFrom"), VF.createIRI(litdupsNS, "Harvard")));
    conn.add(VF.createStatement(VF.createIRI(litdupsNS, "GradB"), VF.createIRI(litdupsNS, "ugradDegreeFrom"), VF.createIRI(litdupsNS, "Harvard")));
    conn.add(VF.createStatement(VF.createIRI(litdupsNS, "GradC"), VF.createIRI(litdupsNS, "ugraduateDegreeFrom"), VF.createIRI(litdupsNS, "Harvard")));
    conn.commit();
    conn.close();

    internalInferenceEngine.refreshGraph();

    conn = repository.getConnection();

    String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n"
            + "PREFIX rdf: <" + RDF.NAMESPACE + ">\n"
            + "PREFIX lit: <" + litdupsNS + ">\n"
            + "select * where {?s lit:ugradDegreeFrom lit:Harvard.}";
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    CountTupleHandler tupleHandler = new CountTupleHandler();
    tupleQuery.evaluate(tupleHandler);
    assertEquals(2, tupleHandler.getCount());

    conn.close();
}
Example 14
Source File: WritableConnection.java From rdf4j with BSD 3-Clause "New" or "Revised" License | 5 votes |
private void add(RepositoryConnection member, Resource subj, IRI pred, Value obj, Resource... contexts)
        throws SailException {
    try {
        member.add(subj, pred, obj, contexts);
    } catch (RepositoryException e) {
        throw new SailException(e);
    }
}
Example 15
Source File: RdfCloudTripleStoreConnectionTest.java From rya with Apache License 2.0 | 4 votes |
public void testSymmPropOf() throws Exception {
    if (internalInferenceEngine == null) {
        return; // infer not supported
    }

    RepositoryConnection conn = repository.getConnection();
    conn.add(VF.createStatement(VF.createIRI(litdupsNS, "friendOf"), RDF.TYPE, OWL.SYMMETRICPROPERTY));
    conn.add(VF.createStatement(VF.createIRI(litdupsNS, "Bob"), VF.createIRI(litdupsNS, "friendOf"), VF.createIRI(litdupsNS, "Jeff")));
    conn.add(VF.createStatement(VF.createIRI(litdupsNS, "James"), VF.createIRI(litdupsNS, "friendOf"), VF.createIRI(litdupsNS, "Jeff")));
    conn.commit();
    conn.close();

    internalInferenceEngine.refreshGraph();

    conn = repository.getConnection();

    String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n"
            + "PREFIX rdf: <" + RDF.NAMESPACE + ">\n"
            + "PREFIX lit: <" + litdupsNS + ">\n"
            + "select * where {?s lit:friendOf lit:Bob.}";
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    CountTupleHandler tupleHandler = new CountTupleHandler();
    tupleQuery.evaluate(tupleHandler);
    assertEquals(1, tupleHandler.getCount());

    query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n"
            + "PREFIX rdf: <" + RDF.NAMESPACE + ">\n"
            + "PREFIX lit: <" + litdupsNS + ">\n"
            + "select * where {?s lit:friendOf lit:James.}";
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleHandler = new CountTupleHandler();
    tupleQuery.evaluate(tupleHandler);
    assertEquals(1, tupleHandler.getCount());

    query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n"
            + "PREFIX rdf: <" + RDF.NAMESPACE + ">\n"
            + "PREFIX lit: <" + litdupsNS + ">\n"
            + "select * where {?s lit:friendOf lit:Jeff.}";
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleHandler = new CountTupleHandler();
    tupleQuery.evaluate(tupleHandler);
    assertEquals(2, tupleHandler.getCount());

    conn.close();
}
Example 16
Source File: AddStatementOperation.java From rdf4j with BSD 3-Clause "New" or "Revised" License | 4 votes |
@Override
public void execute(RepositoryConnection con) throws RepositoryException {
    con.add(getSubject(), getPredicate(), getObject(), getContexts());
}
Example 17
Source File: RdfCloudTripleStoreConnectionTest.java From rya with Apache License 2.0 | 4 votes |
public void testSubPropertyOf() throws Exception {
    if (internalInferenceEngine == null) {
        return; // infer not supported
    }

    RepositoryConnection conn = repository.getConnection();
    conn.add(VF.createStatement(VF.createIRI(litdupsNS, "undergradDegreeFrom"), RDFS.SUBPROPERTYOF, VF.createIRI(litdupsNS, "degreeFrom")));
    conn.add(VF.createStatement(VF.createIRI(litdupsNS, "gradDegreeFrom"), RDFS.SUBPROPERTYOF, VF.createIRI(litdupsNS, "degreeFrom")));
    conn.add(VF.createStatement(VF.createIRI(litdupsNS, "degreeFrom"), RDFS.SUBPROPERTYOF, VF.createIRI(litdupsNS, "memberOf")));
    conn.add(VF.createStatement(VF.createIRI(litdupsNS, "memberOf"), RDFS.SUBPROPERTYOF, VF.createIRI(litdupsNS, "associatedWith")));
    conn.add(VF.createStatement(VF.createIRI(litdupsNS, "UgradA"), VF.createIRI(litdupsNS, "undergradDegreeFrom"), VF.createIRI(litdupsNS, "Harvard")));
    conn.add(VF.createStatement(VF.createIRI(litdupsNS, "GradB"), VF.createIRI(litdupsNS, "gradDegreeFrom"), VF.createIRI(litdupsNS, "Yale")));
    conn.add(VF.createStatement(VF.createIRI(litdupsNS, "ProfessorC"), VF.createIRI(litdupsNS, "memberOf"), VF.createIRI(litdupsNS, "Harvard")));
    conn.commit();
    conn.close();

    internalInferenceEngine.refreshGraph();

    conn = repository.getConnection();

    String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n"
            + "PREFIX rdf: <" + RDF.NAMESPACE + ">\n"
            + "PREFIX lit: <" + litdupsNS + ">\n"
            + "select * where {?s lit:degreeFrom lit:Harvard.}";
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    CountTupleHandler tupleHandler = new CountTupleHandler();
    tupleQuery.evaluate(tupleHandler);
    assertEquals(1, tupleHandler.getCount());

    query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n"
            + "PREFIX rdf: <" + RDF.NAMESPACE + ">\n"
            + "PREFIX lit: <" + litdupsNS + ">\n"
            + "select * where {?s lit:memberOf lit:Harvard.}";
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleHandler = new CountTupleHandler();
    tupleQuery.evaluate(tupleHandler);
    assertEquals(2, tupleHandler.getCount());

    query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n"
            + "PREFIX rdf: <" + RDF.NAMESPACE + ">\n"
            + "PREFIX lit: <" + litdupsNS + ">\n"
            + "select * where {?s lit:associatedWith ?o.}";
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleHandler = new CountTupleHandler();
    tupleQuery.evaluate(tupleHandler);
    assertEquals(3, tupleHandler.getCount());

    query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n"
            + "PREFIX rdf: <" + RDF.NAMESPACE + ">\n"
            + "PREFIX lit: <" + litdupsNS + ">\n"
            + "select * where {?s lit:gradDegreeFrom lit:Yale.}";
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleHandler = new CountTupleHandler();
    tupleQuery.evaluate(tupleHandler);
    assertEquals(1, tupleHandler.getCount());

    conn.close();
}
Example 18
Source File: CustomGraphQueryInferencerTest.java From rdf4j with BSD 3-Clause "New" or "Revised" License | 4 votes |
protected void runTest(final CustomGraphQueryInferencer inferencer)
        throws RepositoryException, RDFParseException, IOException, MalformedQueryException, UpdateExecutionException {
    // Initialize
    Repository sail = new SailRepository(inferencer);
    sail.initialize();
    RepositoryConnection connection = sail.getConnection();
    try {
        connection.begin();
        connection.clear();
        connection.add(new StringReader(initial), BASE, RDFFormat.TURTLE);

        // Test initial inferencer state
        Collection<Value> watchPredicates = inferencer.getWatchPredicates();
        assertThat(watchPredicates).hasSize(testData.predCount);
        Collection<Value> watchObjects = inferencer.getWatchObjects();
        assertThat(watchObjects).hasSize(testData.objCount);
        Collection<Value> watchSubjects = inferencer.getWatchSubjects();
        assertThat(watchSubjects).hasSize(testData.subjCount);
        ValueFactory factory = connection.getValueFactory();
        if (resourceFolder.startsWith(PREDICATE)) {
            assertThat(watchPredicates.contains(factory.createIRI(BASE, "brotherOf"))).isTrue();
            assertThat(watchPredicates.contains(factory.createIRI(BASE, "parentOf"))).isTrue();
        } else {
            IRI bob = factory.createIRI(BASE, "Bob");
            IRI alice = factory.createIRI(BASE, "Alice");
            assertThat(watchSubjects).contains(bob, alice);
            assertThat(watchObjects).contains(bob, alice);
        }

        // Test initial inferencing results
        assertThat(Iterations.asSet(connection.getStatements(null, null, null, true)))
                .hasSize(testData.initialCount);

        // Test results after removing some statements
        connection.prepareUpdate(QueryLanguage.SPARQL, delete).execute();
        assertThat(Iterations.asSet(connection.getStatements(null, null, null, true)))
                .hasSize(testData.countAfterRemove);

        // Tidy up. Storage gets re-used for subsequent tests, so must clear here,
        // in order to properly clear out any inferred statements.
        connection.clear();
        connection.commit();
    } finally {
        connection.close();
    }
    sail.shutDown();
}
Example 19
Source File: KBInstance.java From inception with Apache License 2.0 | 4 votes |
public void write(RepositoryConnection aConn, KnowledgeBase kb) {
    ValueFactory vf = aConn.getValueFactory();
    IRI subject = vf.createIRI(identifier);

    originalStatements.clear();

    Statement typeStmt = vf.createStatement(subject, kb.getTypeIri(), vf.createIRI(type.toString()));
    originalStatements.add(typeStmt);
    aConn.add(typeStmt);

    if (isNotBlank(name)) {
        Literal nameLiteral;
        if (language != null) {
            nameLiteral = vf.createLiteral(name, language);
        } else if (kb.getDefaultLanguage() != null) {
            nameLiteral = vf.createLiteral(name, kb.getDefaultLanguage());
        } else {
            nameLiteral = vf.createLiteral(name);
        }
        Statement nameStmt = vf.createStatement(subject, kb.getLabelIri(), nameLiteral);
        originalStatements.add(nameStmt);
        aConn.add(nameStmt);
    }

    if (isNotBlank(description)) {
        Literal descriptionLiteral;
        if (language == null) {
            descriptionLiteral = vf.createLiteral(description);
        } else {
            descriptionLiteral = vf.createLiteral(description, language);
        }
        Statement descStmt = vf.createStatement(subject, kb.getDescriptionIri(), descriptionLiteral);
        originalStatements.add(descStmt);
        aConn.add(descStmt);
    }
}
Example 20
Source File: RdfCloudTripleStoreConnectionTest.java From rya with Apache License 2.0 | 4 votes |
public void testSameAs() throws Exception {
    if (internalInferenceEngine == null) {
        return; // infer not supported
    }

    RepositoryConnection conn = repository.getConnection();
    conn.add(VF.createStatement(VF.createIRI(litdupsNS, "StudentA1"), OWL.SAMEAS, VF.createIRI(litdupsNS, "StudentA2")));
    conn.add(VF.createStatement(VF.createIRI(litdupsNS, "StudentA2"), OWL.SAMEAS, VF.createIRI(litdupsNS, "StudentA3")));
    conn.add(VF.createStatement(VF.createIRI(litdupsNS, "StudentB1"), OWL.SAMEAS, VF.createIRI(litdupsNS, "StudentB2")));
    conn.add(VF.createStatement(VF.createIRI(litdupsNS, "StudentB2"), OWL.SAMEAS, VF.createIRI(litdupsNS, "StudentB3")));
    conn.add(VF.createStatement(VF.createIRI(litdupsNS, "StudentA1"), VF.createIRI(litdupsNS, "pred1"), VF.createIRI(litdupsNS, "StudentB3")));
    conn.add(VF.createStatement(VF.createIRI(litdupsNS, "StudentB1"), VF.createIRI(litdupsNS, "pred2"), VF.createIRI(litdupsNS, "StudentA3")));
    conn.commit();
    conn.close();

    internalInferenceEngine.refreshGraph();

    conn = repository.getConnection();

    // query where finds sameAs for obj, pred specified
    String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n"
            + "PREFIX rdf: <" + RDF.NAMESPACE + ">\n"
            + "PREFIX lit: <" + litdupsNS + ">\n"
            + "select ?s where {?s lit:pred1 lit:StudentB2.}";
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    CountTupleHandler tupleHandler = new CountTupleHandler();
    tupleQuery.evaluate(tupleHandler);
    assertEquals(1, tupleHandler.getCount());

    // query where finds sameAs for obj only specified
    query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n"
            + "PREFIX rdf: <" + RDF.NAMESPACE + ">\n"
            + "PREFIX lit: <" + litdupsNS + ">\n"
            + "select ?s where {?s ?p lit:StudentB2.}";
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleHandler = new CountTupleHandler();
    tupleQuery.evaluate(tupleHandler);
    assertEquals(3, tupleHandler.getCount()); // including sameAs assertions

    // query where finds sameAs for subj, pred specified
    query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n"
            + "PREFIX rdf: <" + RDF.NAMESPACE + ">\n"
            + "PREFIX lit: <" + litdupsNS + ">\n"
            + "select ?s where {lit:StudentB2 lit:pred2 ?s.}";
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleHandler = new CountTupleHandler();
    tupleQuery.evaluate(tupleHandler);
    assertEquals(1, tupleHandler.getCount()); // including sameAs assertions

    // query where finds sameAs for subj only specified
    query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n"
            + "PREFIX rdf: <" + RDF.NAMESPACE + ">\n"
            + "PREFIX lit: <" + litdupsNS + ">\n"
            + "select ?s where {lit:StudentB2 ?p ?s.}";
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleHandler = new CountTupleHandler();
    tupleQuery.evaluate(tupleHandler);
    assertEquals(3, tupleHandler.getCount()); // including sameAs assertions

    // query where finds sameAs for subj, obj specified
    query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n"
            + "PREFIX rdf: <" + RDF.NAMESPACE + ">\n"
            + "PREFIX lit: <" + litdupsNS + ">\n"
            + "select ?s where {lit:StudentB2 ?s lit:StudentA2.}";
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleHandler = new CountTupleHandler();
    tupleQuery.evaluate(tupleHandler);
    assertEquals(1, tupleHandler.getCount());

    conn.close();
}