Java Code Examples for org.eclipse.rdf4j.repository.RepositoryConnection#prepareTupleQuery()
The following examples show how to use org.eclipse.rdf4j.repository.RepositoryConnection#prepareTupleQuery().
The source file and originating project are listed above each example.
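Most of the examples below follow the same pattern: open a RepositoryConnection, prepare a SPARQL SELECT query with prepareTupleQuery(), evaluate it, iterate over the TupleQueryResult, and close both the result and the connection. The following sketch condenses that pattern; the endpoint URL and query string are placeholders and are not taken from any of the projects listed below.

import org.eclipse.rdf4j.query.BindingSet;
import org.eclipse.rdf4j.query.QueryLanguage;
import org.eclipse.rdf4j.query.TupleQuery;
import org.eclipse.rdf4j.query.TupleQueryResult;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.repository.sparql.SPARQLRepository;

public class PrepareTupleQuerySketch {
    public static void main(String[] args) {
        // Placeholder endpoint; substitute any SPARQL endpoint URL.
        SPARQLRepository repo = new SPARQLRepository("http://example.org/sparql");
        repo.init();
        try (RepositoryConnection conn = repo.getConnection()) {
            TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL,
                    "SELECT ?s ?p ?o WHERE { ?s ?p ?o } LIMIT 10");
            // evaluate() returns a TupleQueryResult that must be closed;
            // try-with-resources takes care of that here.
            try (TupleQueryResult result = query.evaluate()) {
                while (result.hasNext()) {
                    BindingSet bindings = result.next();
                    System.out.println(bindings.getValue("s") + " "
                            + bindings.getValue("p") + " " + bindings.getValue("o"));
                }
            }
        } finally {
            repo.shutDown();
        }
    }
}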
Example 1
Source File: TBSSSummariesGenerator.java From CostFed with GNU Affero General Public License v3.0 | 6 votes |
public static Long getDistinctSubjectCount(String endpoint) {
    String strQuery = "SELECT (COUNT(distinct ?s) AS ?triples) " + //
            "WHERE " +
            "{" +
            "?s ?p ?o " +
            "} ";
    SPARQLRepository repo = createSPARQLRepository(endpoint);
    RepositoryConnection conn = repo.getConnection();
    try {
        TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL, strQuery);
        TupleQueryResult rs = query.evaluate();
        String v = rs.next().getValue("triples").stringValue();
        rs.close();
        return Long.parseLong(v);
    } finally {
        conn.close();
    }
}
Example 2
Source File: RdfCloudTripleStoreConnectionTest.java From rya with Apache License 2.0 | 6 votes |
public void testRegexFilter() throws Exception {
    RepositoryConnection conn = repository.getConnection();
    IRI loadPerc = VF.createIRI(litdupsNS, "loadPerc");
    IRI testClass = VF.createIRI(litdupsNS, "test");
    Literal six = VF.createLiteral("6");
    Literal sev = VF.createLiteral("7");
    Literal ten = VF.createLiteral("10");
    conn.add(cpu, loadPerc, six);
    conn.add(cpu, loadPerc, sev);
    conn.add(cpu, loadPerc, ten);
    conn.add(cpu, RDF.TYPE, testClass);
    conn.commit();

    String query = "PREFIX org.apache: <" + NAMESPACE + ">\n" +
            "select * where {" +
            String.format("<%s> ?p ?o.\n", cpu.stringValue()) +
            "FILTER(regex(?o, '^1'))." +
            "}";
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    CountTupleHandler cth = new CountTupleHandler();
    tupleQuery.evaluate(cth);
    conn.close();
    assertEquals(cth.getCount(), 1);
}
Example 3
Source File: SummaryGenerator.java From CostFed with GNU Affero General Public License v3.0 | 6 votes |
public static Long getDistinctObjectCount(String endpoint) {
    String strQuery = "SELECT (COUNT(distinct ?o) AS ?triples) " +
            "WHERE " +
            "{" +
            "?s ?p ?o " +
            "FILTER isIRI(?o)" +
            "} ";
    SPARQLRepository repo = createSPARQLRepository(endpoint);
    RepositoryConnection conn = repo.getConnection();
    try {
        TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL, strQuery);
        TupleQueryResult rs = query.evaluate();
        String v = rs.next().getValue("triples").stringValue();
        rs.close();
        return Long.parseLong(v);
    } finally {
        conn.close();
    }
}
Example 4
Source File: TBSSSummariesGenerator.java From CostFed with GNU Affero General Public License v3.0 | 6 votes |
public static Long getDistinctObjectCount(String endpoint) {
    String strQuery = "SELECT (COUNT(distinct ?o) AS ?triples) " + //
            "WHERE " +
            "{" +
            "?s ?p ?o " +
            "FILTER isIRI(?o)" +
            "} ";
    SPARQLRepository repo = createSPARQLRepository(endpoint);
    RepositoryConnection conn = repo.getConnection();
    try {
        TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL, strQuery);
        TupleQueryResult rs = query.evaluate();
        String v = rs.next().getValue("triples").stringValue();
        rs.close();
        return Long.parseLong(v);
    } finally {
        conn.close();
    }
}
Example 5
Source File: SPARQLQueryExecutor.java From semagrow with Apache License 2.0 | 6 votes |
/**
 * Sends a tuple query to a given endpoint.
 *
 * @param endpoint The endpoint to which the query is to be sent
 * @param sparqlQuery The query string to be sent
 * @param bindings A set of bindings for the query
 * @param expr The tuple expression that corresponds to the query string
 * @return Stream with the query results
 * @throws QueryEvaluationException
 * @throws MalformedQueryException
 * @throws RepositoryException
 */
protected Flux<BindingSet> sendTupleQuery(URL endpoint, String sparqlQuery, BindingSet bindings, TupleExpr expr)
        throws QueryEvaluationException, MalformedQueryException, RepositoryException {

    RepositoryConnection conn = getConnection(endpoint);
    TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL, sparqlQuery);

    for (Binding b : bindings)
        query.setBinding(b.getName(), b.getValue());

    LoggingUtil.logRemote(logger, conn, sparqlQuery, endpoint, expr, query);

    return Flux.from(new TupleQueryResultPublisher(query, sparqlQuery, qfrHandler, mat, endpoint))
            .doAfterTerminate(() -> closeQuietly(conn));
}
Example 6
Source File: ArbitraryLengthQueryTest.java From rya with Apache License 2.0 | 6 votes |
/**
 * This test works. The expected result is 6 rows ranging from "Model1Class 1" through "Model1Class 6".
 *
 * @throws RepositoryException
 * @throws QueryEvaluationException
 * @throws TupleQueryResultHandlerException
 * @throws MalformedQueryException
 */
public void testWithoutSubquery()
        throws RepositoryException, QueryEvaluationException, TupleQueryResultHandlerException, MalformedQueryException {
    final String query = "SELECT ?i ?i_label ?i_class ?i_v1" +
            "WHERE {" +
            "?i <http://www.w3.org/2000/01/rdf-schema#label> ?i_label ." +
            "?i a ?i_class ." +
            "?i_class <http://www.w3.org/2000/01/rdf-schema#subClassOf>* <http://dragon-research.com/cham/model/model1#Model1Class> ." +
            "OPTIONAL { ?i <http://dragon-research.com/cham/model/model1#name> ?i_v1 } ." +
            "}" +
            "ORDER BY ?i_label";

    final RepositoryConnection conn = repository.getConnection();
    final TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    RdfCloudTripleStoreConnectionTest.CountTupleHandler countTupleHandler = new RdfCloudTripleStoreConnectionTest.CountTupleHandler();
    tupleQuery.evaluate(countTupleHandler);
    assertEquals(6, countTupleHandler.getCount());
    conn.close();
}
Example 7
Source File: AbstractLuceneSailSpinTest.java From rdf4j with BSD 3-Clause "New" or "Revised" License | 6 votes |
@Test
public void testDistanceFunction() throws Exception {
    RepositoryConnection connection = getConnection();
    String queryStr = "prefix geo: <" + GEO.NAMESPACE + ">" +
            "prefix geof: <" + GEOF.NAMESPACE + ">" +
            "prefix search: <" + LuceneSailSchema.NAMESPACE + ">" +
            "select ?toUri ?fromUri ?dist where {(?from ?range ?units geo:asWKT search:distance)" +
            "search:withinDistance (?toUri ?to ?dist) ." +
            "?toUri a <urn:geo/Landmark>. ?fromUri geo:asWKT ?from; <urn:geo/maxDistance> ?range.}";
    try {
        TupleQuery query = connection.prepareTupleQuery(QueryLanguage.SPARQL, queryStr);
        query.setBinding("units", GEOF.UOM_METRE);

        printTupleResult(query);
        try (TupleQueryResult result = query.evaluate()) {
            int count = countTupleResults(result);
            Assert.assertEquals(2, count);
        }
    } catch (Exception e) {
        connection.rollback();
        throw e;
    } finally {
        connection.commit();
    }
}
Example 8
Source File: SemagrowSummariesGenerator.java From CostFed with GNU Affero General Public License v3.0 | 6 votes |
/**
 * Get total number of triples for a predicate
 * @param pred Predicate
 * @param endpoint SPARQL endpoint URL
 * @return triples
 */
public static Long getTripleCount(String pred, String endpoint) {
    String strQuery = "SELECT (COUNT(?s) AS ?triples) " + //
            "WHERE " +
            "{" +
            "?s <" + pred + "> ?o " +
            "} ";
    SPARQLRepository repo = new SPARQLRepository(endpoint);
    repo.initialize();
    RepositoryConnection conn = repo.getConnection();
    try {
        TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL, strQuery);
        TupleQueryResult rs = query.evaluate();
        return Long.parseLong(rs.next().getValue("triples").stringValue());
    } finally {
        conn.close();
        repo.shutDown();
    }
}
Example 9
Source File: RepositoryPerformance.java From rdf4j with BSD 3-Clause "New" or "Revised" License | 6 votes |
private int runQuery(RepositoryConnection conn, IRI instance) throws Exception {
    long start = System.currentTimeMillis();
    TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL,
            "SELECT * WHERE { <" + instance.stringValue() + "> ?p ?o }");
    TupleQueryResult res = null;
    try {
        res = query.evaluate();
        int count = 0;
        while (res.hasNext()) {
            res.next();
            count++;
        }
        System.out.println("Instance " + instance.stringValue() + " has " + count + " results. Duration: "
                + (System.currentTimeMillis() - start) + "ms");
        return count;
    } finally {
        if (res != null) {
            res.close();
        }
    }
}
Example 10
Source File: ElasticsearchSailExample.java From rdf4j with BSD 3-Clause "New" or "Revised" License | 6 votes |
private static void tupleQuery(String queryString, RepositoryConnection connection)
        throws QueryEvaluationException, RepositoryException, MalformedQueryException {
    System.out.println("Running query: \n" + queryString);
    TupleQuery query = connection.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
    try (TupleQueryResult result = query.evaluate()) {
        // print the results
        System.out.println("Query results:");
        while (result.hasNext()) {
            BindingSet bindings = result.next();
            System.out.println("found match: ");
            for (Binding binding : bindings) {
                System.out.println("\t" + binding.getName() + ": " + binding.getValue());
            }
        }
    }
}
Example 11
Source File: SPARQLRepositoryPerformance.java From rdf4j with BSD 3-Clause "New" or "Revised" License | 5 votes |
/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    SPARQLRepository repo = new SPARQLRepository("http://10.212.10.29:8081/openrdf-sesame/repositories/drugbank");
    repo.init();

    RepositoryConnection conn = null;
    TupleQueryResult qRes = null;
    try {
        conn = repo.getConnection();
        TupleQuery q = conn.prepareTupleQuery(QueryLanguage.SPARQL, "SELECT * WHERE { ?x ?y ?z } LIMIT 100");
        qRes = q.evaluate();
        while (qRes.hasNext()) {
            qRes.next();
        }
        System.out.println("Done.");
    } finally {
        if (qRes != null) {
            qRes.close();
        }
        if (conn != null) {
            conn.close();
        }
    }
    repo.shutDown();
}
Example 12
Source File: RdfCloudTripleStoreConnectionTest.java From rya with Apache License 2.0 | 5 votes |
public void testInsertDeleteData() throws Exception {
    RepositoryConnection conn = repository.getConnection();

    String insert = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
            "INSERT DATA\n" +
            "{ <http://example/book3> dc:title \"A new book\" ;\n" +
            " dc:creator \"A.N.Other\" .\n" +
            "}";
    Update update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
    update.execute();

    String query = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
            "select * where { <http://example/book3> ?p ?o. }";
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    CountTupleHandler tupleHandler = new CountTupleHandler();
    tupleQuery.evaluate(tupleHandler);
    assertEquals(2, tupleHandler.getCount());

    String delete = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
            "\n" +
            "DELETE DATA\n" +
            "{ <http://example/book3> dc:title \"A new book\" ;\n" +
            " dc:creator \"A.N.Other\" .\n" +
            "}";
    update = conn.prepareUpdate(QueryLanguage.SPARQL, delete);
    update.execute();

    query = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
            "select * where { <http://example/book3> ?p ?o. }";
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleHandler = new CountTupleHandler();
    tupleQuery.evaluate(tupleHandler);
    assertEquals(0, tupleHandler.getCount());

    conn.close();
}
Example 13
Source File: ProviderUtil.java From CostFed with GNU Affero General Public License v3.0 | 5 votes |
/**
 * Checks the connection by submitting a SPARQL SELECT query:
 *
 * SELECT * WHERE { ?s ?p ?o } LIMIT 1
 *
 * Throws an exception if the query cannot be evaluated
 * successfully for some reason (indicating that the
 * endpoint is not ok)
 *
 * @param repo
 * @throws RepositoryException
 * @throws QueryEvaluationException
 * @throws MalformedQueryException
 */
public static long checkConnectionIfConfigured(Config cfg, Repository repo) {
    if (!cfg.isValidateRepositoryConnections()) {
        return 0;
    }

    long startTime = System.currentTimeMillis();
    RepositoryConnection conn = repo.getConnection();
    try {
        TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL, "SELECT * WHERE { ?s ?p ?o } LIMIT 1");
        TupleQueryResult qRes = null;
        try {
            qRes = query.evaluate();
            if (!qRes.hasNext()) {
                log.warn("No data in provided repository (" + repo + ")");
            }
            while (qRes.hasNext()) {
                qRes.next();
            }
        } finally {
            if (qRes != null) {
                Iterations.closeCloseable(qRes);
            }
        }
    } finally {
        conn.close();
    }
    return System.currentTimeMillis() - startTime;
}
Example 14
Source File: MongoDbRyaSailFactoryLoadFilesIT.java From rya with Apache License 2.0 | 5 votes |
private static int performTupleQuery(final String query, final RepositoryConnection conn)
        throws RepositoryException, MalformedQueryException, QueryEvaluationException, TupleQueryResultHandlerException {
    final TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleQuery.setMaxExecutionTime(10);
    final CountingResultHandler handler = new CountingResultHandler();
    tupleQuery.evaluate(handler);
    return handler.getCount();
}
Example 15
Source File: PcjIntegrationTestingUtil.java From rya with Apache License 2.0 | 5 votes |
/**
 * Scan Rya for results that solve the PCJ's query and store them in the PCJ
 * table.
 * <p>
 * This method assumes the PCJ table has already been created.
 *
 * @param pcj - A connection to the mongoDB that hosts the PCJ table. (not null)
 * @param pcjTableName - The name of the PCJ table that will receive the results. (not null)
 * @param ryaConn - A connection to the Rya store that will be queried to find results. (not null)
 * @throws PcjException
 *             If results could not be written to the PCJ table, the PCJ
 *             table does not exist, or the query that is being executed was
 *             malformed.
 */
public static void populatePcj(final MongoPcjDocuments pcj, final String pcjTableName,
        final RepositoryConnection ryaConn) throws PcjException {
    checkNotNull(pcj);
    checkNotNull(pcjTableName);
    checkNotNull(ryaConn);

    try {
        // Fetch the query that needs to be executed from the PCJ table.
        final PcjMetadata pcjMetadata = pcj.getPcjMetadata(pcjTableName);
        final String sparql = pcjMetadata.getSparql();

        // Query Rya for results to the SPARQL query.
        final TupleQuery query = ryaConn.prepareTupleQuery(QueryLanguage.SPARQL, sparql);
        final TupleQueryResult results = query.evaluate();

        // Load batches of 1000 of them at a time into the PCJ table
        final Set<BindingSet> batch = new HashSet<>(1000);
        while (results.hasNext()) {
            batch.add(results.next());
            if (batch.size() == 1000) {
                writeResults(pcj, pcjTableName, batch);
                batch.clear();
            }
        }
        if (!batch.isEmpty()) {
            writeResults(pcj, pcjTableName, batch);
        }
    } catch (RepositoryException | MalformedQueryException | QueryEvaluationException e) {
        throw new PcjException("Could not populate a PCJ table with Rya results for the table named: " + pcjTableName, e);
    }
}
Example 16
Source File: SummaryGenerator.java From CostFed with GNU Affero General Public License v3.0 | 5 votes |
/**
 * Get Predicate List
 * @param endPointUrl SPARQL endPoint Url
 * @param graph Named graph
 * @return predLst Predicates List
 */
private static List<String> getPredicates(String endPointUrl, String graph) {
    String strQuery = "SELECT DISTINCT ?p";
    if (null != graph) {
        strQuery += " FROM <" + graph + ">";
    }
    strQuery += " WHERE { ?s ?p ?o }";

    List<String> predLst = new ArrayList<String>();
    SPARQLRepository repo = createSPARQLRepository(endPointUrl);
    RepositoryConnection conn = repo.getConnection();
    try {
        TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL, strQuery);
        TupleQueryResult res = query.evaluate();
        while (res.hasNext()) {
            String pred = res.next().getValue("p").toString();
            predLst.add(pred);
        }
        res.close();
    } finally {
        conn.close();
    }
    return predLst;
}
Example 17
Source File: SPARQLQueryBuilder.java From inception with Apache License 2.0 | 5 votes |
/**
 * Execute the query and return {@code true} if the result set is not empty. This internally
 * limits the number of results requested via SPARQL to 1 and should complete faster than
 * retrieving the entire result set and checking whether it is empty.
 *
 * @param aConnection
 *            a connection to a triple store.
 * @param aAll
 *            whether to also include entities with implicit namespaces (e.g. those defined by RDF).
 * @return {@code true} if the result set is not empty.
 */
@Override
public boolean exists(RepositoryConnection aConnection, boolean aAll) {
    long startTime = currentTimeMillis();
    String queryId = toHexString(hashCode());

    limit(1);

    SelectQuery query = selectQuery();
    String queryString = query.getQueryString();

    LOG.trace("[{}] Query: {}", queryId, queryString);

    if (returnEmptyResult) {
        LOG.debug("[{}] Query was skipped because it would not return any results anyway", queryId);
        return false;
    } else {
        TupleQuery tupleQuery = aConnection.prepareTupleQuery(queryString);
        boolean result = !evaluateListQuery(tupleQuery, aAll).isEmpty();
        LOG.debug("[{}] Query returned {} in {}ms", queryId, result, currentTimeMillis() - startTime);
        return result;
    }
}
Example 18
Source File: SparqlRepositoryTest.java From rdf4j with BSD 3-Clause "New" or "Revised" License | 4 votes |
public static void main(String[] args) throws Exception {
    ExecutorService executor = Executors.newFixedThreadPool(20);

    SPARQLRepository repo = new SPARQLRepository("http://dbpedia.org/sparql");
    repo.init();

    final RepositoryConnection conn = repo.getConnection();

    TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL,
            "SELECT DISTINCT ?President ?Party ?Articles ?TopicPage WHERE { ?President <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://dbpedia.org/class/yago/PresidentsOfTheUnitedStates> . }");
    TupleQueryResult res = query.evaluate();

    List<IRI> list = new ArrayList<>();
    while (res.hasNext()) {
        list.add((IRI) res.next().getValue("President"));
    }
    res.close();
    System.out.println("Retrieved " + list.size() + " instances");

    List<Future<?>> tasks = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        for (final IRI instance : list) {
            tasks.add(executor.submit(() -> {
                try {
                    Thread.sleep(new Random().nextInt(300));
                    BooleanQuery bq = conn.prepareBooleanQuery(QueryLanguage.SPARQL,
                            "ASK { <" + instance.stringValue()
                                    + "> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://dbpedia.org/class/yago/PresidentsOfTheUnitedStates> }");
                    bq.evaluate();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }));
        }
    }

    System.out.println("All tasks submitted, awaiting termination.");

    // TupleQueryResult qRes2 = query.evaluate();
    // while (qRes2.hasNext()) {
    //     qRes2.next();
    // }

    for (Future<?> t : tasks) {
        t.get();
    }

    System.out.println("Done");
    executor.shutdown();
    System.exit(1);
}
Example 19
Source File: PcjIntegrationTestingUtil.java From rya with Apache License 2.0 | 4 votes |
/**
 * Scan Rya for results that solve the PCJ's query and store them in the PCJ
 * table.
 * <p>
 * This method assumes the PCJ table has already been created.
 *
 * @param accumuloConn
 *            - A connection to the Accumulo that hosts the PCJ table. (not null)
 * @param pcjTableName
 *            - The name of the PCJ table that will receive the results. (not null)
 * @param ryaConn
 *            - A connection to the Rya store that will be queried to find results. (not null)
 * @throws PcjException
 *             If results could not be written to the PCJ table, the PCJ
 *             table does not exist, or the query that is being executed was
 *             malformed.
 */
public static void populatePcj(final Connector accumuloConn, final String pcjTableName,
        final RepositoryConnection ryaConn) throws PcjException {
    checkNotNull(accumuloConn);
    checkNotNull(pcjTableName);
    checkNotNull(ryaConn);

    try {
        // Fetch the query that needs to be executed from the PCJ table.
        final PcjMetadata pcjMetadata = new PcjTables().getPcjMetadata(accumuloConn, pcjTableName);
        final String sparql = pcjMetadata.getSparql();

        // Query Rya for results to the SPARQL query.
        final TupleQuery query = ryaConn.prepareTupleQuery(QueryLanguage.SPARQL, sparql);
        final TupleQueryResult results = query.evaluate();

        // Load batches of 1000 of them at a time into the PCJ table
        final Set<BindingSet> batch = new HashSet<>(1000);
        while (results.hasNext()) {
            batch.add(results.next());
            if (batch.size() == 1000) {
                addResults(accumuloConn, pcjTableName, batch);
                batch.clear();
            }
        }
        if (!batch.isEmpty()) {
            addResults(accumuloConn, pcjTableName, batch);
        }
    } catch (RepositoryException | MalformedQueryException | QueryEvaluationException e) {
        throw new PcjException(
                "Could not populate a PCJ table with Rya results for the table named: " + pcjTableName, e);
    }
}
Example 20
Source File: RdfCloudTripleStoreConnectionTest.java From rya with Apache License 2.0 | 4 votes |
public void testDropGraph() throws Exception {
    RepositoryConnection conn = repository.getConnection();

    String insert = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
            "PREFIX ex: <http://example/addresses#>\n" +
            "INSERT DATA\n" +
            "{ GRAPH ex:G1 {\n" +
            "<http://example/book3> dc:title \"A new book\" ;\n" +
            " dc:creator \"A.N.Other\" .\n" +
            "}\n" +
            "}";
    Update update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
    update.execute();

    insert = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
            "PREFIX ex: <http://example/addresses#>\n" +
            "INSERT DATA\n" +
            "{ GRAPH ex:G2 {\n" +
            "<http://example/book3> dc:title \"A new book\" ;\n" +
            " dc:creator \"A.N.Other\" .\n" +
            "}\n" +
            "}";
    update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
    update.execute();

    String query = "PREFIX dc: <http://purl.org/dc/elements/1.1/>\n" +
            "select * where { <http://example/book3> ?p ?o. }";
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    CountTupleHandler tupleHandler = new CountTupleHandler();
    tupleQuery.evaluate(tupleHandler);
    assertEquals(4, tupleHandler.getCount());

    tupleHandler = new CountTupleHandler();
    String drop = "PREFIX ex: <http://example/addresses#>\n" +
            "DROP GRAPH ex:G2 ";
    update = conn.prepareUpdate(QueryLanguage.SPARQL, drop);
    update.execute();
    tupleQuery.evaluate(tupleHandler);
    assertEquals(2, tupleHandler.getCount());

    tupleHandler = new CountTupleHandler();
    drop = "PREFIX ex: <http://example/addresses#>\n" +
            "DROP GRAPH ex:G1 ";
    update = conn.prepareUpdate(QueryLanguage.SPARQL, drop);
    update.execute();
    tupleQuery.evaluate(tupleHandler);
    assertEquals(0, tupleHandler.getCount());

    conn.close();
}