Java Code Examples for org.apache.jena.query.QueryExecutionFactory#createServiceRequest()
The following examples show how to use
org.apache.jena.query.QueryExecutionFactory#createServiceRequest() .
You can vote up the examples you find useful or vote down those you don't,
and you can open the original project or source file by following the link above each example. Related API usage is shown in the sidebar.
Example 1
Source File: ERDDataset2.java From gerbil with GNU Affero General Public License v3.0 | 5 votes |
private List<Marking> findMarkings(String[] text, File annFile) throws GerbilException { List<Marking> markings = new ArrayList<Marking>(); try (BufferedReader breader = new BufferedReader(new InputStreamReader( new FileInputStream(annFile), Charset.forName("UTF-8")))) { String line; while ((line = breader.readLine()) != null) { if(line.isEmpty()){ continue; } String[] annotation = line.split("\t"); int searchID = getTrecID(text[0]); int annoID = getTrecID(annotation[0]); if(searchID == annoID){ int start = text[1].indexOf(annotation[3]); int length = annotation[3].length(); //FIXME time consuming! String freebaseID = annotation[2].substring(1, annotation[2].length()).replace("/","."); Query query = QueryFactory.create(queryTemp.replace("%%v%%", freebaseID)); QueryExecution qexec = QueryExecutionFactory.createServiceRequest(DBPEDIA_SERVICE, query); String uri = qexec.execSelect().next().getResource("s").getURI(); markings.add(new NamedEntity(start, length, uri)); } else if(annoID > searchID){ //There is no annotation for the given text break; } } } catch (IOException e) { throw new GerbilException("Exception while reading dataset.", e, ErrorTypes.DATASET_LOADING_ERROR); } return markings; }
Example 2
Source File: SPARQLEndpointExecution.java From hypergraphql with Apache License 2.0 | 4 votes |
@Override public SPARQLExecutionResult call() { Map<String, Set<String>> resultSet = new HashMap<>(); markers.forEach(marker -> resultSet.put(marker, new HashSet<>())); Model unionModel = ModelFactory.createDefaultModel(); SPARQLServiceConverter converter = new SPARQLServiceConverter(schema); String sparqlQuery = converter.getSelectQuery(query, inputSubset, rootType); logger.debug(sparqlQuery); CredentialsProvider credsProvider = new BasicCredentialsProvider(); Credentials credentials = new UsernamePasswordCredentials(this.sparqlEndpointService.getUser(), this.sparqlEndpointService.getPassword()); credsProvider.setCredentials(AuthScope.ANY, credentials); HttpClient httpclient = HttpClients.custom() .setDefaultCredentialsProvider(credsProvider) .build(); HttpOp.setDefaultHttpClient(httpclient); ARQ.init(); Query jenaQuery = QueryFactory.create(sparqlQuery); QueryEngineHTTP qEngine = QueryExecutionFactory.createServiceRequest(this.sparqlEndpointService.getUrl(), jenaQuery); qEngine.setClient(httpclient); //qEngine.setSelectContentType(ResultsFormat.FMT_RS_XML.getSymbol()); ResultSet results = qEngine.execSelect(); results.forEachRemaining(solution -> { markers.stream().filter(solution::contains).forEach(marker -> resultSet.get(marker).add(solution.get(marker).asResource().getURI())); unionModel.add(this.sparqlEndpointService.getModelFromResults(query, solution, schema)); }); SPARQLExecutionResult sparqlExecutionResult = new SPARQLExecutionResult(resultSet, unionModel); logger.debug("Result: {}", sparqlExecutionResult); return sparqlExecutionResult; }
Example 3
Source File: PatternQueryHandler.java From IGUANA with GNU Affero General Public License v3.0 | 4 votes |
protected ResultSet getInstanceVars(ParameterizedSparqlString pss, Set<String> varNames) { QueryExecution exec = QueryExecutionFactory.createServiceRequest(service, convertToSelect(pss,varNames)); //return result set return exec.execSelect(); }
Example 4
Source File: SparqlBasedRequestProcessorForTPFs.java From Server.Java with MIT License | 4 votes |
/**
 * Creates a triple-pattern fragment by running the configured CONSTRUCT query
 * (with the pattern's constant terms bound) against the SPARQL endpoint, then
 * running the companion count query to estimate the total number of matches.
 *
 * @param subject   subject position of the requested pattern (variable or constant)
 * @param predicate predicate position of the requested pattern (variable or constant)
 * @param object    object position of the requested pattern (variable or constant)
 * @param offset    number of matching triples to skip (paging)
 * @param limit     maximum number of triples in this page
 * @return the fragment for this page, or an empty fragment if nothing matched
 */
@Override
protected ILinkedDataFragment createFragment(
        final ITriplePatternElement<RDFNode,String,String> subject,
        final ITriplePatternElement<RDFNode,String,String> predicate,
        final ITriplePatternElement<RDFNode,String,String> object,
        final long offset,
        final long limit )
{
    // FIXME: The following algorithm is incorrect for cases in which
    // the requested triple pattern contains a specific variable
    // multiple times;
    // e.g., (?x foaf:knows ?x ) or (_:bn foaf:knows _:bn)
    // see https://github.com/LinkedDataFragments/Server.Java/issues/24

    // Bind only the constant positions; variables stay unbound in the query.
    QuerySolutionMap map = new QuerySolutionMap();
    if ( ! subject.isVariable() ) {
        map.add("s", subject.asConstantTerm());
    }
    if ( ! predicate.isVariable() ) {
        map.add("p", predicate.asConstantTerm());
    }
    if ( ! object.isVariable() ) {
        map.add("o", object.asConstantTerm());
    }

    // NOTE(review): this mutates the shared `query` field in place — presumably
    // safe only if fragments are created sequentially; confirm thread-safety.
    query.setOffset(offset);
    query.setLimit(limit);
    Model triples = ModelFactory.createDefaultModel();

    // Build the SPARQL-endpoint URL, embedding credentials if configured.
    URIBuilder uriBuilder = new URIBuilder(endpointURI);
    addCredentials(uriBuilder);
    final String endpoint;
    try {
        endpoint = uriBuilder.build().toString();
    } catch (URISyntaxException e) {
        throw new RuntimeException(e);
    }

    // Fetch this page of triples via the CONSTRUCT query.
    ParameterizedSparqlString queryWithParams = new ParameterizedSparqlString(query.serialize(), map);
    try (QueryExecution qexec = QueryExecutionFactory.sparqlService(endpoint, queryWithParams.asQuery())) {
        qexec.execConstruct(triples);
    }
    if (triples.isEmpty()) {
        return createEmptyTriplePatternFragment();
    }

    // Try to get an estimate of the total match count via the count query.
    long size = triples.size();
    long estimate = -1;
    ParameterizedSparqlString countQueryWithParams = new ParameterizedSparqlString(countQuery.serialize(), map);
    try (QueryExecution qexec = QueryExecutionFactory.createServiceRequest(endpoint, countQueryWithParams.asQuery())) {
        ResultSet results = qexec.execSelect();
        if (results.hasNext()) {
            QuerySolution soln = results.nextSolution() ;
            Literal literal = soln.getLiteral("count");
            estimate = literal.getLong();
        }
    }

    /*GraphStatisticsHandler stats = model.getGraph().getStatisticsHandler();
    if (stats != null) {
        Node s = (subject != null) ? subject.asNode() : null;
        Node p = (predicate != null) ? predicate.asNode() : null;
        Node o = (object != null) ? object.asNode() : null;
        estimate = stats.getStatistic(s, p, o);
    }*/

    // No estimate or incorrect: fall back to a lower bound derived from the
    // page actually retrieved (a full page implies at least one more triple).
    if (estimate < offset + size) {
        estimate = (size == limit) ? offset + size + 1 : offset + size;
    }

    // create the fragment
    final boolean isLastPage = ( estimate < offset + limit );
    return createTriplePatternFragment( triples, estimate, isLastPage );
}