org.openrdf.query.Dataset Java Examples
The following examples show how to use
org.openrdf.query.Dataset.
You can vote up the examples you find useful or vote down the ones you don't,
and you can go to the original project or source file by following the link above each example. You may also check out the related API usage in the sidebar.
Example #1
Source File: SPARQLQueryTest.java From database with GNU General Public License v2.0 | 6 votes |
protected void uploadDataset(Dataset dataset) throws Exception { RepositoryConnection con = dataRep.getConnection(); try { // Merge default and named graphs to filter duplicates Set<URI> graphURIs = new HashSet<URI>(); graphURIs.addAll(dataset.getDefaultGraphs()); graphURIs.addAll(dataset.getNamedGraphs()); for (Resource graphURI : graphURIs) { upload(((URI)graphURI), graphURI); } } finally { con.close(); } }
Example #2
Source File: BigdataSparqlTest.java From database with GNU General Public License v2.0 | 6 votes |
/**
 * Reads and concatenates the contents of every graph referenced by the
 * dataset (default and named graphs, de-duplicated).
 *
 * @param dataset
 *            the dataset whose graph URLs are read (may be null, in which
 *            case the empty string is returned).
 * @return the concatenated text of all referenced graphs.
 * @throws Exception
 *             if any graph URL cannot be opened or read.
 */
protected String readInputData(Dataset dataset) throws Exception {
    final StringBuilder sb = new StringBuilder();
    if (dataset != null) {
        // Merge default and named graphs to filter duplicates.
        Set<URI> graphURIs = new HashSet<URI>();
        graphURIs.addAll(dataset.getDefaultGraphs());
        graphURIs.addAll(dataset.getNamedGraphs());
        for (URI graphURI : graphURIs) {
            URL graphURL = new URL(graphURI.toString());
            InputStream in = graphURL.openStream();
            try {
                sb.append(IOUtil.readString(in));
            } finally {
                // FIX: the original never closed this stream (resource leak).
                in.close();
            }
        }
    }
    return sb.toString();
}
Example #3
Source File: BigdataSparqlTest.java From database with GNU General Public License v2.0 | 6 votes |
@Override protected void uploadDataset(Dataset dataset) throws Exception { // RepositoryConnection con = dataRep.getConnection(); // try { // Merge default and named graphs to filter duplicates Set<URI> graphURIs = new HashSet<URI>(); graphURIs.addAll(dataset.getDefaultGraphs()); graphURIs.addAll(dataset.getNamedGraphs()); for (Resource graphURI : graphURIs) { upload(((URI)graphURI), graphURI); } // } // finally { // con.close(); // } }
Example #4
Source File: CumulusQueryOptimizer.java From cumulusrdf with Apache License 2.0 | 5 votes |
@Override public void optimize(final TupleExpr tupleExpr, final Dataset dataset, final BindingSet bindings) { // use native support for range queries if (_ranges_indexed) { tupleExpr.visit(new RangeQueryVisitor(tupleExpr)); tupleExpr.visit(new OrderByVisitor(tupleExpr)); } // use native cumulus model tupleExpr.visit(new CumulusNativeModelVisitor()); }
Example #5
Source File: SPARQLQueryTest.java From database with GNU General Public License v2.0 | 5 votes |
/**
 * Creates a SPARQL query test case.
 * <p>
 * Parentheses in the test name are blanked out before being passed to the
 * superclass (JUnit test names should not contain them).
 */
public SPARQLQueryTest(String testURI, String name, String queryFileURL,
        String resultFileURL, Dataset dataSet, boolean laxCardinality,
        boolean checkOrder) {
    // Single character-class regex is equivalent to the two chained
    // replaceAll("\\(", " ").replaceAll("\\)", " ") calls.
    super(name.replaceAll("[()]", " "));
    this.testURI = testURI;
    this.queryFileURL = queryFileURL;
    this.resultFileURL = resultFileURL;
    this.dataset = dataSet;
    this.laxCardinality = laxCardinality;
    this.checkOrder = checkOrder;
}
Example #6
Source File: CumulusRDFSailConnection.java From cumulusrdf with Apache License 2.0 | 5 votes |
@Override protected CloseableIteration<? extends BindingSet, QueryEvaluationException> evaluateInternal(TupleExpr tupleExpr, Dataset dataset, BindingSet bindings, boolean includeInferred) throws SailException { // Lock stLock = _sail.getStatementsReadLock(); // Clone the tuple expression to allow for more aggressive optimizations tupleExpr = tupleExpr.clone(); if (!(tupleExpr instanceof QueryRoot)) { // Add a dummy root node to the tuple expressions to allow the // optimizers to modify the actual root node tupleExpr = new QueryRoot(tupleExpr); } TripleSource tripleSource = new CumulusRDFTripleSource(); EvaluationStrategy strategy = new RangeEvaluationStrategy(tripleSource, dataset); new BindingAssigner().optimize(tupleExpr, dataset, bindings); new ConstantOptimizer(strategy).optimize(tupleExpr, dataset, bindings); new CompareOptimizer().optimize(tupleExpr, dataset, bindings); new ConjunctiveConstraintSplitter().optimize(tupleExpr, dataset, bindings); new DisjunctiveConstraintOptimizer().optimize(tupleExpr, dataset, bindings); new SameTermFilterOptimizer().optimize(tupleExpr, dataset, bindings); new QueryModelNormalizer().optimize(tupleExpr, dataset, bindings); new CumulusQueryOptimizer(_crdf.isRangeIndexesSupportEnabled()).optimize(tupleExpr, dataset, bindings); new QueryJoinOptimizer(_select_est).optimize(tupleExpr, dataset, bindings); // new FilterOptimizer().optimize(tupleExpr, dataset, bindings); new IterativeEvaluationOptimizer().optimize(tupleExpr, dataset, bindings); new OrderLimitOptimizer().optimize(tupleExpr, dataset, bindings); try { return strategy.evaluate(tupleExpr, EmptyBindingSet.getInstance()); } catch (QueryEvaluationException e) { e.printStackTrace(); throw new SailException(e); } }
Example #7
Source File: BigdataSparqlTest.java From database with GNU General Public License v2.0 | 5 votes |
/**
 * Creates a bigdata SPARQL test case; all arguments are forwarded to the
 * superclass constructor unchanged.
 */
public BigdataSparqlTest(String testURI, String name, String queryFileURL,
        String resultFileURL, Dataset dataSet, boolean laxCardinality,
        boolean checkOrder) {
    super(testURI, name, queryFileURL, resultFileURL, dataSet,
            laxCardinality, checkOrder);
}
Example #8
Source File: BigdataSparqlFullRWTxTest.java From database with GNU General Public License v2.0 | 5 votes |
/**
 * Creates a full read/write transaction SPARQL test case; all arguments
 * are forwarded to the superclass constructor unchanged.
 */
public BigdataSparqlFullRWTxTest(String testURI, String name,
        String queryFileURL, String resultFileURL, Dataset dataSet,
        boolean laxCardinality, boolean checkOrder) {
    super(testURI, name, queryFileURL, resultFileURL, dataSet,
            laxCardinality, checkOrder);
}
Example #9
Source File: BigdataEmbeddedFederationSparqlTest.java From database with GNU General Public License v2.0 | 5 votes |
/**
 * Creates an embedded-federation SPARQL test case; all arguments are
 * forwarded to the superclass constructor unchanged.
 */
public BigdataEmbeddedFederationSparqlTest(String testURI, String name,
        String queryFileURL, String resultFileURL, Dataset dataSet,
        boolean laxCardinality, boolean checkOrder) {
    super(testURI, name, queryFileURL, resultFileURL, dataSet,
            laxCardinality, checkOrder);
}
Example #10
Source File: ASTEvalHelper.java From database with GNU General Public License v2.0 | 5 votes |
/** * Optimize a SELECT query. * * @param store * The {@link AbstractTripleStore} having the data. * @param queryPlan * The {@link ASTContainer}. * @param globallyScopedBS * The initial solution to kick things off. * * @return An optimized AST. * * @throws QueryEvaluationException */ static public QueryRoot optimizeQuery( final ASTContainer astContainer, final AST2BOpContext context, final QueryBindingSet globallyScopedBS, final Dataset dataset) throws QueryEvaluationException { final AbstractTripleStore store = context.getAbstractTripleStore(); final DeferredResolutionResult resolved; try { // @see https://jira.blazegraph.com/browse/BLZG-1176 resolved = ASTDeferredIVResolution.resolveQuery( store, astContainer, globallyScopedBS, dataset, context); } catch (MalformedQueryException e) { throw new QueryEvaluationException(e.getMessage(), e); } if (resolved.dataset != null) { astContainer.getOriginalAST().setDataset( new DatasetNode(resolved.dataset, false/* update */)); } // Clear the optimized AST. astContainer.clearOptimizedAST(); // Batch resolve Values to IVs and convert to bigdata binding set. final IBindingSet[] globallyScopedBSAsList = toBindingSet(resolved.bindingSet) ; // Convert the query (generates an optimized AST as a side-effect). AST2BOpUtility.convert(context, globallyScopedBSAsList); // The optimized AST. final QueryRoot optimizedQuery = astContainer.getOptimizedAST(); return optimizedQuery; }
Example #11
Source File: BigdataSail.java From database with GNU General Public License v2.0 | 5 votes |
/** * Bigdata now uses an internal query model which differs significantly * from the Sesame query model. Support is no longer provided for * {@link TupleExpr} evaluation. SPARQL queries must be prepared and * evaluated using a {@link BigdataSailRepositoryConnection}. * * @throws SailException * <em>always</em>. */ public CloseableIteration<? extends BindingSet, QueryEvaluationException> evaluate( final TupleExpr tupleExpr, // final Dataset dataset,// final BindingSet bindings,// final boolean includeInferred// ) throws SailException { throw new SailException(ERR_OPENRDF_QUERY_MODEL); }
Example #12
Source File: ASTEvalHelper.java From database with GNU General Public License v2.0 | 4 votes |
/** * Evaluate a boolean query. * * @param store * The {@link AbstractTripleStore} having the data. * @param astContainer * The {@link ASTContainer}. * @param globallyScopedBS * The initial solution to kick things off. * * @return <code>true</code> if there are any solutions to the query. * * @throws QueryEvaluationException */ static public boolean evaluateBooleanQuery( final AbstractTripleStore store, final ASTContainer astContainer, final BindingSet globallyScopedBS, final Dataset dataset) throws QueryEvaluationException { final AST2BOpContext context = new AST2BOpContext(astContainer, store); final DeferredResolutionResult resolved; try { // @see https://jira.blazegraph.com/browse/BLZG-1176 resolved = ASTDeferredIVResolution.resolveQuery( store, astContainer, globallyScopedBS, dataset, context); } catch (MalformedQueryException e) { throw new QueryEvaluationException(e.getMessage(), e); } if (resolved.dataset != null) { astContainer.getOriginalAST().setDataset( new DatasetNode(resolved.dataset, false/* update */)); } // Clear the optimized AST. astContainer.clearOptimizedAST(); // Batch resolve Values to IVs and convert to bigdata binding set. final IBindingSet[] globallyScopedBSAsList = toBindingSet(resolved.bindingSet) ; // Convert the query (generates an optimized AST as a side-effect). AST2BOpUtility.convert(context, globallyScopedBSAsList); // The optimized AST. final QueryRoot optimizedQuery = astContainer.getOptimizedAST(); // Note: We do not need to materialize anything for ASK. final boolean materializeProjectionInQuery = context.materializeProjectionInQuery && !optimizedQuery.hasSlice(); CloseableIteration<BindingSet, QueryEvaluationException> itr = null; try { itr = ASTEvalHelper.evaluateQuery( astContainer, context, materializeProjectionInQuery, new IVariable[0]// required ); return itr.hasNext(); } finally { if (itr != null) { /** * Ensure query is terminated. An interrupt during hasNext() * should cause the query to terminate through itr.close(). 
* * @see <a * href="https://sourceforge.net/apps/trac/bigdata/ticket/707"> * BlockingBuffer.close() does not unblock threads </a> */ itr.close(); } } }
Example #13
Source File: RangeEvaluationStrategy.java From cumulusrdf with Apache License 2.0 | 4 votes |
/**
 * Creates an evaluation strategy over the given triple source and dataset;
 * both arguments are forwarded to the superclass unchanged.
 */
public RangeEvaluationStrategy(TripleSource tripleSource, Dataset dataset) {
    super(tripleSource, dataset);
}
Example #14
Source File: ASTEvalHelper.java From database with GNU General Public License v2.0 | 4 votes |
/** * Evaluate a SELECT query. * * @param store * The {@link AbstractTripleStore} having the data. * @param queryPlan * The {@link ASTContainer}. * @param globallyScopedBS * The initial solution to kick things off. * * @return An object from which the solutions may be drained. * * @throws QueryEvaluationException */ static public TupleQueryResult evaluateTupleQuery( final AbstractTripleStore store, final ASTContainer astContainer, final QueryBindingSet globallyScopedBS, final Dataset dataset) throws QueryEvaluationException { final AST2BOpContext context = new AST2BOpContext(astContainer, store); final QueryRoot optimizedQuery = optimizeQuery(astContainer, context, globallyScopedBS, dataset); // Get the projection for the query. final IVariable<?>[] projected = astContainer.getOptimizedAST() .getProjection().getProjectionVars(); final List<String> projectedSet = new LinkedList<String>(); for (IVariable<?> var : projected) projectedSet.add(var.getName()); final boolean materializeProjectionInQuery = context.materializeProjectionInQuery && !optimizedQuery.hasSlice(); final CloseableIteration<BindingSet, QueryEvaluationException> itr = ASTEvalHelper .evaluateQuery(astContainer, context, materializeProjectionInQuery, projected); TupleQueryResult r = null; try { r = new TupleQueryResultImpl(projectedSet, itr); return r; } finally { if (r == null) { /** * Ensure query is terminated if assignment to fails. E.g., if * interrupted during the ctor. * * @see <a * href="https://sourceforge.net/apps/trac/bigdata/ticket/707"> * BlockingBuffer.close() does not unblock threads </a> */ itr.close(); } } }
Example #15
Source File: ASTEvalHelper.java From database with GNU General Public License v2.0 | 4 votes |
/** * Evaluate a SPARQL UPDATE request (core method). * * @param astContainer * The query model. * @param ctx * The evaluation context. * @param dataset * A dataset which will override the data set declaration for * each {@link DeleteInsertGraph} operation in the update * sequence (optional). * @param includeInferred * if inferences should be included in various operations. * * @return The timestamp of the commit point. * * @throws SailException * * TODO timeout for update? */ static public long executeUpdate(// final BigdataSailRepositoryConnection conn,// final ASTContainer astContainer,// final Dataset dataset, final boolean includeInferred,// final QueryBindingSet bs ) throws UpdateExecutionException { if(conn == null) throw new IllegalArgumentException(); if(astContainer == null) throw new IllegalArgumentException(); final DeferredResolutionResult resolved; try { // @see https://jira.blazegraph.com/browse/BLZG-1176 resolved = ASTDeferredIVResolution.resolveUpdate(conn.getTripleStore(), astContainer, bs, dataset); } catch (MalformedQueryException e) { throw new UpdateExecutionException(e.getMessage(), e); } try { if (dataset != null) { /* * Apply the optional data set override. */ applyDataSet(conn.getTripleStore(), astContainer, resolved.dataset); } final AST2BOpUpdateContext ctx = new AST2BOpUpdateContext( astContainer, conn); doSparqlLogging(ctx); // Propagate attribute. ctx.setIncludeInferred(includeInferred); // Batch resolve Values to IVs and convert to bigdata binding set. final IBindingSet[] bindingSets = toBindingSet(resolved.bindingSet) ; // Propagate bindings ctx.setQueryBindingSet(bs); ctx.setBindings(bindingSets); ctx.setDataset(dataset); /* * Convert the query (generates an optimized AST as a side-effect). */ AST2BOpUpdate.optimizeUpdateRoot(ctx); /* * Generate and execute physical plans for the update operations. 
*/ AST2BOpUpdate.convertUpdate(ctx); return ctx.getCommitTime(); } catch (Exception ex) { ex.printStackTrace(); throw new UpdateExecutionException(ex); } }
Example #16
Source File: ASTDeferredIVResolution.java From database with GNU General Public License v2.0 | 4 votes |
/**
 * Captures the output of deferred IV resolution: the resolved binding set
 * and the resolved dataset.
 */
public DeferredResolutionResult(final BindingSet bindingSet, final Dataset dataset) {
    this.bindingSet = bindingSet;
    this.dataset = dataset;
}
Example #17
Source File: ASTDeferredIVResolution.java From database with GNU General Public License v2.0 | 4 votes |
/** * Do deferred resolution of IVs, which were left unresolved while preparing the update * @param store - triple store, which will be used for values resolution * @param ast - AST model of the update, which should be resolved * @param bs - binding set, which should be resolved * @param dataset * @return * @throws MalformedQueryException */ public static DeferredResolutionResult resolveUpdate(final AbstractTripleStore store, final ASTContainer ast, final BindingSet bs, final Dataset dataset) throws MalformedQueryException { final ASTDeferredIVResolution termsResolver = new ASTDeferredIVResolution(store); // process provided binding set BindingSet resolvedBindingSet = termsResolver.handleBindingSet(store, bs); // process provided dataset final Dataset resolvedDataset = termsResolver.handleDataset(store, dataset); /* * Prevent running IV resolution more than once. * Property RESOLVED is set after resolution completed, * so subsequent repetitive calls to update execute * (for example with different bindings) would not result * in running resolution again. */ if (Boolean.TRUE.equals(ast.getProperty(Annotations.RESOLVED))) { /* * Resolve binding set or dataset if there are any values to be processed */ if (!termsResolver.deferred.isEmpty()) { termsResolver.resolveIVs(store); } return new DeferredResolutionResult(resolvedBindingSet, resolvedDataset); } final long beginNanos = System.nanoTime(); final UpdateRoot qc = (UpdateRoot)ast.getProperty(Annotations.ORIGINAL_AST); /* * Handle dataset declaration. It only appears for DELETE/INSERT * (aka ASTModify). It is attached to each DeleteInsertNode for * which it is given. 
*/ final Map<IDataSetNode, List<ASTDatasetClause>> dcLists = new LinkedHashMap<>(); for (final Update update: qc.getChildren()) { if (update instanceof IDataSetNode) { final List<ASTDatasetClause> dcList = new ArrayList(); dcList.addAll(update.getDatasetClauses()); dcLists.put((IDataSetNode)update, dcList); } } termsResolver.resolve(store, qc, dcLists, bs); if (ast.getOriginalUpdateAST().getPrefixDecls()!=null && !ast.getOriginalUpdateAST().getPrefixDecls().isEmpty()) { qc.setPrefixDecls(ast.getOriginalUpdateAST().getPrefixDecls()); } ast.setOriginalUpdateAST(qc); ast.setResolveValuesTime(System.nanoTime() - beginNanos); ast.setProperty(Annotations.RESOLVED, Boolean.TRUE); return new DeferredResolutionResult(resolvedBindingSet, resolvedDataset); }
Example #18
Source File: ASTDeferredIVResolution.java From database with GNU General Public License v2.0 | 4 votes |
/** * Do deferred resolution of IVs, which were left unresolved after execution of each Update in UpdateRoot * @param store - triple store, which will be used for values resolution * @param ast - AST model of the update, which should be resolved * @param bs - binding set, which should be resolved * @param dataset * @return * @throws MalformedQueryException */ public static DeferredResolutionResult resolveUpdate(final AbstractTripleStore store, final Update update, final BindingSet bs, final Dataset dataset) throws MalformedQueryException { final ASTDeferredIVResolution termsResolver = new ASTDeferredIVResolution(store); // process provided binding set BindingSet resolvedBindingSet = termsResolver.handleBindingSet(store, bs); // process provided dataset final Dataset resolvedDataset = termsResolver.handleDataset(store, dataset); // final long beginNanos = System.nanoTime(); termsResolver.resolve(store, update, null/*datasetClauseLists*/, bs); // ast.setResolveValuesTime(System.nanoTime() - beginNanos); return new DeferredResolutionResult(resolvedBindingSet, resolvedDataset); }
Example #19
Source File: ObjectQuery.java From anno4j with Apache License 2.0 | 4 votes |
/** Returns the dataset of the wrapped query. */
public Dataset getDataset() {
    return query.getDataset();
}
Example #20
Source File: AST2BOpUpdateContext.java From database with GNU General Public License v2.0 | 4 votes |
/** @return the dataset held by this context (may be null). */
public Dataset getDataset() {
    return dataset;
}
Example #21
Source File: AST2BOpUpdateContext.java From database with GNU General Public License v2.0 | 4 votes |
/** Sets the dataset held by this context. */
public void setDataset(Dataset dataset) {
    this.dataset = dataset;
}
Example #22
Source File: BigdataParsedQuery.java From database with GNU General Public License v2.0 | 4 votes |
/**
 * Unsupported operation.
 *
 * @throws UnsupportedOperationException always.
 */
public BigdataParsedQuery(TupleExpr tupleExpr, Dataset dataset) {
    throw new UnsupportedOperationException();
}
Example #23
Source File: BigdataParsedUpdate.java From database with GNU General Public License v2.0 | 4 votes |
/**
 * Unsupported operation.
 *
 * @throws UnsupportedOperationException always.
 */
public BigdataParsedUpdate(TupleExpr tupleExpr, Dataset dataset) {
    throw new UnsupportedOperationException();
}
Example #24
Source File: SPARQLQueryTest.java From database with GNU General Public License v2.0 | 4 votes |
/**
 * Factory method for a SPARQL query test instance (variant with an explicit
 * result-order check flag).
 *
 * @param testURI        identifier of the test in the manifest.
 * @param name           human-readable test name.
 * @param queryFileURL   location of the query file.
 * @param resultFileURL  location of the expected results file.
 * @param dataSet        the dataset for the test (may be null).
 * @param laxCardinality whether result cardinality is checked laxly.
 * @param checkOrder     whether the result order must match.
 * @return a new test instance.
 */
SPARQLQueryTest createSPARQLQueryTest(String testURI, String name, String queryFileURL, String resultFileURL, Dataset dataSet, boolean laxCardinality, boolean checkOrder);
Example #25
Source File: SPARQLQueryTest.java From database with GNU General Public License v2.0 | 4 votes |
/**
 * Factory method for a SPARQL query test instance (variant without a
 * result-order check flag).
 *
 * @param testURI        identifier of the test in the manifest.
 * @param name           human-readable test name.
 * @param queryFileURL   location of the query file.
 * @param resultFileURL  location of the expected results file.
 * @param dataSet        the dataset for the test (may be null).
 * @param laxCardinality whether result cardinality is checked laxly.
 * @return a new test instance.
 */
SPARQLQueryTest createSPARQLQueryTest(String testURI, String name, String queryFileURL, String resultFileURL, Dataset dataSet, boolean laxCardinality);
Example #26
Source File: SPARQLQueryTest.java From database with GNU General Public License v2.0 | 4 votes |
/**
 * Convenience constructor which disables the result-order check
 * (checkOrder defaults to false).
 */
public SPARQLQueryTest(String testURI, String name, String queryFileURL,
        String resultFileURL, Dataset dataSet, boolean laxCardinality) {
    this(testURI, name, queryFileURL, resultFileURL, dataSet,
            laxCardinality, false);
}
Example #27
Source File: ObjectQuery.java From anno4j with Apache License 2.0 | 4 votes |
/** Forwards the dataset to the wrapped query. */
public void setDataset(Dataset arg0) {
    query.setDataset(arg0);
}
Example #28
Source File: DatasetNode.java From database with GNU General Public License v2.0 | 3 votes |
/**
 * Builds a dataset node from a Sesame {@link Dataset}, converting its
 * default and named graph sets into internal values.
 */
public DatasetNode(final Dataset dataset, final boolean update) {
    this(DataSetSummary.toInternalValues(dataset.getDefaultGraphs()),
            DataSetSummary.toInternalValues(dataset.getNamedGraphs()),
            update);
}
Example #29
Source File: BigdataSail.java From database with GNU General Public License v2.0 | 3 votes |
/** * Evaluate a bigdata query model. * * @param queryRoot * The query model. * @param dataset * The data set (optional). * @param bindings * The initial bindings. * @param includeInferred * <code>true</code> iff inferences will be considered when * reading on access paths. * * @return The {@link CloseableIteration} from which the solutions may * be drained. * * @throws SailException * * @deprecated Consider removing this method from our public API. It is * no longer in any code path for the bigdata code base. * Embedded applications requiring high level evaluation * should use {@link BigdataSailRepositoryConnection}. It * does not call through here, but goes directly to the * {@link ASTEvalHelper}. */ public CloseableIteration<? extends BindingSet, QueryEvaluationException> evaluate( final QueryRoot queryRoot, // final Dataset dataset,// final BindingSet bindings,// final boolean includeInferred// ) throws SailException { final ASTContainer astContainer = new ASTContainer(queryRoot); final QueryRoot originalQuery = astContainer.getOriginalAST(); originalQuery.setIncludeInferred(includeInferred); try { flushStatementBuffers(true/* assertions */, true/* retractions */); return ASTEvalHelper.evaluateTupleQuery(getTripleStore(), astContainer, new QueryBindingSet(bindings), dataset); } catch (QueryEvaluationException e) { throw new SailException(e); } }
Example #30
Source File: BigdataFederationSparqlTest.java From database with GNU General Public License v2.0 | 3 votes |
/**
 * Creates a federation SPARQL test case; all arguments are forwarded to
 * the superclass constructor unchanged.
 */
public BigdataFederationSparqlTest(String URI, String name, String query,
        String results, Dataset dataSet, boolean laxCardinality,
        boolean checkOrder) {
    super(URI, name, query, results, dataSet, laxCardinality, checkOrder);
}