org.apache.jena.query.ARQ Java Examples
The following examples show how to use org.apache.jena.query.ARQ.
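Before the project snippets, here is a minimal, self-contained sketch (not taken from any of the projects below) of the ARQ entry points the examples rely on: explicit initialisation via ARQ.init(), the global Context returned by ARQ.getContext(), and symbol-based settings read with ARQ.isTrue(...). The use of ARQ.symLogExec at the end is only an illustration of setting a flag on a copied context.

import org.apache.jena.query.ARQ;
import org.apache.jena.sparql.util.Context;

public class ArqBasicsSketch {
    public static void main(String[] args) {
        // Initialise Jena/ARQ explicitly; safe to call even if it has already run.
        ARQ.init();

        // The global context holds engine-wide settings keyed by Symbol.
        Context global = ARQ.getContext();

        // Read a boolean setting (the same symbol appears in Example #5 below).
        boolean fixup = ARQ.isTrue(ARQ.fixupUndefinedPrefixes);
        System.out.println("fixupUndefinedPrefixes = " + fixup);

        // Per-execution tweaks are usually made on a copy, as in Example #2 below.
        Context local = global.copy();
        local.setTrue(ARQ.symLogExec);
    }
}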
Example #1
Source File: ConverterRDF2RDFStar.java From RDFstarTools with Apache License 2.0
public ConverterRDF2RDFStar(String[] argv) {
    super(argv);

    modVersion.addClass(Jena.class);
    modVersion.addClass(ARQ.class);
    modVersion.addClass(RIOT.class);

    super.addModule(modTime);

    registerArgumentToBeIgnored("sink");
    registerArgumentToBeIgnored("nocheck");
    registerArgumentToBeIgnored("validate");
    registerArgumentToBeIgnored("rdfs");
    super.addModule(modLangParse);
    unregisterArgumentToBeIgnored("sink");
    registerArgumentToBeIgnored("nocheck");
    unregisterArgumentToBeIgnored("validate");
    unregisterArgumentToBeIgnored("rdfs");

    super.getUsage().startCategory("Output options");
    super.add(argOutputFile,
              "--out --outfile",
              "Output file (optional, printing to stdout if omitted)");
}
Example #2
Source File: JenaUtil.java From shacl with Apache License 2.0
private static Node invokeFunction(Resource function, ExprList args, Dataset dataset) {
    if (dataset == null) {
        dataset = ARQFactory.get().getDataset(ModelFactory.createDefaultModel());
    }
    E_Function expr = new E_Function(function.getURI(), args);
    DatasetGraph dsg = dataset.asDatasetGraph();

    // Evaluate the function against a copy of the global ARQ context,
    // with the current time pinned for this invocation.
    Context cxt = ARQ.getContext().copy();
    cxt.set(ARQConstants.sysCurrentTime, NodeFactoryExtra.nowAsDateTime());
    FunctionEnv env = new ExecutionContext(cxt, dsg.getDefaultGraph(), dsg, null);
    try {
        NodeValue r = expr.eval(BindingRoot.create(), env);
        if (r != null) {
            return r.asNode();
        }
    }
    catch (ExprEvalException ex) {
        // Evaluation errors are deliberately swallowed; the method returns null instead.
    }
    return null;
}
Example #3
Source File: ExecuteSPARQLStar.java From RDFstarTools with Apache License 2.0
@Override
protected void exec() {
    StageBuilder.setGenerator(ARQ.getContext(), new StageGeneratorSPARQLStar());
    super.exec();
}
Example #4
Source File: ConverterSPARQLStar2SPARQL.java From RDFstarTools with Apache License 2.0
public ConverterSPARQLStar2SPARQL(String[] argv) {
    super(argv);
    modVersion.addClass(Jena.class);
    modVersion.addClass(ARQ.class);
    modVersion.addClass(RIOT.class);
    super.addModule(modQuery);
    super.addModule(modOutput);
}
Example #5
Source File: RdflintParserTurtle.java From rdflint with MIT License
private String expandPrefixedName(String prefix, String localPart) {
    String expansion = getPrefixMap().expand(prefix, localPart);
    if (expansion == null) {
        if (ARQ.isTrue(ARQ.fixupUndefinedPrefixes)) {
            return RiotLib.fixupPrefixIRI(prefix, localPart);
        }
    }
    return expansion;
}
Example #6
Source File: ContextUtils.java From sparql-generate with Apache License 2.0
private Builder() {
    this.context = new Context(ARQ.getContext());
    this.commons = new Commons();
    // update functionregistry
    FunctionRegistry registry = (FunctionRegistry) context.get(ARQConstants.registryFunctions);
    SPARQLExtFunctionRegistry newRegistry = new SPARQLExtFunctionRegistry(registry, context);
    context.set(ARQConstants.registryFunctions, newRegistry);
    // update iteratorregistry
    IteratorFunctionRegistry iteratorRegistry =
            (IteratorFunctionRegistry) context.get(SPARQLExt.REGISTRY_ITERATORS);
    IteratorFunctionRegistry newIteratorRegistry =
            new IteratorFunctionRegistry(iteratorRegistry, context);
    context.set(SPARQLExt.REGISTRY_ITERATORS, newIteratorRegistry);
    // default streammanager
    context.set(SysRIOT.sysStreamManager, SPARQLExtStreamManager.makeStreamManager());
    // set variable parts
    context.set(DATASET, DatasetFactory.create());
    // default prefix manager
    context.set(PREFIX_MANAGER, PrefixMapping.Standard);
    // default number of results and blank nodes
    context.set(SIZE, 0);
    // context.set(LIST_NODES, new HashMap<>());
    context.set(COMMONS, commons);
}
Example #7
Source File: IteratorFunctionRegistry.java From sparql-generate with Apache License 2.0
public static synchronized IteratorFunctionRegistry get() {
    // Initialize if there is no registry already set
    IteratorFunctionRegistry reg = get(ARQ.getContext());
    if (reg == null) {
        reg = standardRegistry();
        set(ARQ.getContext(), reg);
    }
    return reg;
}
Example #8
Source File: LinkedDataFragmentServlet.java From Server.Java with MIT License
/**
 * @param servletConfig
 * @throws ServletException
 */
@Override
public void init(ServletConfig servletConfig) throws ServletException {
    // Ensure ARQ has been initialized (needed for TDB requests)
    ARQ.init();

    try {
        // load the configuration
        File configFile = getConfigFile(servletConfig);
        config = new ConfigReader(new FileReader(configFile));

        // register data source types
        for (Entry<String, IDataSourceType> typeEntry : config.getDataSourceTypes().entrySet()) {
            DataSourceTypesRegistry.register(typeEntry.getKey(), typeEntry.getValue());
        }

        // register data sources
        for (Entry<String, JsonObject> dataSource : config.getDataSources().entrySet()) {
            dataSources.put(dataSource.getKey(), DataSourceFactory.create(dataSource.getValue()));
        }

        // register content types
        MIMEParse.register("text/html");
        MIMEParse.register(Lang.RDFXML.getHeaderString());
        MIMEParse.register(Lang.NTRIPLES.getHeaderString());
        MIMEParse.register(Lang.JSONLD.getHeaderString());
        MIMEParse.register(Lang.TTL.getHeaderString());
    } catch (Exception e) {
        throw new ServletException(e);
    }
}
Example #9
Source File: SPARQLEndpointExecution.java From hypergraphql with Apache License 2.0
@Override
public SPARQLExecutionResult call() {
    Map<String, Set<String>> resultSet = new HashMap<>();
    markers.forEach(marker -> resultSet.put(marker, new HashSet<>()));
    Model unionModel = ModelFactory.createDefaultModel();

    // Build the SPARQL SELECT query for this execution.
    SPARQLServiceConverter converter = new SPARQLServiceConverter(schema);
    String sparqlQuery = converter.getSelectQuery(query, inputSubset, rootType);
    logger.debug(sparqlQuery);

    // Configure an HTTP client with basic-auth credentials for the endpoint.
    CredentialsProvider credsProvider = new BasicCredentialsProvider();
    Credentials credentials =
            new UsernamePasswordCredentials(this.sparqlEndpointService.getUser(),
                                            this.sparqlEndpointService.getPassword());
    credsProvider.setCredentials(AuthScope.ANY, credentials);
    HttpClient httpclient = HttpClients.custom()
            .setDefaultCredentialsProvider(credsProvider)
            .build();
    HttpOp.setDefaultHttpClient(httpclient);

    // Run the SELECT query against the remote SPARQL endpoint.
    ARQ.init();
    Query jenaQuery = QueryFactory.create(sparqlQuery);
    QueryEngineHTTP qEngine =
            QueryExecutionFactory.createServiceRequest(this.sparqlEndpointService.getUrl(), jenaQuery);
    qEngine.setClient(httpclient);
    //qEngine.setSelectContentType(ResultsFormat.FMT_RS_XML.getSymbol());

    // Collect marker bindings and merge the per-solution models into the union model.
    ResultSet results = qEngine.execSelect();
    results.forEachRemaining(solution -> {
        markers.stream()
               .filter(solution::contains)
               .forEach(marker -> resultSet.get(marker).add(solution.get(marker).asResource().getURI()));
        unionModel.add(this.sparqlEndpointService.getModelFromResults(query, solution, schema));
    });

    SPARQLExecutionResult sparqlExecutionResult = new SPARQLExecutionResult(resultSet, unionModel);
    logger.debug("Result: {}", sparqlExecutionResult);
    return sparqlExecutionResult;
}
Example #10
Source File: QueryIterTripleStarPatternTest.java From RDFstarTools with Apache License 2.0
protected ExecutionContext createTestExecCxt() {
    final DatasetGraph dsg = DatasetGraphFactory.create( createTestGraph() );
    final Context context = ARQ.getContext();
    return new ExecutionContext( context, dsg.getDefaultGraph(), dsg, QC.getFactory(context) );
}
Example #11
Source File: IteratorFunctionRegistry.java From sparql-generate with Apache License 2.0
public static synchronized IteratorFunctionRegistry standardRegistry() {
    IteratorFunctionRegistry reg = new IteratorFunctionRegistry(ARQ.getContext());
    return reg;
}
Example #12
Source File: IteratorFunctionRegistry.java From sparql-generate with Apache License 2.0
public IteratorFunctionRegistry() {
    this(ARQ.getContext());
}
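Examples #7, #11, and #12 together show a common Jena idiom: a registry object is stored in an ARQ Context under a well-known symbol, with a lazily created instance behind the global ARQ.getContext(). As a hedged sketch (assuming the static get(Context)/set(Context, registry) accessors used in Example #7 are public), the same registry could be attached to a copied context instead of the global one:

// Hypothetical usage of the per-context registry pattern shown above.
Context cxt = ARQ.getContext().copy();
IteratorFunctionRegistry custom = IteratorFunctionRegistry.standardRegistry();
IteratorFunctionRegistry.set(cxt, custom);
IteratorFunctionRegistry fetched = IteratorFunctionRegistry.get(cxt);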
Example #13
Source File: FunctionTestCaseType.java From shacl with Apache License 2.0
@Override
public void run(Model results) {
    Resource testCase = getResource();

    // Temporarily swap in a thread-local function registry for the duration of the test.
    FunctionRegistry oldFR = FunctionRegistry.get();
    CurrentThreadFunctionRegistry threadFR = new CurrentThreadFunctionRegistry(oldFR);
    FunctionRegistry.set(ARQ.getContext(), threadFR);
    CurrentThreadFunctions old = CurrentThreadFunctionRegistry.register(testCase.getModel());
    try {
        for (TestCaseContextFactory contextFactory : contextFactories) {
            TestCaseContext context = contextFactory.createContext();
            String expression = JenaUtil.getStringProperty(testCase, DASH.expression);
            Statement expectedResultS = testCase.getProperty(DASH.expectedResult);

            // Evaluate the expression by wrapping it in a trivial SELECT query.
            String queryString = "SELECT (" + expression + " AS ?result) WHERE {}";
            Query query = ARQFactory.get().createQuery(testCase.getModel(), queryString);
            context.setUpTestContext();
            try (QueryExecution qexec = ARQFactory.get().createQueryExecution(query, testCase.getModel())) {
                ResultSet rs = qexec.execSelect();
                if (!rs.hasNext()) {
                    if (expectedResultS != null) {
                        createFailure(results,
                                "Expression returned no result, but expected: " + expectedResultS.getObject(), context);
                        return;
                    }
                }
                else {
                    RDFNode actual = rs.next().get("result");
                    if (expectedResultS == null) {
                        if (actual != null) {
                            createFailure(results,
                                    "Expression returned a result, but none expected: " + actual, context);
                            return;
                        }
                    }
                    else if (testCase.hasProperty(DASH.expectedResultIsTTL, JenaDatatypes.TRUE)) {
                        // Compare expected and actual results as graphs when the expected result is Turtle.
                        Graph expectedGraph = parseGraph(expectedResultS.getObject());
                        Graph actualGraph = parseGraph(actual);
                        if (!expectedGraph.isIsomorphicWith(actualGraph)) {
                            createFailure(results,
                                    "Mismatching result graphs. Expected: " + expectedResultS.getObject() +
                                    ". Found: " + actual, context);
                            return;
                        }
                    }
                    else if (!expectedResultS.getObject().equals(actual)) {
                        createFailure(results,
                                "Mismatching result. Expected: " + expectedResultS.getObject() +
                                ". Found: " + actual, context);
                        return;
                    }
                }
            }
            finally {
                context.tearDownTestContext();
            }
        }
    }
    finally {
        // Restore the original function registry.
        CurrentThreadFunctionRegistry.unregister(old);
        FunctionRegistry.set(ARQ.getContext(), oldFR);
    }
    createResult(results, DASH.SuccessTestCaseResult);
}
Example #14
Source File: JSTestCaseType.java From shacl with Apache License 2.0
@Override
public void run(Model results) {
    Resource testCase = getResource();
    FunctionRegistry oldFR = FunctionRegistry.get();
    CurrentThreadFunctionRegistry threadFR = new CurrentThreadFunctionRegistry(oldFR);
    FunctionRegistry.set(ARQ.getContext(), threadFR);
    CurrentThreadFunctions old = CurrentThreadFunctionRegistry.register(testCase.getModel());

    Statement expectedResultS = testCase.getProperty(DASH.expectedResult);
    String queryString = "SELECT (<" + getResource() + ">() AS ?result) WHERE {}";
    Query query = ARQFactory.get().createQuery(testCase.getModel(), queryString);
    try (QueryExecution qexec = ARQFactory.get().createQueryExecution(query, testCase.getModel())) {
        ResultSet rs = qexec.execSelect();
        if (!rs.hasNext()) {
            if (expectedResultS != null) {
                createFailure(results, "Expression returned no result, but expected: " + expectedResultS.getObject());
                return;
            }
        }
        else {
            RDFNode result = rs.next().get("result");
            if (expectedResultS == null) {
                if (result != null) {
                    createFailure(results, "Expression returned a result, but none expected: " + result);
                    return;
                }
            }
            else if (!expectedResultS.getObject().equals(result)) {
                createFailure(results, "Mismatching result. Expected: " + expectedResultS.getObject() + ". Found: " + result);
                return;
            }
        }
    }
    finally {
        CurrentThreadFunctionRegistry.unregister(old);
        FunctionRegistry.set(ARQ.getContext(), oldFR);
    }
    createResult(results, DASH.SuccessTestCaseResult);
}
Example #15
Source File: RdfWriterImpl.java From Server.Java with MIT License
public RdfWriterImpl(Map<String, String> prefixes, HashMap<String, IDataSource> datasources, String mimeType) {
    super(prefixes, datasources);
    this.contentType = RDFLanguages.contentTypeToLang(mimeType);
    ARQ.init();
}