org.openrdf.rio.RDFHandlerException Java Examples
The following examples show how to use org.openrdf.rio.RDFHandlerException, drawn from several open source projects.
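Before the project examples, here is a minimal self-contained sketch (not taken from any of the projects below) of the usual pattern: an RDFHandler implementation wraps a checked failure from its sink in an RDFHandlerException, and the caller catches that exception around Rio parsing. The store(...) helper and the example triple are placeholders, not part of any real API.

import java.io.IOException;
import java.io.StringReader;

import org.openrdf.model.Statement;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFHandlerException;
import org.openrdf.rio.RDFParseException;
import org.openrdf.rio.RDFParser;
import org.openrdf.rio.Rio;
import org.openrdf.rio.helpers.RDFHandlerBase;

public class RDFHandlerExceptionSketch {

    public static void main(String[] args) throws IOException, RDFParseException {
        RDFParser parser = Rio.createParser(RDFFormat.NTRIPLES);
        parser.setRDFHandler(new RDFHandlerBase() {
            @Override
            public void handleStatement(Statement st) throws RDFHandlerException {
                try {
                    // store(st) stands in for whatever sink you write to
                    store(st);
                } catch (Exception e) {
                    // wrap the underlying failure so it propagates through the parser
                    throw new RDFHandlerException(e);
                }
            }
        });
        try {
            parser.parse(new StringReader(
                    "<http://example.org/s> <http://example.org/p> <http://example.org/o> .\n"), "");
        } catch (RDFHandlerException e) {
            // the handler's failure surfaces here, at the call site of parse()
            e.printStackTrace();
        }
    }

    private static void store(Statement st) throws Exception {
        // hypothetical sink; replace with a RepositoryConnection, RDFWriter, etc.
        System.out.println(st);
    }
}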
Example #1
Source File: ChunkedCommitHandler.java From neo4j-sparql-extension with GNU General Public License v3.0
/**
 * Handles a statement.
 *
 * Statements are added until the chunk size is reached. After a chunk of
 * statements has been added, the transaction is committed and a new
 * transaction is started.
 *
 * @param stmnt the statement to add
 * @throws RDFHandlerException if adding or committing fails
 */
@Override
public void handleStatement(Statement stmnt) throws RDFHandlerException {
    try {
        // check if triple should be added to a specific graph
        if (dctx != null) {
            conn.add(stmnt, dctx);
        } else {
            conn.add(stmnt);
        }
        // check if chunk size is reached and transaction should be
        // committed
        count++;
        if (count >= size) {
            count = 0;
            conn.commit();
            conn.begin();
        }
    } catch (RepositoryException ex) {
        throw new RDFHandlerException(ex);
    }
}
Example #2
Source File: AbstractDataAndSPARQLTestCase.java From database with GNU General Public License v2.0
@Override
public void handleStatement(final Statement stmt) throws RDFHandlerException {

    final Resource s = stmt.getSubject();
    final URI p = stmt.getPredicate();
    final Value o = stmt.getObject();
    final Resource c = stmt.getContext() == null ? this.context : stmt.getContext();

//    if (log.isDebugEnabled())
//        log.debug("<" + s + "," + p + "," + o + "," + c + ">");

    buffer.add(s, p, o, c, StatementEnum.Explicit);

    n++;
}
Example #3
Source File: GraphStore.java From neo4j-sparql-extension with GNU General Public License v3.0
/**
 * Helper method for handleAdd.
 */
private void addToGraphstore(
        RepositoryConnection conn,
        InputStream in,
        String base,
        RDFFormat format,
        Resource dctx,
        boolean chunked)
        throws IOException, RDFParseException, RDFHandlerException, RepositoryException {
    if (chunked) {
        RDFParser parser = getRDFParser(format);
        parser.setRDFHandler(
                new ChunkedCommitHandler(conn, chunksize, dctx));
        parser.parse(in, base);
    } else {
        if (dctx != null) {
            conn.add(in, base, format, dctx);
        } else {
            conn.add(in, base, format);
        }
    }
}
Example #4
Source File: TestStandaloneRDRParsers.java From database with GNU General Public License v2.0
private void testStandaloneParser(RDFParser parser, String resourceName)
        throws IOException, RDFParseException, RDFHandlerException {
    bNodeFound = false;
    parser.setRDFHandler(new RDFHandlerBase() {
        @Override
        public void handleStatement(Statement st) throws RDFHandlerException {
            if (st.getSubject() instanceof BNode) {
                bNodeFound = true;
            }
            super.handleStatement(st);
        }
    });
    try (InputStream is = getClass().getClassLoader().getResourceAsStream(resourceName)) {
        parser.parse(is, "");
    }
    assertTrue(bNodeFound);
}
Example #5
Source File: SailGraphLoader.java From database with GNU General Public License v2.0
@Override
protected void addStatement(final Statement stmt, final Resource[] c)
        throws RDFHandlerException {
    try {
        cxn.addStatement(//
                stmt.getSubject(), //
                stmt.getPredicate(), //
                stmt.getObject(), //
                c);
        if (c == null || c.length == 0)
            ntriples++;
        else
            ntriples += c.length;
    } catch (SailException e) {
        throw new RDFHandlerException(e);
    }
}
Example #6
Source File: SPARQLGraphStreamingOutput.java From neo4j-sparql-extension with GNU General Public License v3.0
/**
 * Called by JAX-RS upon building a response.
 *
 * @param out the {@link OutputStream} to write the triples to
 * @throws IOException if there was an error during communication
 * @throws WebApplicationException if there was an error while serialising
 */
@Override
public void write(OutputStream out) throws IOException, WebApplicationException {
    try {
        RDFWriter writer = factory.getWriter(out);
        // evaluate query and stream result
        query.evaluate(writer);
        conn.close();
    } catch (RepositoryException | QueryEvaluationException | RDFHandlerException ex) {
        // server error
        close(conn, ex);
        throw new WebApplicationException(ex);
    }
}
Example #7
Source File: BigdataNTriplesParserTestCase.java From database with GNU General Public License v2.0
public void testNTriplesFile() throws Exception {

    RDFParser turtleParser = createRDFParser();
    turtleParser.setDatatypeHandling(RDFParser.DatatypeHandling.IGNORE);
    turtleParser.setRDFHandler(new RDFHandlerBase() {
        public void handleStatement(Statement st) throws RDFHandlerException {
            if (log.isInfoEnabled())
                log.info("Statement: " + st);
        }
    });

    // Note: This is a local copy.
    InputStream in = BigdataNTriplesParser.class.getResourceAsStream(NTRIPLES_TEST_FILE);
    try {
        turtleParser.parse(in, NTRIPLES_TEST_URL);
    } catch (RDFParseException e) {
        fail("Failed to parse N-Triples test document: " + e.getMessage());
    } finally {
        in.close();
    }
}
Example #8
Source File: BigdataNTriplesParserTestCase.java From database with GNU General Public License v2.0
public void testNTriplesFileWithSIDS() throws Exception {

    RDFParser turtleParser = createRDFParser();
    turtleParser.setDatatypeHandling(RDFParser.DatatypeHandling.IGNORE);
    turtleParser.setRDFHandler(new RDFHandlerBase() {
        public void handleStatement(final Statement st) throws RDFHandlerException {
            if (log.isInfoEnabled())
                log.info("Statement: " + st);
        }
    });

    final String fileUrl = BigdataNTriplesParser.class.getResource(
            NTRIPLES_TEST_FILE_WITH_SIDS).toExternalForm();

    // Note: This is a local copy.
    InputStream in = BigdataNTriplesParser.class
            .getResourceAsStream(NTRIPLES_TEST_FILE_WITH_SIDS);
    try {
        turtleParser.parse(in, fileUrl);
    } catch (RDFParseException e) {
        fail("Failed to parse N-Triples test document: " + e.getMessage(), e);
    } finally {
        in.close();
    }
}
Example #9
Source File: BigdataNTriplesParserTestCase.java From database with GNU General Public License v2.0
public void testNTriplesFileWithReification() throws Exception {

    RDFParser turtleParser = createRDFParser();
    turtleParser.setDatatypeHandling(RDFParser.DatatypeHandling.IGNORE);
    turtleParser.setRDFHandler(new RDFHandlerBase() {
        public void handleStatement(final Statement st) throws RDFHandlerException {
            if (log.isInfoEnabled())
                log.info("Statement: " + st);
        }
    });

    final String fileUrl = BigdataNTriplesParser.class.getResource(
            NTRIPLES_TEST_FILE_WITH_REIFICATION).toExternalForm();

    // Note: This is a local copy.
    InputStream in = BigdataNTriplesParser.class
            .getResourceAsStream(NTRIPLES_TEST_FILE_WITH_REIFICATION);
    try {
        turtleParser.parse(in, fileUrl);
    } catch (RDFParseException e) {
        fail("Failed to parse N-Triples test document: " + e.getMessage(), e);
    } finally {
        in.close();
    }
}
Example #10
Source File: RepositoryConnectionTest.java From database with GNU General Public License v2.0
@Test
public void testAddRemove() throws Exception {
    URI FOAF_PERSON = vf.createURI("http://xmlns.com/foaf/0.1/Person");
    final Statement stmt = vf.createStatement(bob, name, nameBob);

    testCon.add(bob, RDF.TYPE, FOAF_PERSON);

    testCon.begin();
    testCon.add(stmt);
    testCon.remove(stmt);
    testCon.commit();

    testCon.exportStatements(null, null, null, false, new RDFHandlerBase() {

        @Override
        public void handleStatement(Statement st) throws RDFHandlerException {
            assertThat(st, is(not(equalTo(stmt))));
        }
    });
}
Example #11
Source File: RML_Converter.java From GeoTriples with Apache License 2.0
/**
 * Convert a Spark partition into triples.
 *
 * @param partition_iter an iterator of rows; the rows must follow the predefined headers
 * @return an iterator over the serialized triples
 */
public Iterator<String> convertPartition(Iterator<Row> partition_iter) {
    List<String> partitionTriples = new ArrayList<>();
    partition_iter.forEachRemaining(row -> {
        try {
            for (int i = 0; i < mappingList.size(); i++) {
                rdfWriter.handleStatementIter(performer.perform(row, mappingList.get(i),
                        tm_predicates.get(i), listPOM.get(i), processor, i));
            }
            partitionTriples.add(rdfWriter.getString());
        } catch (RDFHandlerException e) {
            System.out.println("ERROR while Handling Statement");
            e.printStackTrace();
            System.exit(1);
        }
    });
    return partitionTriples.iterator();
}
Example #12
Source File: EntitySerializerTest.java From attic-polygene-java with Apache License 2.0
@Test
public void testEntitySerializer() throws RDFHandlerException {
    EntityReference entityReference = EntityReference.parseEntityReference( "test2" );
    Usecase usecase = UsecaseBuilder.newUsecase( "Test" );
    Instant currentTime = SystemTime.now();
    EntityStoreUnitOfWork unitOfWork = entityStore.newUnitOfWork( module, usecase, currentTime );
    EntityState entityState = unitOfWork.entityStateOf( module, entityReference );

    Iterable<Statement> graph = serializer.serialize( entityState );

    String[] prefixes = new String[]{ "rdf", "dc", " vc" };
    String[] namespaces = new String[]{ Rdfs.RDF, DcRdf.NAMESPACE, "http://www.w3.org/2001/vcard-rdf/3.0#" };

    new RdfXmlSerializer().serialize( graph, new PrintWriter( System.out ), prefixes, namespaces );
}
Example #13
Source File: FileSesameDataset.java From GeoTriples with Apache License 2.0
/**
 * Insert Triple/Statement into graph
 *
 * @param s subject uriref
 * @param p predicate uriref
 * @param o value object (URIref or Literal)
 * @param contexts varArgs context objects (use default graph if null)
 */
@Override
public void add(Resource s, URI p, Value o, Resource... contexts) {
    if (log.isDebugEnabled()) {
        log.debug("[FileSesameDataSet:add] Add triple (" + s.stringValue()
                + ", " + p.stringValue() + ", " + o.stringValue() + ").");
    }
    Statement st = new StatementImpl(s, p, o);
    try {
        writer.handleStatement(st);
        size++;
    } catch (RDFHandlerException ex) {
        log.error(o.toString());
    }
}
Example #14
Source File: FilterableTripleHandler.java From anthelion with Apache License 2.0
public void handleStatement(Resource subject, URI predicate, Value object, Resource... contexts)
        throws RDFHandlerException {
    if (!started) {
        throw new IllegalStateException("Cannot handle statement without start parsing first.");
    }
    try {
        printResource(subject);
        printSpace();
        printURI(predicate);
        printSpace();
        printObject(object);
        printSpace();
        for (int i = 0; i < contexts.length; i++) {
            printResource(contexts[i]);
            printSpace();
        }
        printCloseStatement();
    } catch (IOException ioe) {
        throw new RDFHandlerException("An error occurred while printing statement.", ioe);
    }
}
Example #15
Source File: NTriplesAlternative.java From GeoTriples with Apache License 2.0
public void handleStatementIter(Collection<Statement> statements) throws RDFHandlerException {
    if (!writingStarted)
        throw new RuntimeException("Document writing has not yet been started");
    try {
        // WARNING if you want to produce blank nodes replace all the .append("<").append(X).append(">");
        // with NTriplesUtil.append(st.getSubject(), sb);
        for (Statement st : statements) {
            sb
                .append("<")
                .append(st.getSubject().toString())
                .append("> <")
                .append(st.getPredicate().toString())
                .append("> ");
            NTriplesUtilNoEscape.append(st.getObject(), sb);
            sb.append(" .\n");
        }
    } catch (Exception e) {
        throw new RDFHandlerException(e);
    }
}
Example #16
Source File: TestTicket276.java From database with GNU General Public License v2.0
private void addData(final RepositoryConnection conn) throws IOException,
        RDFParseException, RepositoryException, RDFHandlerException {

    final RDFParser rdfParser = Rio.createParser(RDFFormat.NTRIPLES,
            conn.getValueFactory());
    rdfParser.setVerifyData(true);
    rdfParser.setStopAtFirstError(true);
    rdfParser.setDatatypeHandling(RDFParser.DatatypeHandling.IGNORE);
    rdfParser.setRDFHandler(new RDFHandlerBase() {
        @Override
        public void handleStatement(Statement st) throws RDFHandlerException {
            try {
                conn.add(st);
            } catch (OpenRDFException e) {
                throw new RDFHandlerException(e);
            }
        }
    });
    rdfParser.parse(getClass().getResourceAsStream("TestTicket276.n3"), "");
}
Example #17
Source File: TestNoExceptions.java From database with GNU General Public License v2.0
private void executeQuery(final SailRepository repo, final String query)
        throws RepositoryException, MalformedQueryException,
        QueryEvaluationException, RDFParseException, IOException,
        RDFHandlerException {
    try {
        repo.initialize();
        final RepositoryConnection conn = repo.getConnection();
        conn.setAutoCommit(false);
        try {
            final ValueFactory vf = conn.getValueFactory();
            conn.commit();
            TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
            TupleQueryResult tqr = tq.evaluate();
            tqr.close();
        } finally {
            conn.close();
        }
    } finally {
        repo.shutDown();
    }
}
Example #18
Source File: BigdataSPARQLResultsJSONWriterForConstruct.java From database with GNU General Public License v2.0
@Override
public void endRDF() throws RDFHandlerException {
    try {
        writer.endQueryResult();
//        writer.endDocument();
//    } catch (IOException e) {
//        throw new RDFHandlerException(e);
    } catch (TupleQueryResultHandlerException e) {
        throw new RDFHandlerException(e);
    }
}
Example #19
Source File: AbstractSerializer.java From attic-polygene-java with Apache License 2.0
@Override
public void serialize( Iterable<Statement> graph, Writer out )
        throws RDFHandlerException {
    String[] prefixes = { "polygene", "rdf", "rdfs" };
    String[] namespaces = { PolygeneRdf.POLYGENE_MODEL, Rdfs.RDF, Rdfs.RDFS };
    serialize( graph, out, prefixes, namespaces );
}
Example #20
Source File: BigdataSPARQLResultsJSONWriterForConstruct.java From database with GNU General Public License v2.0
@Override
public void startRDF() throws RDFHandlerException {
    try {
        writer.startDocument();
        writer.startHeader();
        writer.startQueryResult(Arrays.asList(new String[] {
                "subject", "predicate", "object", "context"
        }));
        writer.endHeader();
    } catch (QueryResultHandlerException e) {
        throw new RDFHandlerException(e);
    }
}
Example #21
Source File: BigdataSPARQLResultsJSONParserForConstruct.java From database with GNU General Public License v2.0
@Override
public void handleSolution(BindingSet bs) throws TupleQueryResultHandlerException {

    if (!bs.hasBinding("subject")) {
        throw new TupleQueryResultHandlerException("no subject: " + bs);
    }
    if (!bs.hasBinding("predicate")) {
        throw new TupleQueryResultHandlerException("no predicate: " + bs);
    }
    if (!bs.hasBinding("object")) {
        throw new TupleQueryResultHandlerException("no object: " + bs);
    }

    final Resource s = (Resource) bs.getValue("subject");
    final URI p = (URI) bs.getValue("predicate");
    final Value o = (Value) bs.getValue("object");
    final Resource c = bs.hasBinding("context")
            ? (Resource) bs.getValue("context") : null;

    final Statement stmt = valueFactory.createStatement(s, p, o, c);

    try {
        getRDFHandler().handleStatement(stmt);
    } catch (RDFHandlerException e) {
        throw new TupleQueryResultHandlerException(e);
    }
}
Example #22
Source File: DeleteServlet.java From database with GNU General Public License v2.0
@Override
public void handleStatement(final Statement stmt) throws RDFHandlerException {

    final Resource[] c = (Resource[]) (stmt.getContext() == null
            ? defaultContext
            : new Resource[] { stmt.getContext() });

    try {
        conn.removeStatements(//
                stmt.getSubject(), //
                stmt.getPredicate(), //
                stmt.getObject(), //
                c);
    } catch (SailException e) {
        throw new RDFHandlerException(e);
    }

    if (c.length >= 2) {
        // removed from more than one context
        nmodified.addAndGet(c.length);
    } else {
        nmodified.incrementAndGet();
    }
}
Example #23
Source File: BigdataSailRemoteRepositoryConnection.java From database with GNU General Public License v2.0
@Override
public void exportStatements(final Resource s, final URI p, final Value o,
        final boolean includeInferred, final RDFHandler handler,
        final Resource... c) throws RepositoryException, RDFHandlerException {

    try {
        final RemoteRepository remote = repo.getRemoteRepository();
        final GraphQueryResult src =
                remote.getStatements(s, p, o, includeInferred, c);
        try {
            handler.startRDF();
            while (src.hasNext()) {
                handler.handleStatement(src.next());
            }
            handler.endRDF();
        } finally {
            src.close();
        }
    } catch (Exception ex) {
        throw new RepositoryException(ex);
    }
}
Example #24
Source File: BigdataSailRemoteRepositoryConnectionTest.java From database with GNU General Public License v2.0
@Test
public void testExport() throws RepositoryException, RDFHandlerException {

    final RDFHandler handler = new RDFHandlerBase();

    con.exportStatements(s, p, o, includeInferred, handler, c);

    assertEquals(EncodeDecodeValue.encodeValue(s),
            remote.data.opts.getRequestParam("s"));
    assertEquals(EncodeDecodeValue.encodeValue(p),
            remote.data.opts.getRequestParam("p"));
    assertEquals(EncodeDecodeValue.encodeValue(o),
            remote.data.opts.getRequestParam("o"));
    assertEquals(EncodeDecodeValue.encodeValue(c),
            remote.data.opts.getRequestParam("c"));
    assertEquals(Boolean.toString(includeInferred),
            remote.data.opts.getRequestParam(RemoteRepositoryDecls.INCLUDE_INFERRED));
}
Example #25
Source File: RDFModelFormater.java From ldp4j with Apache License 2.0
protected String exportRepository(RepositoryConnection connection)
        throws RepositoryException, RDFHandlerException {
    StringWriter writer = new StringWriter();
    RDFWriter rdfWriter = Rio.createWriter(getFormat(), writer);
    if (rdfWriter instanceof TurtleWriter) {
        rdfWriter = new RebasingTurtleWriter(writer);
    }
    connection.export(rdfWriter);
    return writer.toString();
}
Example #26
Source File: InsertServlet.java From database with GNU General Public License v2.0
@Override
public void handleStatement(final Statement stmt) throws RDFHandlerException {

    final Resource[] c = (Resource[]) (stmt.getContext() == null
            ? defaultContext
            : new Resource[] { stmt.getContext() });

    try {
        conn.addStatement(//
                stmt.getSubject(), //
                stmt.getPredicate(), //
                stmt.getObject(), //
                c);
    } catch (SailException e) {
        throw new RDFHandlerException(e);
    }

    if (c.length >= 2) {
        // added to more than one context
        nmodified.addAndGet(c.length);
    } else {
        nmodified.incrementAndGet();
    }
}
Example #27
Source File: TestTicket355.java From database with GNU General Public License v2.0
private void executeQuery(final SailRepository repo)
        throws RepositoryException, MalformedQueryException,
        QueryEvaluationException, RDFParseException, IOException,
        RDFHandlerException {
    try {
        repo.initialize();
        final RepositoryConnection conn = repo.getConnection();
        conn.setAutoCommit(false);
        try {
            final ValueFactory vf = conn.getValueFactory();
            conn.add(vf.createURI("os:subject"), vf.createURI("os:prop"), vf.createLiteral("value"));
            conn.commit();
            String query = "SELECT ?subj WHERE { "
                    + "?subj <os:prop> ?val . "
                    + "FILTER(STR(?val) != ?arg)}";
            TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
            tq.setBinding("arg", vf.createLiteral("notValue"));
            TupleQueryResult tqr = tq.evaluate();
            assertTrue(tqr.hasNext());
            tqr.close();
        } finally {
            conn.close();
        }
    } finally {
        repo.shutDown();
    }
}
Example #28
Source File: TestTicket348.java From database with GNU General Public License v2.0
private void executeTest(final SailRepository repo)
        throws RepositoryException, MalformedQueryException,
        QueryEvaluationException, RDFParseException, RDFHandlerException,
        IOException {
    try {
        repo.initialize();
        final RepositoryConnection conn = repo.getConnection();
        try {
            conn.setAutoCommit(false);
            final ValueFactory vf = conn.getValueFactory();
            final URI uri = vf.createURI("os:/elem/example");
            // run a query which looks for a statement and then adds it if it is not found.
            addDuringQueryExec(conn, uri, RDF.TYPE, vf.createURI("os:class/Clazz"));
            // now try to export the statements.
            final RepositoryResult<Statement> stats = conn.getStatements(null, null, null, false);
            try {
                // materialize the newly added statement.
                stats.next();
            } catch (RuntimeException e) {
                fail(e.getLocalizedMessage(), e); // With Bigdata this fails
            } finally {
                stats.close();
            }
            conn.rollback(); // discard the result (or commit, but do something to avoid a logged warning from Sesame).
        } finally {
            conn.close();
        }
    } finally {
        repo.shutDown();
    }
}
Example #29
Source File: HTMLWriter.java From cumulusrdf with Apache License 2.0
@Override
public void startRDF() throws RDFHandlerException {
    try {
        _writer.write("<html><head><title>" + _title + "</title></head><body>");
        _writer.write("<h1>" + _title + "</h1>");
    } catch (IOException e) {
        throw new RDFHandlerException(e);
    }
}
Example #30
Source File: TestTicket967.java From database with GNU General Public License v2.0
private void executeTest(final SailRepository repo)
        throws RepositoryException, MalformedQueryException,
        QueryEvaluationException, RDFParseException, RDFHandlerException,
        IOException {
    try {
        repo.initialize();
        final RepositoryConnection conn = repo.getConnection();
        try {
            conn.setAutoCommit(false);
            final ValueFactory vf = conn.getValueFactory();
            final URI uri = vf.createURI("os:/elem/example");
            // run a query which looks for a statement and then adds it if it is not found.
            addDuringQueryExec(conn, uri, RDF.TYPE, vf.createURI("os:class/Clazz"));
            // now try to export the statements.
            final RepositoryResult<Statement> stats = conn.getStatements(null, null, null, false);
            try {
                // materialize the newly added statement.
                stats.next();
            } catch (RuntimeException e) {
                fail(e.getLocalizedMessage(), e); // With Bigdata this fails
            } finally {
                stats.close();
            }
            conn.rollback(); // discard the result (or commit, but do something to avoid a logged warning from Sesame).
        } finally {
            conn.close();
        }
    } finally {
        repo.shutDown();
    }
}