Java Code Examples for com.hp.hpl.jena.rdf.model.StmtIterator#hasNext()
The following examples show how to use com.hp.hpl.jena.rdf.model.StmtIterator#hasNext().
You can vote up the examples you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
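Before the per-project examples, here is a minimal, self-contained sketch of the pattern they all share: obtain a StmtIterator (from Model.listStatements() or Resource.listProperties()), use hasNext() to guard each call to nextStatement(), and close the iterator when finished. The class name, namespace, and example URIs below are illustrative only and are not taken from any of the projects listed.

import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Property;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;

public class StmtIteratorHasNextSketch {
    public static void main(String[] args) {
        // Build a tiny in-memory model holding a single triple.
        Model model = ModelFactory.createDefaultModel();
        Resource book = model.createResource("http://example.org/book/1");
        Property title = model.createProperty("http://example.org/vocab#", "title");
        model.add(book, title, "A Semantic Web Primer");

        // hasNext() guards every call to nextStatement(), exactly as in the examples below.
        StmtIterator it = model.listStatements();
        try {
            while (it.hasNext()) {
                Statement stmt = it.nextStatement();
                System.out.println(stmt.getSubject() + " " + stmt.getPredicate() + " " + stmt.getObject());
            }
        } finally {
            it.close(); // good practice, especially for iterators over persistent (e.g. TDB-backed) models
        }
    }
}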
Example 1
Source File: GeneralR2RMLCompiler.java From GeoTriples with Apache License 2.0 | 6 votes |
public List<RDFNode> getRDFNodes(Resource r, Property p, R2RMLReader.NodeType acceptableNodes) {
    List<RDFNode> result = new ArrayList<RDFNode>();
    StmtIterator it = r.listProperties(p);
    while (it.hasNext()) {
        Statement stmt = it.next();
        if (acceptableNodes.isTypeOf(stmt.getObject())) {
            result.add(stmt.getObject());
        } else {
            if (acceptableNodes.coerce(stmt.getObject()) != null) {
                result.add(acceptableNodes.coerce(stmt.getObject()));
            }
        }
    }
    Collections.sort(result, RDFComparator.getRDFNodeComparator());
    return result;
}
Example 2
Source File: D2RQReader.java From GeoTriples with Apache License 2.0 | 6 votes |
private void parseDownloadMap(DownloadMap dm, Resource r) {
    StmtIterator stmts;
    stmts = r.listProperties(D2RQ.dataStorage);
    while (stmts.hasNext()) {
        dm.setDatabase(mapping.database(
                stmts.nextStatement().getResource()));
    }
    stmts = r.listProperties(D2RQ.belongsToClassMap);
    while (stmts.hasNext()) {
        dm.setBelongsToClassMap(mapping.classMap(
                stmts.nextStatement().getResource()));
    }
    stmts = r.listProperties(D2RQ.contentDownloadColumn);
    while (stmts.hasNext()) {
        dm.setContentDownloadColumn(stmts.nextStatement().getString());
    }
    stmts = r.listProperties(D2RQ.mediaType);
    while (stmts.hasNext()) {
        dm.setMediaType(stmts.nextStatement().getString());
    }
}
Example 3
Source File: SparqlExtractor.java From wandora with GNU General Public License v3.0 | 6 votes |
public void RDF2TopicMap(Model model, TopicMap map) {
    // list the statements in the Model
    StmtIterator iter = model.listStatements();
    Statement stmt = null;
    int counter = 0;
    while (iter.hasNext() && !forceStop()) {
        try {
            stmt = iter.nextStatement(); // get next statement
            handleStatement(stmt, map);
        }
        catch (Exception e) {
            log(e);
        }
        counter++;
        setProgress(counter);
        if (counter % 100 == 0) hlog("RDF statements processed: " + counter);
    }
    log("Total RDF statements processed: " + counter);
}
Example 4
Source File: PageServlet.java From GeoTriples with Apache License 2.0 | 5 votes |
private Collection<Property> collectProperties(Model m, Resource r) {
    Collection<Property> result = new TreeSet<Property>();
    StmtIterator it = r.listProperties();
    while (it.hasNext()) {
        result.add(new Property(it.nextStatement(), false));
    }
    it = m.listStatements(null, null, r);
    while (it.hasNext()) {
        result.add(new Property(it.nextStatement(), true));
    }
    return result;
}
Example 5
Source File: Course.java From neo4jena with Apache License 2.0 | 5 votes |
public static void write(GraphDatabaseService njgraph) {
    InputStream in = FileManager.get().open(inputFileName);
    if (in == null) {
        throw new IllegalArgumentException("File: " + inputFileName + " not found");
    }
    Model model = ModelFactory.createDefaultModel();
    model.read(in, "", "TTL");
    double triples = model.size();
    log.info("Model loaded with " + triples + " triples");
    System.out.println("Model loaded with " + triples + " triples");
    Map<String, String> prefixMap = model.getNsPrefixMap();
    // System.out.println("Prefix Mapping: " + prefixMap);

    NeoGraph graph = new NeoGraph(njgraph);
    graph.getPrefixMapping().setNsPrefixes(prefixMap);
    graph.startBulkLoad();
    log.info("Connection created");
    Model njmodel = ModelFactory.createModelForGraph(graph);
    log.info("NeoGraph Model initiated");
    System.out.println("NeoGraph Model initiated");

    //log.info(njmodel.add(model));
    //njmodel.add(model);
    StmtIterator iterator = model.listStatements();
    StopWatch watch = new StopWatch();
    int count = 0;
    while (iterator.hasNext()) {
        njmodel.add(iterator.next());
        count++;
    }
    System.out.println("Total triples loaded are:" + count);
    graph.stopBulkLoad();
    //log.info("Storing completed (ms): " + watch.stop());
    System.out.println("Storing completed (ms): " + watch.stop());
}
Example 6
Source File: RdfGeneratorTest.java From xcurator with Apache License 2.0 | 5 votes |
/**
 * Run the RDF generator pipeline for clinical trial data. Before running
 * this, run the Mapping Discovery Test first to generate the mapping file
 * for clinical trials.
 *
 * @throws SAXException
 * @throws IOException
 * @throws ParserConfigurationException
 */
@Test
public void test_generateRdfs_clinical_trials() throws SAXException, IOException, ParserConfigurationException {
    // Setup deserializer
    mappingDeserialization = new XmlBasedMappingDeserialization(
            new FileInputStream("output/clinicaltrials-mapping.xml"), parser);

    Document dataDocument = parser.parse(RdfGeneratorTest.class.getResourceAsStream(
            "/clinicaltrials/data/content.xml"), 10);

    rdfGenerator = new RdfGenerator(new DataDocument(dataDocument), new XmlBasedMapping());

    // Add steps
    rdfGenerator.addStep(mappingDeserialization);
    rdfGenerator.addStep(rdfGeneration);

    // Generate
    rdfGenerator.generateRdfs();

    // Verify
    Model model = TDBFactory.createModel(testTdbDir);
    Assert.assertFalse("No RDF was generated. TDB directory: " + testTdbDir, model.isEmpty());

    ResIterator iter = model.listResourcesWithProperty(RDF.type);
    while (iter.hasNext()) {
        Resource resource = iter.nextResource();
        System.out.println(resource.getLocalName());
        StmtIterator iterStm = resource.listProperties();
        while (iterStm.hasNext()) {
            System.out.println(iterStm.nextStatement().toString());
        }
    }
}
Example 7
Source File: R2RMLReader.java From GeoTriples with Apache License 2.0 | 5 votes |
private void checkForSpuriousTriples() {
    StmtIterator it = remainingTriples.listStatements();
    while (it.hasNext()) {
        Statement stmt = it.next();
        report.report(Problem.SPURIOUS_TRIPLE,
                stmt.getSubject(), stmt.getPredicate(), stmt.getObject());
    }
}
Example 8
Source File: D2RQReader.java From GeoTriples with Apache License 2.0 | 5 votes |
private void parsePropertyBridges() {
    StmtIterator stmts = this.model.listStatements(null, D2RQ.belongsToClassMap, (RDFNode) null);
    while (stmts.hasNext()) {
        Statement stmt = stmts.nextStatement();
        ClassMap classMap = this.mapping.classMap(stmt.getResource());
        Resource r = stmt.getSubject();
        PropertyBridge bridge = new PropertyBridge(r);
        bridge.setBelongsToClassMap(classMap);
        parseResourceMap(bridge, r);
        parsePropertyBridge(bridge, r);
    }
}
Example 9
Source File: D2RQReader.java From GeoTriples with Apache License 2.0 | 5 votes |
private void parseTranslationTables() {
    Set<Resource> translationTableResources = new HashSet<Resource>();
    Iterator<? extends Resource> it = this.model.listIndividuals(D2RQ.TranslationTable);
    while (it.hasNext()) {
        translationTableResources.add(it.next());
    }
    StmtIterator stmts;
    stmts = this.model.listStatements(null, D2RQ.translateWith, (Resource) null);
    while (stmts.hasNext()) {
        translationTableResources.add(stmts.nextStatement().getResource());
    }
    stmts = this.model.listStatements(null, D2RQ.translation, (RDFNode) null);
    while (stmts.hasNext()) {
        translationTableResources.add(stmts.nextStatement().getSubject());
    }
    stmts = this.model.listStatements(null, D2RQ.javaClass, (RDFNode) null);
    while (stmts.hasNext()) {
        translationTableResources.add(stmts.nextStatement().getSubject());
    }
    stmts = this.model.listStatements(null, D2RQ.href, (RDFNode) null);
    while (stmts.hasNext()) {
        translationTableResources.add(stmts.nextStatement().getSubject());
    }
    it = translationTableResources.iterator();
    while (it.hasNext()) {
        Resource r = it.next();
        TranslationTable table = new TranslationTable(r);
        parseTranslationTable(table, r);
        this.mapping.addTranslationTable(table);
    }
}
Example 10
Source File: ScannerFactory.java From DataHubSystem with GNU Affero General Public License v3.0 | 5 votes |
/**
 * Retrieve the dhus system supported items for file scanning processing.
 * Is considered supported all classes having
 * <code>http://www.gael.fr/dhus#metadataExtractor</code> property
 * connection.
 * @return the list of supported class names.
 */
public static synchronized String[] getDefaultCortexSupport() {
    DrbCortexModel model;
    try {
        model = DrbCortexModel.getDefaultModel();
    } catch (IOException e) {
        throw new UnsupportedOperationException(
                "Drb cortex not properly initialized.");
    }
    ExtendedIterator it = model.getCortexModel().getOntModel().listClasses();
    List<String> list = new ArrayList<String>();

    while (it.hasNext()) {
        OntClass cl = (OntClass) it.next();
        OntProperty metadata_extractor_p = cl.getOntModel().getOntProperty(
                "http://www.gael.fr/dhus#support");
        StmtIterator properties = cl.listProperties(metadata_extractor_p);
        while (properties.hasNext()) {
            Statement stmt = properties.nextStatement();
            LOGGER.debug("Scanner Support Added for " + stmt.getSubject().toString());
            list.add(stmt.getSubject().toString());
        }
    }
    return list.toArray(new String[list.size()]);
}
Example 11
Source File: GetEventStats.java From EventCoreference with Apache License 2.0 | 5 votes |
static ArrayList<String> getAllEsoEvents(Dataset dataset, ArrayList<String> esoTypes) {
    ArrayList<String> events = new ArrayList<String>();
    Iterator<String> it = dataset.listNames();
    while (it.hasNext()) {
        String name = it.next();
        if (name.equals(instanceGraph)) {
            Model namedModel = dataset.getNamedModel(name);
            StmtIterator siter = namedModel.listStatements();
            while (siter.hasNext()) {
                Statement s = siter.nextStatement();
                // System.out.println("s.toString() = " + s.toString());
                if (s.getPredicate().toString().endsWith("#type")) {
                    for (int i = 0; i < esoTypes.size(); i++) {
                        String esoType = esoTypes.get(i);
                        if (s.getObject().toString().endsWith(esoType)) {
                            String subject = s.getSubject().getURI();
                            if (!events.contains(subject)) {
                                events.add(subject);
                            }
                            break;
                        }
                    }
                }
            }
        }
    }
    return events;
}
Example 12
Source File: WriteStatementsKnowledgeStore.java From EventCoreference with Apache License 2.0 | 5 votes |
static public void main(String[] args) {
    Dataset dataset = null;
    String address = "http://145.100.57.176:50053/";
    ArrayList<org.openrdf.model.Statement> statements = new ArrayList<org.openrdf.model.Statement>();
    Iterator<String> it = dataset.listNames();
    while (it.hasNext()) {
        String name = it.next();
        Model namedModel = dataset.getNamedModel(name);
        StmtIterator siter = namedModel.listStatements();
        while (siter.hasNext()) {
            com.hp.hpl.jena.rdf.model.Statement s = siter.nextStatement();
            org.openrdf.model.Statement statement = castJenaOpenRdf(s, name);
            if (statement != null) {
                statements.add(statement);
            }
        }
    }
    if (DEBUG) {
        try {
            ByteArrayOutputStream os = new ByteArrayOutputStream();
            RDFDataMgr.write(os, dataset, RDFFormat.TRIG_PRETTY);
            String rdfString = new String(os.toByteArray(), "UTF-8");
            System.out.println("rdfString = " + rdfString);
            os.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    // System.out.println("address = " + address);
    WriteStatementsKnowledgeStore.storeTriples(statements, address);
}
Example 13
Source File: VocabularySummarizer.java From GeoTriples with Apache License 2.0 | 5 votes |
public Collection<Resource> getUndefinedClasses(Model model) {
    Set<Resource> result = new HashSet<Resource>();
    StmtIterator it = model.listStatements(null, RDF.type, (RDFNode) null);
    while (it.hasNext()) {
        Statement stmt = it.nextStatement();
        if (stmt.getObject().isURIResource()
                && stmt.getResource().getURI().startsWith(namespace)
                && !classes.contains(stmt.getObject())) {
            result.add(stmt.getResource());
        }
    }
    return result;
}
Example 14
Source File: OpenCycOntology.java From xcurator with Apache License 2.0 | 5 votes |
private Set<String> getTypesOfSubject(Resource subject) {
    Set<String> ret = new HashSet<String>();
    StmtIterator stiter2 = model.listStatements(new SimpleSelector(subject, RDF.type, (RDFNode) null));
    while (stiter2.hasNext()) {
        String uri = stiter2.next().getObject().asResource().getURI();
        if (uri.startsWith("http://sw.opencyc.org")) {
            ret.add(uri);
        }
    }
    return ret;
}
Example 15
Source File: WP2RDFXMLWriter.java From GeoTriples with Apache License 2.0 | 5 votes |
protected void writeRDFStatements(Model model, Resource subject, PrintWriter writer) {
    StmtIterator sIter = model.listStatements(subject, null, (RDFNode) null);
    writeDescriptionHeader(subject, writer);
    while (sIter.hasNext()) {
        Statement nextSt = sIter.nextStatement();
        if (nextSt.getObject().toString().startsWith("null^^")) {
            sIter.nextStatement();
            continue;
        }
        writePredicate(nextSt, writer);
    }
    writeDescriptionTrailer(subject, writer);
}
Example 16
Source File: RdfGeneratorTest2.java From xcurator with Apache License 2.0 | 5 votes |
/**
 * Run the RDF generator pipeline for clinical trial data. Before running
 * this, run the Mapping Discovery Test first to generate the mapping file
 * for clinical trials.
 *
 * @throws SAXException
 * @throws IOException
 * @throws ParserConfigurationException
 */
@Test
public void test_generateRdfs_clinical_trials() throws SAXException, IOException, ParserConfigurationException {
    // Setup deserializer
    mappingDeserialization = new XmlBasedMappingDeserialization(
            new FileInputStream("output/clinicaltrials-mapping.xml"), parser);

    Document dataDocument = parser.parse(RdfGeneratorTest2.class.getResourceAsStream(
            "/clinicaltrials/data/content.xml"), 10);

    rdfGenerator = new RdfGenerator(new DataDocument(dataDocument), new XmlBasedMapping());

    // Add steps
    rdfGenerator.addStep(mappingDeserialization);
    rdfGenerator.addStep(rdfGeneration);

    // Generate
    rdfGenerator.generateRdfs();

    // Verify
    Model model = TDBFactory.createModel(testTdbDir);
    Assert.assertFalse("No RDF was generated. TDB directory: " + testTdbDir, model.isEmpty());

    ResIterator iter = model.listResourcesWithProperty(RDF.type);
    while (iter.hasNext()) {
        Resource resource = iter.nextResource();
        System.out.println(resource.getLocalName());
        StmtIterator iterStm = resource.listProperties();
        while (iterStm.hasNext()) {
            System.out.println(iterStm.nextStatement().toString());
        }
    }
}
Example 17
Source File: SelectEntityTrig.java From EventCoreference with Apache License 2.0 | 4 votes |
static public void main(String[] args) {
    String trigfolderPath = "";
    trigfolderPath = "/Users/piek/Desktop/tweede-kamer/events";
    String entity = "";
    String date = "";
    entity = "lippens";
    for (int i = 0; i < args.length; i++) {
        String arg = args[i];
        if (arg.equals("--trig-folder") && args.length > (i + 1)) {
            trigfolderPath = args[i + 1];
        } else if (arg.equals("--entity") && args.length > (i + 1)) {
            entity = args[i + 1];
        } else if (arg.equals("--event-date") && args.length > (i + 1)) {
            date = args[i + 1];
        }
    }
    if (entity.isEmpty()) {
        System.out.println("Entity is empty");
        return;
    }
    File trigfolder = new File(trigfolderPath);
    String trigEntityPath = trigfolder.getParent() + "/" + entity;
    File entityTrigFolder = new File(trigEntityPath);
    if (!entityTrigFolder.exists()) {
        entityTrigFolder.mkdir();
    }
    if (!entityTrigFolder.exists()) {
        System.out.println("Could not create entity trig folder");
        return;
    }
    dataset = TDBFactory.createDataset();
    ArrayList<File> trigFiles = Util.makeRecursiveFileList(trigfolder, ".trig");
    System.out.println(trigfolder.getName() + " trigFiles.size() = " + trigFiles.size());
    int cnt = 1;
    for (int i = 0; i < trigFiles.size(); i++) {
        File file = trigFiles.get(i);
        if (!file.getParentFile().getName().startsWith(date)) {
            continue;
        }
        if (i % 500 == 0) {
            System.out.println("i = " + i);
            // if (i>1000) break;
        }
        ArrayList<String> events = new ArrayList<String>();
        dataset = RDFDataMgr.loadDataset(file.getAbsolutePath());
        Model namedModel = dataset.getNamedModel(TrigUtil.instanceGraph);
        StmtIterator siter = namedModel.listStatements();
        while (siter.hasNext()) {
            Statement s = siter.nextStatement();
            String subject = s.getSubject().getURI().toLowerCase();
            if (subject.indexOf(entity.toLowerCase()) > -1) {
                String trigName = trigEntityPath + "/" + cnt + "_" + file.getName();
                File trigCopy = new File(trigName);
                copyFile(file, trigCopy);
                cnt++;
                break;
            }
        }
        dataset = null;
    }
}
Example 18
Source File: RdfGeneratorTest2.java From xcurator with Apache License 2.0 | 4 votes |
@Test
// Run test_discoverMapping_multiple_XBRLs to generate the mapping file before running
// this test.
public void test_generateRdfs_multiple_XBRLs() throws SAXException, IOException, ParserConfigurationException {
    // Setup deserializer
    mappingDeserialization = new XmlBasedMappingDeserialization(
            new FileInputStream("output/xbrl-mapping.xml"), parser);

    Document fb2013 = parser.parse(BasicEntityDiscoveryTest.class.getResourceAsStream(
            "/secxbrls/data/fb-20131231.xml"), -1);
    Document msft2013 = parser.parse(BasicEntityDiscoveryTest.class.getResourceAsStream(
            "/secxbrls/data/msft-20130630.xml"), -1);
    Document goog2013 = parser.parse(BasicEntityDiscoveryTest.class.getResourceAsStream(
            "/secxbrls/data/goog-20131231.xml"), -1);

    rdfGenerator = new RdfGenerator(new XmlBasedMapping());

    // Add document and steps
    rdfGenerator.addDataDocument(new DataDocument(fb2013, "http://example.org/resource/fb-20131231"))
            .addDataDocument(new DataDocument(msft2013, "http://example.org/resource/msft-20130630"))
            .addDataDocument(new DataDocument(goog2013, "http://example.org/resource/goog-20131231"))
            .addStep(mappingDeserialization)
            .addStep(rdfGeneration);

    // Generate
    rdfGenerator.generateRdfs();

    // Verify
    Model model = TDBFactory.createModel(testTdbDir);
    Assert.assertFalse("No RDF was generated. TDB directory: " + testTdbDir, model.isEmpty());

    ResIterator iter = model.listResourcesWithProperty(RDF.type);
    while (iter.hasNext()) {
        Resource resource = iter.nextResource();
        System.out.println(resource.getLocalName());
        StmtIterator iterStm = resource.listProperties();
        while (iterStm.hasNext()) {
            System.out.println(iterStm.nextStatement().toString());
        }
    }
}
Example 19
Source File: RdfGeneratorTest.java From xcurator with Apache License 2.0 | 4 votes |
@Test
// Run test_discoverMapping_multiple_XBRLs to generate the mapping file before running
// this test.
public void test_generateRdfs_multiple_XBRLs() throws SAXException, IOException, ParserConfigurationException {
    // Setup deserializer
    mappingDeserialization = new XmlBasedMappingDeserialization(
            new FileInputStream("output/xbrl-mapping.xml"), parser);

    Document fb2013 = parser.parse(BasicEntityDiscoveryTest.class.getResourceAsStream(
            "/secxbrls/data/fb-20131231.xml"), -1);
    Document msft2013 = parser.parse(BasicEntityDiscoveryTest.class.getResourceAsStream(
            "/secxbrls/data/msft-20130630.xml"), -1);
    Document goog2013 = parser.parse(BasicEntityDiscoveryTest.class.getResourceAsStream(
            "/secxbrls/data/goog-20131231.xml"), -1);

    rdfGenerator = new RdfGenerator(new XmlBasedMapping());

    // Add document and steps
    rdfGenerator.addDataDocument(new DataDocument(fb2013, "http://example.org/resource/fb-20131231"))
            .addDataDocument(new DataDocument(msft2013, "http://example.org/resource/msft-20130630"))
            .addDataDocument(new DataDocument(goog2013, "http://example.org/resource/goog-20131231"))
            .addStep(mappingDeserialization)
            .addStep(rdfGeneration);

    // Generate
    rdfGenerator.generateRdfs();

    // Verify
    Model model = TDBFactory.createModel(testTdbDir);
    Assert.assertFalse("No RDF was generated. TDB directory: " + testTdbDir, model.isEmpty());

    ResIterator iter = model.listResourcesWithProperty(RDF.type);
    while (iter.hasNext()) {
        Resource resource = iter.nextResource();
        System.out.println(resource.getLocalName());
        StmtIterator iterStm = resource.listProperties();
        while (iterStm.hasNext()) {
            System.out.println(iterStm.nextStatement().toString());
        }
    }
}
Example 20
Source File: D2RQReader.java From GeoTriples with Apache License 2.0 | 4 votes |
private void parsePropertyBridge(PropertyBridge bridge, Resource r) {
    StmtIterator stmts;
    stmts = r.listProperties(D2RQ.column);
    while (stmts.hasNext()) {
        if (r.getProperty(RDF.type).equals(D2RQ.ObjectPropertyBridge)) {
            // Legacy
            bridge.setURIColumn(Microsyntax.parseColumn(stmts.nextStatement().getString()));
        } else {
            bridge.setColumn(Microsyntax.parseColumn(stmts.nextStatement().getString()));
        }
    }
    stmts = r.listProperties(D2RQ.pattern);
    while (stmts.hasNext()) {
        if (r.getProperty(RDF.type).equals(D2RQ.ObjectPropertyBridge)) {
            // Legacy
            bridge.setURIPattern(stmts.nextStatement().getString());
        } else {
            bridge.setPattern(stmts.nextStatement().getString());
        }
    }
    stmts = r.listProperties(D2RQ.sqlExpression);
    while (stmts.hasNext()) {
        bridge.setSQLExpression(stmts.nextStatement().getString());
    }
    stmts = r.listProperties(D2RQ.lang);
    while (stmts.hasNext()) {
        bridge.setLang(stmts.nextStatement().getString());
    }
    stmts = r.listProperties(D2RQ.datatype);
    while (stmts.hasNext()) {
        bridge.setDatatype(stmts.nextStatement().getResource().getURI());
    }
    stmts = r.listProperties(D2RQ.refersToClassMap);
    while (stmts.hasNext()) {
        Resource classMapResource = stmts.nextStatement().getResource();
        bridge.setRefersToClassMap(this.mapping.classMap(classMapResource));
    }
    stmts = r.listProperties(D2RQ.dynamicProperty);
    while (stmts.hasNext()) {
        bridge.addDynamicProperty(stmts.next().getString());
    }
    stmts = r.listProperties(D2RQ.property);
    while (stmts.hasNext()) {
        bridge.addProperty(stmts.nextStatement().getResource());
    }
    stmts = this.model.listStatements(null, D2RQ.propertyBridge, r);
    while (stmts.hasNext()) {
        bridge.addProperty(stmts.nextStatement().getSubject());
    }
    stmts = r.listProperties(D2RQ.propertyDefinitionLabel);
    while (stmts.hasNext()) {
        bridge.addDefinitionLabel(stmts.nextStatement().getLiteral());
    }
    stmts = r.listProperties(D2RQ.propertyDefinitionComment);
    while (stmts.hasNext()) {
        bridge.addDefinitionComment(stmts.nextStatement().getLiteral());
    }
    stmts = r.listProperties(D2RQ.additionalPropertyDefinitionProperty);
    while (stmts.hasNext()) {
        Resource additionalProperty = stmts.nextStatement().getResource();
        bridge.addAdditionalDefinitionProperty(additionalProperty);
    }
    stmts = r.listProperties(D2RQ.limit);
    while (stmts.hasNext()) {
        bridge.setLimit(stmts.nextStatement().getInt());
    }
    stmts = r.listProperties(D2RQ.limitInverse);
    while (stmts.hasNext()) {
        bridge.setLimitInverse(stmts.nextStatement().getInt());
    }
    stmts = r.listProperties(D2RQ.orderDesc);
    while (stmts.hasNext()) {
        bridge.setOrder(stmts.nextStatement().getString(), true);
    }
    stmts = r.listProperties(D2RQ.orderAsc);
    while (stmts.hasNext()) {
        bridge.setOrder(stmts.nextStatement().getString(), false);
    }
}