com.hp.hpl.jena.tdb.TDBFactory Java Examples
The following examples show how to use
com.hp.hpl.jena.tdb.TDBFactory.
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: Environment.java From uncc2014watsonsim with GNU General Public License v2.0 | 6 votes |
/** * Create a (possibly) shared NLP environment. The given data directory * must be created (usually from a downloaded zipfile, check the README). * Expect many open files and many reads. Network filesystems are known to * perform poorly as data directories. Strive to use a local directory if * possible, or at least the Lucene indices otherwise. * * config.properties can be either in the data directory or the working * directory. This is to allow sharing (read-only) indices while still * allowing separate development configurations. */ public Environment() { // Now do some per-thread setup db = new Database(this); rdf = TDBFactory.assembleDataset( pathMustExist("rdf/jena-lucene.ttl")); // Lucene indexes have huge overhead so avoid re-instantiating by putting them in the Environment IndexReader reader; try { reader = DirectoryReader.open(new MMapDirectory(Paths.get(getConfOrDie("lucene_index")))); } catch (IOException e) { e.printStackTrace(); throw new RuntimeException("The candidate-answer Lucene index failed to open."); } lucene = new IndexSearcher(reader); //lucene.setSimilarity(new BM25Similarity()); }
Example #2
Source File: GetPerspectiveRelations.java From EventCoreference with Apache License 2.0 | 6 votes |
public static void perspectiveRelationsToTrig (String pathToTrigFile, ArrayList<PerspectiveObject> perspectiveObjects) { try { OutputStream fos = new FileOutputStream(pathToTrigFile); Dataset ds = TDBFactory.createDataset(); Model defaultModel = ds.getDefaultModel(); //ResourcesUri.prefixModel(defaultModel); // Model provenanceModel = ds.getNamedModel("http://www.newsreader-project.eu/perspective"); ResourcesUri.prefixModelGaf(defaultModel); String attrBase = pathToTrigFile+"_"; JenaSerialization.addJenaPerspectiveObjects(attrBase, ResourcesUri.grasp, "wasAttributedTo", perspectiveObjects, 1); RDFDataMgr.write(fos, ds, RDFFormat.TRIG_PRETTY); fos.close(); } catch (IOException e) { e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates. } }
Example #3
Source File: RDFFileManager.java From Benchmark with GNU General Public License v3.0 | 5 votes |
public static void initializeDataset(String serviceDesc) { // if (clean) { deleteDir(new File(databaseDirectory)); if (!(new File(databaseDirectory)).mkdir()) { System.out.println("can not create working directory" + databaseDirectory); } DatasetGraph datasettdb = TDBFactory.createDatasetGraph(databaseDirectory); dataset = DatasetImpl.wrap(datasettdb); loadOntology(dataset); Model serviceBase = FileManager.get().loadModel(datasetDirectory + serviceDesc); dataset.getDefaultModel().add(ModelFactory.createOntologyModel(ontoSpec, serviceBase)); // } else // dataset = TDBFactory.createDataset(databaseDirectory); }
Example #4
Source File: JenaSerialization.java From EventCoreference with Apache License 2.0 | 5 votes |
/**
 * Create the shared in-memory TDB dataset and bind the grasp, provenance
 * and instance named models, then apply the "simple" prefix set to them.
 */
static public void createSimpleModels () {
    Dataset dataset = TDBFactory.createDataset();
    ds = dataset;
    graspModel = dataset.getNamedModel(ResourcesUri.nwr + "grasp");
    provenanceModel = dataset.getNamedModel(ResourcesUri.nwr + "provenance");
    instanceModel = dataset.getNamedModel(ResourcesUri.nwr + "instances");
    prefixSimpleModels();
}
Example #5
Source File: JenaSerialization.java From EventCoreference with Apache License 2.0 | 5 votes |
/**
 * Create the shared in-memory TDB dataset and bind the grasp, provenance
 * and instance named models, then apply the full prefix set to them.
 */
static public void createModels () {
    Dataset dataset = TDBFactory.createDataset();
    ds = dataset;
    graspModel = dataset.getNamedModel(ResourcesUri.nwr + "grasp");
    provenanceModel = dataset.getNamedModel(ResourcesUri.nwr + "provenance");
    instanceModel = dataset.getNamedModel(ResourcesUri.nwr + "instances");
    prefixModels();
}
Example #6
Source File: GetPerspectiveRelations.java From EventCoreference with Apache License 2.0 | 5 votes |
public static void perspectiveRelationsToTrigStream (OutputStream fos, String uri, ArrayList<PerspectiveObject> perspectiveObjects) { Dataset ds = TDBFactory.createDataset(); Model defaultModel = ds.getDefaultModel(); ResourcesUri.prefixModel(defaultModel); // Model provenanceModel = ds.getNamedModel("http://www.newsreader-project.eu/perspective"); ResourcesUri.prefixModelGaf(defaultModel); String attrBase = uri+"_"; JenaSerialization.addJenaPerspectiveObjects(attrBase, ResourcesUri.grasp,"wasAttributedTo", perspectiveObjects, 1); RDFDataMgr.write(fos, ds, RDFFormat.TRIG_PRETTY); }
Example #7
Source File: GetPerspectiveRelations.java From EventCoreference with Apache License 2.0 | 5 votes |
public static void perspectiveRelationsToTrig (String pathToTrigFile, KafSaxParser kafSaxParser, String project, ArrayList<PerspectiveObject> sourcePerspectiveObjects, ArrayList<PerspectiveObject> authorPerspectiveObjects) { try { OutputStream fos = new FileOutputStream(pathToTrigFile); Dataset ds = TDBFactory.createDataset(); Model defaultModel = ds.getDefaultModel(); ResourcesUri.prefixModelGaf(defaultModel); ResourcesUri.prefixModelNwr(defaultModel); defaultModel.setNsPrefix("rdf", ResourcesUri.rdf); defaultModel.setNsPrefix("rdfs", ResourcesUri.rdfs); String docId = kafSaxParser.getKafMetaData().getUrl().replaceAll("#", "HASH"); if (!docId.toLowerCase().startsWith("http")) { docId = ResourcesUri.nwrdata + project + "/" + docId; } JenaSerialization.addDocMetaData(docId, kafSaxParser, project); String attrBase = kafSaxParser.getKafMetaData().getUrl()+"_"+"s"; JenaSerialization.addJenaPerspectiveObjects(attrBase, ResourcesUri.grasp, "wasAttributedTo", sourcePerspectiveObjects, 1); attrBase = kafSaxParser.getKafMetaData().getUrl()+"_"+"d"; JenaSerialization.addJenaPerspectiveObjects(attrBase, ResourcesUri.prov, "wasDerivedFrom", authorPerspectiveObjects, sourcePerspectiveObjects.size()+1); RDFDataMgr.write(fos, ds, RDFFormat.TRIG_PRETTY); fos.close(); } catch (IOException e) { e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates. } }
Example #8
Source File: RdfGeneratorTest.java From xcurator with Apache License 2.0 | 5 votes |
@Test // Run test_discoverMapping_XBRL_msft to generate the mapping file before running // this test. public void test_generateRdfs_msft_XBRL() throws SAXException, IOException, ParserConfigurationException { // Setup deserializer mappingDeserialization = new XmlBasedMappingDeserialization( new FileInputStream("output/msft-20130630-mapping.xml"), parser); Document dataDocument = parser.parse(RdfGeneratorTest.class.getResourceAsStream( "/secxbrls/data/msft-20130630.xml"), -1); rdfGenerator = new RdfGenerator(new DataDocument(dataDocument), new XmlBasedMapping()); // Add steps rdfGenerator.addStep(mappingDeserialization); rdfGenerator.addStep(rdfGeneration); // Generate rdfGenerator.generateRdfs(); // Verify Model model = TDBFactory.createModel(testTdbDir); Assert.assertFalse("No RDF was generated. TDB directory: " + testTdbDir, model.isEmpty()); Resource r = model.getResource("http://example.org/resource/class/unitNumerator"); // Failing, investigate Assert.assertTrue(r.hasProperty(model.getProperty("http://example.org/resource/property/measure"))); // ResIterator iter = model.listResourcesWithProperty(RDF.type); // while (iter.hasNext()) { // Resource resource = iter.nextResource(); // System.out.println(resource.getLocalName()); // StmtIterator iterStm = resource.listProperties(); // while (iterStm.hasNext()) { // System.out.println(iterStm.nextStatement().toString()); // } // } }
Example #9
Source File: RdfGeneratorTest.java From xcurator with Apache License 2.0 | 5 votes |
@Test // Run test_discoverMapping_fb_XBRL to generate the mapping file before running // this test. public void test_generateRdfs_fb_XBRL() throws SAXException, IOException, ParserConfigurationException { // Setup deserializer mappingDeserialization = new XmlBasedMappingDeserialization( new FileInputStream("output/fb-20121231-mapping.xml"), parser); Document dataDocument = parser.parse(RdfGeneratorTest.class.getResourceAsStream( "/secxbrls/data/fb-20121231.xml"), -1); rdfGenerator = new RdfGenerator(new DataDocument(dataDocument), new XmlBasedMapping()); // Add steps rdfGenerator.addStep(mappingDeserialization); rdfGenerator.addStep(rdfGeneration); // Generate rdfGenerator.generateRdfs(); // Verify Model model = TDBFactory.createModel(testTdbDir); Assert.assertFalse("No RDF was generated. TDB directory: " + testTdbDir, model.isEmpty()); Resource r = model.getResource("http://example.org/resource/class/unitNumerator"); // Failing, investigate Assert.assertTrue(r.hasProperty(model.getProperty("http://example.org/resource/property/measure"))); // ResIterator iter = model.listResourcesWithProperty(RDF.type); // while (iter.hasNext()) { // Resource resource = iter.nextResource(); // System.out.println(resource.getLocalName()); // StmtIterator iterStm = resource.listProperties(); // while (iterStm.hasNext()) { // System.out.println(iterStm.nextStatement().toString()); // } // } }
Example #10
Source File: RdfGeneratorTest.java From xcurator with Apache License 2.0 | 5 votes |
/** * Run the RDF generator pipeline for clinical trial data Before running * this, run the Mapping Discovery Test first to generate the mapping file * for clinical trials. * * @throws SAXException * @throws IOException * @throws ParserConfigurationException */ @Test public void test_generateRdfs_clinical_trials() throws SAXException, IOException, ParserConfigurationException { // Setup deserializer mappingDeserialization = new XmlBasedMappingDeserialization( new FileInputStream("output/clinicaltrials-mapping.xml"), parser); Document dataDocument = parser.parse(RdfGeneratorTest.class.getResourceAsStream( "/clinicaltrials/data/content.xml"), 10); rdfGenerator = new RdfGenerator(new DataDocument(dataDocument), new XmlBasedMapping()); // Add steps rdfGenerator.addStep(mappingDeserialization); rdfGenerator.addStep(rdfGeneration); // Generate rdfGenerator.generateRdfs(); // Verify Model model = TDBFactory.createModel(testTdbDir); Assert.assertFalse("No RDF was generated. TDB directory: " + testTdbDir, model.isEmpty()); ResIterator iter = model.listResourcesWithProperty(RDF.type); while (iter.hasNext()) { Resource resource = iter.nextResource(); System.out.println(resource.getLocalName()); StmtIterator iterStm = resource.listProperties(); while (iterStm.hasNext()) { System.out.println(iterStm.nextStatement().toString()); } } }
Example #11
Source File: RdfGeneratorTest2.java From xcurator with Apache License 2.0 | 5 votes |
@Test // Run test_discoverMapping_XBRL_msft to generate the mapping file before running // this test. public void test_generateRdfs_msft_XBRL() throws SAXException, IOException, ParserConfigurationException { // Setup deserializer mappingDeserialization = new XmlBasedMappingDeserialization( new FileInputStream("output/msft-20130630-mapping.xml"), parser); Document dataDocument = parser.parse(RdfGeneratorTest2.class.getResourceAsStream( "/secxbrls/data/msft-20130630.xml"), -1); rdfGenerator = new RdfGenerator(new DataDocument(dataDocument), new XmlBasedMapping()); // Add steps rdfGenerator.addStep(mappingDeserialization); rdfGenerator.addStep(rdfGeneration); // Generate rdfGenerator.generateRdfs(); // Verify Model model = TDBFactory.createModel(testTdbDir); Assert.assertFalse("No RDF was generated. TDB directory: " + testTdbDir, model.isEmpty()); Resource r = model.getResource("http://example.org/resource/class/unitNumerator"); // Failing, investigate Assert.assertTrue(r.hasProperty(model.getProperty("http://example.org/resource/property/measure"))); // ResIterator iter = model.listResourcesWithProperty(RDF.type); // while (iter.hasNext()) { // Resource resource = iter.nextResource(); // System.out.println(resource.getLocalName()); // StmtIterator iterStm = resource.listProperties(); // while (iterStm.hasNext()) { // System.out.println(iterStm.nextStatement().toString()); // } // } }
Example #12
Source File: RdfGeneratorTest2.java From xcurator with Apache License 2.0 | 5 votes |
@Test // Run test_discoverMapping_fb_XBRL to generate the mapping file before running // this test. public void test_generateRdfs_fb_XBRL() throws SAXException, IOException, ParserConfigurationException { // Setup deserializer mappingDeserialization = new XmlBasedMappingDeserialization( new FileInputStream("output/fb-20121231-mapping.xml"), parser); Document dataDocument = parser.parse(RdfGeneratorTest2.class.getResourceAsStream( "/secxbrls/data/fb-20121231.xml"), -1); rdfGenerator = new RdfGenerator(new DataDocument(dataDocument), new XmlBasedMapping()); // Add steps rdfGenerator.addStep(mappingDeserialization); rdfGenerator.addStep(rdfGeneration); // Generate rdfGenerator.generateRdfs(); // Verify Model model = TDBFactory.createModel(testTdbDir); Assert.assertFalse("No RDF was generated. TDB directory: " + testTdbDir, model.isEmpty()); Resource r = model.getResource("http://example.org/resource/class/unitNumerator"); // Failing, investigate Assert.assertTrue(r.hasProperty(model.getProperty("http://example.org/resource/property/measure"))); // ResIterator iter = model.listResourcesWithProperty(RDF.type); // while (iter.hasNext()) { // Resource resource = iter.nextResource(); // System.out.println(resource.getLocalName()); // StmtIterator iterStm = resource.listProperties(); // while (iterStm.hasNext()) { // System.out.println(iterStm.nextStatement().toString()); // } // } }
Example #13
Source File: RdfGeneratorTest2.java From xcurator with Apache License 2.0 | 5 votes |
/** * Run the RDF generator pipeline for clinical trial data Before running * this, run the Mapping Discovery Test first to generate the mapping file * for clinical trials. * * @throws SAXException * @throws IOException * @throws ParserConfigurationException */ @Test public void test_generateRdfs_clinical_trials() throws SAXException, IOException, ParserConfigurationException { // Setup deserializer mappingDeserialization = new XmlBasedMappingDeserialization( new FileInputStream("output/clinicaltrials-mapping.xml"), parser); Document dataDocument = parser.parse(RdfGeneratorTest2.class.getResourceAsStream( "/clinicaltrials/data/content.xml"), 10); rdfGenerator = new RdfGenerator(new DataDocument(dataDocument), new XmlBasedMapping()); // Add steps rdfGenerator.addStep(mappingDeserialization); rdfGenerator.addStep(rdfGeneration); // Generate rdfGenerator.generateRdfs(); // Verify Model model = TDBFactory.createModel(testTdbDir); Assert.assertFalse("No RDF was generated. TDB directory: " + testTdbDir, model.isEmpty()); ResIterator iter = model.listResourcesWithProperty(RDF.type); while (iter.hasNext()) { Resource resource = iter.nextResource(); System.out.println(resource.getLocalName()); StmtIterator iterStm = resource.listProperties(); while (iterStm.hasNext()) { System.out.println(iterStm.nextStatement().toString()); } } }
Example #14
Source File: RdfGeneratorTest2.java From xcurator with Apache License 2.0 | 4 votes |
@Test // Run test_discoverMapping_multiple_XBRLs to generate the mapping file before running // this test. public void test_generateRdfs_multiple_XBRLs() throws SAXException, IOException, ParserConfigurationException { // Setup deserializer mappingDeserialization = new XmlBasedMappingDeserialization( new FileInputStream("output/xbrl-mapping.xml"), parser); Document fb2013 = parser.parse(BasicEntityDiscoveryTest.class.getResourceAsStream( "/secxbrls/data/fb-20131231.xml"), -1); Document msft2013 = parser.parse(BasicEntityDiscoveryTest.class.getResourceAsStream( "/secxbrls/data/msft-20130630.xml"), -1); Document goog2013 = parser.parse(BasicEntityDiscoveryTest.class.getResourceAsStream( "/secxbrls/data/goog-20131231.xml"), -1); rdfGenerator = new RdfGenerator(new XmlBasedMapping()); // Add document and steps rdfGenerator.addDataDocument(new DataDocument(fb2013, "http://example.org/resource/fb-20131231")) .addDataDocument(new DataDocument(msft2013, "http://example.org/resource/msft-20130630")) .addDataDocument(new DataDocument(goog2013, "http://example.org/resource/goog-20131231")) .addStep(mappingDeserialization) .addStep(rdfGeneration); // Generate rdfGenerator.generateRdfs(); // Verify Model model = TDBFactory.createModel(testTdbDir); Assert.assertFalse("No RDF was generated. TDB directory: " + testTdbDir, model.isEmpty()); ResIterator iter = model.listResourcesWithProperty(RDF.type); while (iter.hasNext()) { Resource resource = iter.nextResource(); System.out.println(resource.getLocalName()); StmtIterator iterStm = resource.listProperties(); while (iterStm.hasNext()) { System.out.println(iterStm.nextStatement().toString()); } } }
Example #15
Source File: RdfGeneratorTest.java From xcurator with Apache License 2.0 | 4 votes |
@Test // Run test_discoverMapping_multiple_XBRLs to generate the mapping file before running // this test. public void test_generateRdfs_multiple_XBRLs() throws SAXException, IOException, ParserConfigurationException { // Setup deserializer mappingDeserialization = new XmlBasedMappingDeserialization( new FileInputStream("output/xbrl-mapping.xml"), parser); Document fb2013 = parser.parse(BasicEntityDiscoveryTest.class.getResourceAsStream( "/secxbrls/data/fb-20131231.xml"), -1); Document msft2013 = parser.parse(BasicEntityDiscoveryTest.class.getResourceAsStream( "/secxbrls/data/msft-20130630.xml"), -1); Document goog2013 = parser.parse(BasicEntityDiscoveryTest.class.getResourceAsStream( "/secxbrls/data/goog-20131231.xml"), -1); rdfGenerator = new RdfGenerator(new XmlBasedMapping()); // Add document and steps rdfGenerator.addDataDocument(new DataDocument(fb2013, "http://example.org/resource/fb-20131231")) .addDataDocument(new DataDocument(msft2013, "http://example.org/resource/msft-20130630")) .addDataDocument(new DataDocument(goog2013, "http://example.org/resource/goog-20131231")) .addStep(mappingDeserialization) .addStep(rdfGeneration); // Generate rdfGenerator.generateRdfs(); // Verify Model model = TDBFactory.createModel(testTdbDir); Assert.assertFalse("No RDF was generated. TDB directory: " + testTdbDir, model.isEmpty()); ResIterator iter = model.listResourcesWithProperty(RDF.type); while (iter.hasNext()) { Resource resource = iter.nextResource(); System.out.println(resource.getLocalName()); StmtIterator iterStm = resource.listProperties(); while (iterStm.hasNext()) { System.out.println(iterStm.nextStatement().toString()); } } }
Example #16
Source File: JenaUtils.java From xcurator with Apache License 2.0 | 4 votes |
/** Open (or create) the TDB-backed model stored at {@code path}. */
public static Model getTDBModel(String path) {
    return TDBFactory.createModel(path);
}
Example #17
Source File: RDFFileManager.java From Benchmark with GNU General Public License v3.0 | 4 votes |
public static void initializeDataset() { dataset = TDBFactory.createDataset(databaseDirectory); // } else // dataset = TDBFactory.createDataset(databaseDirectory); }
Example #18
Source File: SelectEntityTrig.java From EventCoreference with Apache License 2.0 | 4 votes |
static public void main (String[] args) { String trigfolderPath = ""; trigfolderPath = "/Users/piek/Desktop/tweede-kamer/events"; String entity = ""; String date = ""; entity = "lippens"; for (int i = 0; i < args.length; i++) { String arg = args[i]; if (arg.equals("--trig-folder") && args.length>(i+1)) { trigfolderPath = args[i+1]; } else if (arg.equals("--entity") && args.length>(i+1)) { entity = args[i+1]; } else if (arg.equals("--event-date") && args.length>(i+1)) { date = args[i+1]; } } if (entity.isEmpty()) { System.out.println("Entity is empty"); return; } File trigfolder = new File(trigfolderPath); String trigEntityPath = trigfolder.getParent()+"/"+entity; File entityTrigFolder = new File (trigEntityPath); if (!entityTrigFolder.exists()) { entityTrigFolder.mkdir(); } if (!entityTrigFolder.exists()) { System.out.println("Could not create entity trig folder"); return; } dataset = TDBFactory.createDataset(); ArrayList<File> trigFiles = Util.makeRecursiveFileList(trigfolder, ".trig"); System.out.println(trigfolder.getName() + " trigFiles.size() = " + trigFiles.size()); int cnt = 1; for (int i = 0; i < trigFiles.size(); i++) { File file = trigFiles.get(i); if (!file.getParentFile().getName().startsWith(date)) { continue; } if (i%500==0) { System.out.println("i = " + i); // if (i>1000) break; } ArrayList<String> events = new ArrayList<String>(); dataset = RDFDataMgr.loadDataset(file.getAbsolutePath()); Model namedModel = dataset.getNamedModel(TrigUtil.instanceGraph); StmtIterator siter = namedModel.listStatements(); while (siter.hasNext()) { Statement s = siter.nextStatement(); String subject = s.getSubject().getURI().toLowerCase(); if (subject.indexOf(entity.toLowerCase())>-1) { String trigName = trigEntityPath+"/"+cnt+"_"+file.getName(); File trigCopy = new File(trigName); copyFile(file, trigCopy); cnt++; break; } } dataset = null; } }