org.kitesdk.data.spi.DefaultConfiguration Java Examples
The following examples show how to use
org.kitesdk.data.spi.DefaultConfiguration.
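As the examples below illustrate, DefaultConfiguration holds the process-wide Hadoop Configuration that Kite falls back on when no explicit configuration is supplied: callers read it with get(), replace it with set(), and the MapReduce input/output formats seed it with init(). The sketch below is for orientation only and is not taken from any of the listed projects; the class name and the property key are hypothetical, and it simply shows the read-copy-replace-restore pattern that recurs throughout the examples.

import org.apache.hadoop.conf.Configuration;
import org.kitesdk.data.spi.DefaultConfiguration;

public class DefaultConfigurationSketch {

  public static void main(String[] args) {
    // read the current process-wide default configuration
    Configuration original = DefaultConfiguration.get();

    // work on a modifiable copy so the shared default is not mutated in place
    Configuration conf = new Configuration(original);
    conf.set("example.custom.property", "example-value"); // hypothetical property key

    // install the copy as the new default for subsequent Kite calls
    DefaultConfiguration.set(conf);

    try {
      // ... code that relies on DefaultConfiguration.get() runs here ...
    } finally {
      // restore the original default, as the tests in this listing do
      DefaultConfiguration.set(original);
    }
  }
}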
Example #1
Source File: AbstractKiteProcessor.java From localization_nifi with Apache License 2.0 | 6 votes |
protected static Configuration getConfiguration(String configFiles) {
    Configuration conf = DefaultConfiguration.get();

    if (configFiles == null || configFiles.isEmpty()) {
        return conf;
    }

    for (String file : COMMA.split(configFiles)) {
        // process each resource only once
        if (conf.getResource(file) == null) {
            // use Path instead of String to get the file from the FS
            conf.addResource(new Path(file));
        }
    }

    return conf;
}
Example #2
Source File: Loader.java From kite with Apache License 2.0 | 6 votes |
@Override
public void load() {
    Registration.register(
        new URIPattern("hbase::zk"),
        new URIPattern("hbase::zk/:dataset?namespace=default"),
        new OptionBuilder<DatasetRepository>() {
            @Override
            public DatasetRepository getFromOptions(Map<String, String> options) {
                Configuration conf = HBaseConfiguration.create(DefaultConfiguration.get());
                String[] hostsAndPort = parseHostsAndPort(options.get("zk"));
                conf.set(HConstants.ZOOKEEPER_QUORUM, hostsAndPort[0]);
                String port = hostsAndPort[1];
                if (port != null) {
                    conf.set(HConstants.ZOOKEEPER_CLIENT_PORT, port);
                }
                return new HBaseDatasetRepository.Builder().configuration(conf).build();
            }
        });
}
Example #3
Source File: Loader.java From kite with Apache License 2.0 | 6 votes |
@Override
public void load() {
    try {
        // load hdfs-site.xml by loading HdfsConfiguration
        FileSystem.getLocal(DefaultConfiguration.get());
    } catch (IOException e) {
        throw new DatasetIOException("Cannot load default config", e);
    }

    OptionBuilder<DatasetRepository> builder = new URIBuilder();

    // username and secret are the same; host is the bucket
    Registration.register(
        new URIPattern("s3n:/*path"),
        new URIPattern("s3n:/*path/:namespace/:dataset"),
        builder);

    Registration.register(
        new URIPattern("s3a:/*path"),
        new URIPattern("s3a:/*path/:namespace/:dataset"),
        builder);
}
Example #4
Source File: Loader.java From kite with Apache License 2.0 | 6 votes |
@Override
public DatasetRepository getFromOptions(Map<String, String> match) {
    String path = match.get("path");
    final Path root = (path == null || path.isEmpty()) ?
        new Path("/") : new Path("/", path);

    Configuration conf = DefaultConfiguration.get();
    FileSystem fs;
    try {
        fs = FileSystem.get(fileSystemURI(match), conf);
    } catch (IOException e) {
        // "Incomplete HDFS URI, no host" => add a helpful suggestion
        if (e.getMessage().startsWith("Incomplete")) {
            throw new DatasetIOException("Could not get a FileSystem: " +
                "make sure the credentials for " + match.get(URIPattern.SCHEME) +
                " URIs are configured.", e);
        }
        throw new DatasetIOException("Could not get a FileSystem", e);
    }

    return new FileSystemDatasetRepository.Builder()
        .configuration(new Configuration(conf)) // make a modifiable copy
        .rootDirectory(fs.makeQualified(root))
        .build();
}
Example #5
Source File: TestMergeOutputCommitter.java From kite with Apache License 2.0 | 6 votes |
@Test
public void testSetupJobIsIdempotent() {
    DatasetKeyOutputFormat.MergeOutputCommitter<Object> outputCommitter =
        new DatasetKeyOutputFormat.MergeOutputCommitter<Object>();
    Configuration conf = DefaultConfiguration.get();
    DatasetKeyOutputFormat.configure(conf).appendTo(outputDataset);

    JobID jobId = new JobID("jt", 42);
    JobContext context = Hadoop.JobContext.ctor.newInstance(conf, jobId);

    // setup the job
    outputCommitter.setupJob(context);

    // call setup again to simulate an ApplicationMaster restart
    outputCommitter.setupJob(context);
}
Example #6
Source File: TestKiteConfigurationService.java From kite with Apache License 2.0 | 6 votes |
@After
public void removeDataPath() throws IOException {
    // restore configuration
    DefaultConfiguration.set(startingConf);

    if (serviceTempDir != null) {
        FileUtils.deleteDirectory(serviceTempDir);
        serviceTempDir = null;
    }

    if (kiteConfigPath != null) {
        fs.delete(kiteConfigPath, true);
        kiteConfigPath = null;
    }

    if (Services.get() != null) {
        Services.get().destroy();
    }

    if (startingOozieHome == null) {
        System.clearProperty("oozie.home.dir");
    } else {
        System.setProperty("oozie.home.dir", startingOozieHome);
        startingOozieHome = null;
    }
}
Example #7
Source File: TestKiteURIHandler.java From kite with Apache License 2.0 | 6 votes |
@Test
public void loadConfigFromHCatAccessor() throws URIHandlerException,
    URISyntaxException, ServiceException, IOException {
    setupKiteConfigurationService(true, true);

    URI uri = new URI("view:file:target/data/data/nomailbox?message=hello");
    uriHandler.exists(uri, null);

    Configuration defaultConf = DefaultConfiguration.get();
    Assert.assertEquals("test.value", defaultConf.get("test.property"));

    Services.get().get(KiteConfigurationService.class).getKiteConf()
        .set("test.value", "something.else");

    // doesn't modify default config on further exist calls
    uriHandler.exists(uri, null);
    defaultConf = DefaultConfiguration.get();
    Assert.assertEquals("test.value", defaultConf.get("test.property"));
    Assert.assertEquals("something.else",
        Services.get().get(KiteConfigurationService.class).getKiteConf().get("test.value"));
}
Example #8
Source File: TestKiteURIHandler.java From kite with Apache License 2.0 | 6 votes |
@After
public void removeDataPath() throws IOException {
    fs.delete(new Path("target/data"), true);

    // restore configuration
    DefaultConfiguration.set(startingConf);

    if (serviceTempDir != null) {
        FileUtils.deleteDirectory(serviceTempDir);
        serviceTempDir = null;
    }

    if (Services.get() != null) {
        Services.get().destroy();
    }

    if (startingOozieHome == null) {
        System.clearProperty("oozie.home.dir");
    } else {
        System.setProperty("oozie.home.dir", startingOozieHome);
        startingOozieHome = null;
    }
}
Example #9
Source File: KiteURIHandler.java From kite with Apache License 2.0 | 6 votes |
private synchronized void loadConfiguration() {
    if (Services.get() != null) {
        KiteConfigurationService kiteService =
            Services.get().get(KiteConfigurationService.class);
        if (kiteService != null) {
            Configuration kiteConf = kiteService.getKiteConf();
            if (kiteConf != null) {
                DefaultConfiguration.set(kiteConf);
            } else {
                // kite conf was null
                LOG.warn("Configuration for Kite not loaded, Kite configuration service config was null.");
            }
        } else {
            // service was null
            LOG.warn("Configuration for Kite not loaded, Kite configuration service was not available.");
        }
    } else {
        // services were null
        LOG.warn("Configuration for Kite not loaded, oozie services were not available.");
    }
}
Example #10
Source File: TestFlumeConfigurationCommand.java From kite with Apache License 2.0 | 6 votes |
@BeforeClass
public static void setConfiguration() throws Exception {
    HBaseTestUtils.getMiniCluster();

    original = DefaultConfiguration.get();
    Configuration conf = HBaseTestUtils.getConf();
    DefaultConfiguration.set(conf);

    zkQuorum = conf.get(HConstants.ZOOKEEPER_QUORUM);
    zkPort = conf.get(HConstants.ZOOKEEPER_CLIENT_PORT);

    URI defaultFs = URI.create(conf.get("fs.default.name"));
    hdfsIsDefault = "hdfs".equals(defaultFs.getScheme());
    hdfsHost = defaultFs.getHost();
    hdfsPort = Integer.toString(defaultFs.getPort());
}
Example #11
Source File: TestKiteProcessorsCluster.java From nifi with Apache License 2.0 | 6 votes |
@Test
public void testSchemaFromDistributedFileSystem() throws IOException {
    Schema expected = SchemaBuilder.record("Test").fields()
        .requiredLong("id")
        .requiredString("color")
        .optionalDouble("price")
        .endRecord();

    Path schemaPath = new Path("hdfs:/tmp/schema.avsc");
    FileSystem fs = schemaPath.getFileSystem(DefaultConfiguration.get());

    OutputStream out = fs.create(schemaPath);
    out.write(bytesFor(expected.toString(), Charset.forName("utf8")));
    out.close();

    Schema schema = AbstractKiteProcessor.getSchema(
        schemaPath.toString(), DefaultConfiguration.get());

    Assert.assertEquals("Schema from file should match", expected, schema);
}
Example #12
Source File: TestKiteProcessorsCluster.java From localization_nifi with Apache License 2.0 | 6 votes |
@Test
public void testSchemaFromDistributedFileSystem() throws IOException {
    Schema expected = SchemaBuilder.record("Test").fields()
        .requiredLong("id")
        .requiredString("color")
        .optionalDouble("price")
        .endRecord();

    Path schemaPath = new Path("hdfs:/tmp/schema.avsc");
    FileSystem fs = schemaPath.getFileSystem(DefaultConfiguration.get());

    OutputStream out = fs.create(schemaPath);
    out.write(bytesFor(expected.toString(), Charset.forName("utf8")));
    out.close();

    Schema schema = AbstractKiteProcessor.getSchema(
        schemaPath.toString(), DefaultConfiguration.get());

    Assert.assertEquals("Schema from file should match", expected, schema);
}
Example #13
Source File: TestConfigurationProperty.java From localization_nifi with Apache License 2.0 | 6 votes |
@Test
public void testConfigurationCanary() throws IOException {
    TestRunner runner = TestRunners.newTestRunner(StoreInKiteDataset.class);
    runner.setProperty(
        AbstractKiteProcessor.CONF_XML_FILES, confLocation.toString());

    Assert.assertFalse("Should not contain canary value",
        DefaultConfiguration.get().getBoolean("nifi.config.canary", false));

    AbstractKiteProcessor processor = new StoreInKiteDataset();
    ProcessContext context = runner.getProcessContext();
    processor.setDefaultConfiguration(context);

    Assert.assertTrue("Should contain canary value",
        DefaultConfiguration.get().getBoolean("nifi.config.canary", false));
}
Example #14
Source File: AbstractKiteProcessor.java From nifi with Apache License 2.0 | 6 votes |
protected static Configuration getConfiguration(String configFiles) {
    Configuration conf = DefaultConfiguration.get();

    if (configFiles == null || configFiles.isEmpty()) {
        return conf;
    }

    for (String file : COMMA.split(configFiles)) {
        // process each resource only once
        if (conf.getResource(file) == null) {
            // use Path instead of String to get the file from the FS
            conf.addResource(new Path(file));
        }
    }

    return conf;
}
Example #15
Source File: TestConfigurationProperty.java From nifi with Apache License 2.0 | 6 votes |
@Test
public void testConfigurationCanary() throws IOException {
    TestRunner runner = TestRunners.newTestRunner(StoreInKiteDataset.class);
    runner.setProperty(
        AbstractKiteProcessor.CONF_XML_FILES, confLocation.toString());

    Assert.assertFalse("Should not contain canary value",
        DefaultConfiguration.get().getBoolean("nifi.config.canary", false));

    AbstractKiteProcessor processor = new StoreInKiteDataset();
    ProcessContext context = runner.getProcessContext();
    processor.setDefaultConfiguration(context);

    Assert.assertTrue("Should contain canary value",
        DefaultConfiguration.get().getBoolean("nifi.config.canary", false));
}
Example #16
Source File: TestS3Dataset.java From kite with Apache License 2.0 | 5 votes |
@BeforeClass
public static void addCredentials() {
    original = DefaultConfiguration.get();

    Configuration conf = DefaultConfiguration.get();
    if (ID != null) {
        conf.set("fs.s3n.awsAccessKeyId", ID);
        conf.set("fs.s3n.awsSecretAccessKey", KEY);
        conf.set("fs.s3a.access.key", ID);
        conf.set("fs.s3a.secret.key", KEY);
    }
    DefaultConfiguration.set(conf);
}
Example #17
Source File: DatasetDescriptor.java From kite with Apache License 2.0 | 5 votes |
public Builder() {
    this.properties = Maps.newHashMap();
    this.conf = DefaultConfiguration.get();
    try {
        this.defaultFS = FileSystem.get(conf).getUri();
    } catch (IOException e) {
        throw new DatasetIOException("Cannot get the default FS", e);
    }
}
Example #18
Source File: Loader.java From kite with Apache License 2.0 | 5 votes |
@Override
public DatasetRepository getFromOptions(Map<String, String> match) {
    final Path root;
    String path = match.get("path");
    if (match.containsKey("absolute")
        && Boolean.valueOf(match.get("absolute"))) {
        root = (path == null || path.isEmpty()) ?
            new Path("/") : new Path("/", path);
    } else {
        root = (path == null || path.isEmpty()) ?
            new Path(".") : new Path(path);
    }

    Configuration conf = DefaultConfiguration.get();
    FileSystem fs;
    try {
        fs = FileSystem.get(fileSystemURI(match), conf);
    } catch (IOException e) {
        // "Incomplete HDFS URI, no host" => add a helpful suggestion
        if (e.getMessage().startsWith("Incomplete")) {
            throw new DatasetIOException("Could not get a FileSystem: " +
                "make sure the default " + match.get(URIPattern.SCHEME) +
                " URI is configured.", e);
        }
        throw new DatasetIOException("Could not get a FileSystem", e);
    }

    return new FileSystemDatasetRepository.Builder()
        .configuration(new Configuration(conf)) // make a modifiable copy
        .rootDirectory(fs.makeQualified(root))
        .build();
}
Example #19
Source File: Loader.java From kite with Apache License 2.0 | 5 votes |
@Override
public void load() {
    try {
        // load hdfs-site.xml by loading HdfsConfiguration
        FileSystem.getLocal(DefaultConfiguration.get());
    } catch (IOException e) {
        throw new DatasetIOException("Cannot load default config", e);
    }

    OptionBuilder<DatasetRepository> builder = new URIBuilder();

    Registration.register(
        new URIPattern("file:/*path?absolute=true"),
        new URIPattern("file:/*path/:namespace/:dataset?absolute=true"),
        builder);

    Registration.register(
        new URIPattern("file:*path"),
        new URIPattern("file:*path/:namespace/:dataset"),
        builder);

    Registration.register(
        new URIPattern("hdfs:/*path?absolute=true"),
        new URIPattern("hdfs:/*path/:namespace/:dataset?absolute=true"),
        builder);

    Registration.register(
        new URIPattern("hdfs:*path"),
        new URIPattern("hdfs:*path/:namespace/:dataset"),
        builder);

    Registration.register(
        new URIPattern("webhdfs:/*path?absolute=true"),
        new URIPattern("webhdfs:/*path/:namespace/:dataset?absolute=true"),
        builder);
}
Example #20
Source File: TestGetSchema.java From localization_nifi with Apache License 2.0 | 5 votes |
@Test
@Ignore("Does not work on windows")
public void testSchemaFromFileSystem() throws IOException {
    File schemaFile = temp.newFile("schema.avsc");
    FileOutputStream out = new FileOutputStream(schemaFile);
    out.write(bytesFor(SCHEMA.toString(), Charset.forName("utf8")));
    out.close();

    Schema schema = AbstractKiteProcessor.getSchema(
        schemaFile.toString(), DefaultConfiguration.get());

    Assert.assertEquals("Schema from file should match", SCHEMA, schema);
}
Example #21
Source File: DatasetKeyInputFormat.java From kite with Apache License 2.0 | 5 votes |
@Override
@edu.umd.cs.findbugs.annotations.SuppressWarnings(
    value="UWF_FIELD_NOT_INITIALIZED_IN_CONSTRUCTOR",
    justification="Delegate set by setConf")
public RecordReader<E, Void> createRecordReader(InputSplit inputSplit,
    TaskAttemptContext taskAttemptContext)
    throws IOException, InterruptedException {
    Configuration conf = Hadoop.TaskAttemptContext
        .getConfiguration.invoke(taskAttemptContext);
    DefaultConfiguration.init(conf);
    return delegate.createRecordReader(inputSplit, taskAttemptContext);
}
Example #22
Source File: DatasetKeyInputFormat.java From kite with Apache License 2.0 | 5 votes |
@SuppressWarnings({"deprecation", "unchecked"})
private static <E> View<E> load(Configuration conf) {
    Class<E> type;
    try {
        type = (Class<E>) conf.getClass(KITE_TYPE, GenericData.Record.class);
    } catch (RuntimeException e) {
        if (e.getCause() instanceof ClassNotFoundException) {
            throw new TypeNotFoundException(String.format(
                "The Java class %s for the entity type could not be found",
                conf.get(KITE_TYPE)), e.getCause());
        } else {
            throw e;
        }
    }

    DefaultConfiguration.set(conf);

    String schemaStr = conf.get(KITE_READER_SCHEMA);
    Schema projection = null;
    if (schemaStr != null) {
        projection = new Schema.Parser().parse(schemaStr);
    }

    String inputUri = conf.get(KITE_INPUT_URI);
    if (projection != null) {
        return Datasets.load(inputUri).asSchema(projection).asType(type);
    } else {
        return Datasets.load(inputUri, type);
    }
}
Example #23
Source File: DatasetKeyOutputFormat.java From kite with Apache License 2.0 | 5 votes |
@Override
public OutputCommitter getOutputCommitter(TaskAttemptContext taskAttemptContext) {
    Configuration conf = Hadoop.TaskAttemptContext
        .getConfiguration.invoke(taskAttemptContext);
    DefaultConfiguration.init(conf);
    View<E> view = load(taskAttemptContext);
    return usePerTaskAttemptDatasets(view, conf) ?
        new MergeOutputCommitter<E>() : new NullOutputCommitter();
}
Example #24
Source File: TestGetSchema.java From localization_nifi with Apache License 2.0 | 5 votes |
@Test
public void testSchemaFromResourceURI() throws IOException {
    DatasetDescriptor descriptor = new DatasetDescriptor.Builder()
        .schemaUri("resource:schema/user.avsc") // in kite-data-core test-jar
        .build();
    Schema expected = descriptor.getSchema();

    Schema schema = AbstractKiteProcessor.getSchema(
        "resource:schema/user.avsc", DefaultConfiguration.get());

    Assert.assertEquals("Schema from resource URI should match",
        expected, schema);
}
Example #25
Source File: TestDefaultConfigurationFileSystem.java From kite with Apache License 2.0 | 5 votes |
@Test
public void testFindsHDFS() throws Exception {
    // set the default configuration that the loader will use
    Configuration existing = DefaultConfiguration.get();
    DefaultConfiguration.set(getConfiguration());

    FileSystemDataset<GenericRecord> dataset =
        Datasets.load("dataset:hdfs:/tmp/datasets/ns/strings");
    Assert.assertNotNull("Dataset should be found", dataset);
    Assert.assertEquals("Dataset should be located in HDFS",
        "hdfs", dataset.getFileSystem().getUri().getScheme());

    // replace the original config so the other tests are not affected
    DefaultConfiguration.set(existing);
}
Example #26
Source File: TestKiteURIHandler.java From kite with Apache License 2.0 | 5 votes |
@Before
public void setUp() throws IOException, URISyntaxException {
    this.conf = (distributed ?
        MiniDFSTest.getConfiguration() :
        new Configuration());

    this.fs = FileSystem.get(conf);

    this.testDescriptor = new DatasetDescriptor.Builder()
        .format(Formats.AVRO)
        .schema(SchemaBuilder.record("Event").fields()
            .requiredLong("timestamp")
            .requiredString("message")
            .endRecord())
        .partitionStrategy(new PartitionStrategy.Builder()
            .year("timestamp")
            .month("timestamp")
            .day("timestamp")
            .build())
        .build();

    uriHandler = new KiteURIHandler();

    startingConf = DefaultConfiguration.get();
    startingOozieHome = System.getProperty("oozie.home.dir");
}
Example #27
Source File: Loader.java From kite with Apache License 2.0 | 5 votes |
@Override
public DatasetRepository getFromOptions(Map<String, String> match) {
    LOG.debug("External URI options: {}", match);

    final Path root;
    String path = match.get("path");
    if (match.containsKey("absolute")
        && Boolean.valueOf(match.get("absolute"))) {
        root = (path == null || path.isEmpty()) ?
            new Path("/") : new Path("/", path);
    } else {
        root = (path == null || path.isEmpty()) ?
            new Path(".") : new Path(path);
    }

    // make a modifiable copy (it may be changed)
    Configuration conf = newHiveConf(DefaultConfiguration.get());

    FileSystem fs;
    try {
        fs = FileSystem.get(fileSystemURI(match, conf), conf);
    } catch (IOException e) {
        // "Incomplete HDFS URI, no host" => add a helpful suggestion
        if (e.getMessage().startsWith("Incomplete")) {
            throw new DatasetIOException("Could not get a FileSystem: " +
                "make sure the default " + match.get(URIPattern.SCHEME) +
                " URI is configured.", e);
        }
        throw new DatasetIOException("Could not get a FileSystem", e);
    }

    // setup the MetaStore URI
    setMetaStoreURI(conf, match);

    return new HiveManagedDatasetRepository.Builder()
        .configuration(conf)
        .rootDirectory(fs.makeQualified(root))
        .build();
}
Example #28
Source File: AbstractDatasetMojo.java From kite with Apache License 2.0 | 5 votes |
protected Configuration getConf() {
    if (!addedConf) {
        addToConfiguration(hadoopConfiguration);
    }
    // use the default
    return DefaultConfiguration.get();
}
Example #29
Source File: AbstractDatasetMojo.java From kite with Apache License 2.0 | 5 votes |
private static void addToConfiguration(Properties hadoopConfiguration) {
    // base the new Configuration on the current defaults
    Configuration conf = new Configuration(DefaultConfiguration.get());

    // add all of the properties as config settings
    for (String key : hadoopConfiguration.stringPropertyNames()) {
        String value = hadoopConfiguration.getProperty(key);
        conf.set(key, value);
    }

    // replace the original Configuration
    DefaultConfiguration.set(conf);

    addedConf = true;
}
Example #30
Source File: TestUtil.java From kite with Apache License 2.0 | 5 votes |
public static int run(Logger console, Configuration conf, String... args)
    throws Exception {
    // ensure the default config is not changed by calling Main
    Configuration original = DefaultConfiguration.get();

    Main main = new Main(console);
    main.setConf(conf);

    int rc = main.run(args);

    DefaultConfiguration.set(original);

    return rc;
}