Java Code Examples for org.apache.oozie.client.OozieClient#createConfiguration()
The following examples show how to use org.apache.oozie.client.OozieClient#createConfiguration().
To see an example in context, follow the source-file reference above it to the original project.
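Before the examples, here is the basic pattern they all share: createConfiguration() returns a java.util.Properties object seeded with defaults (such as the submitting user name), you set job properties on it, and you hand it to run() or submit(). A minimal sketch for orientation; the server URL, application path, and user name below are placeholders, not values from any example on this page:

import java.util.Properties;

import org.apache.oozie.client.OozieClient;
import org.apache.oozie.client.OozieClientException;

public class SubmitExample {
    public static void main(String[] args) throws OozieClientException {
        // Placeholder URL -- point this at a real Oozie server.
        OozieClient client = new OozieClient("http://localhost:11000/oozie");

        // createConfiguration() returns a Properties instance seeded for this client.
        Properties conf = client.createConfiguration();
        conf.setProperty(OozieClient.APP_PATH, "hdfs://localhost:8020/user/test/app"); // hypothetical path
        conf.setProperty(OozieClient.USER_NAME, "test");

        // run() submits the job and starts it immediately; submit() would leave it in PREP.
        String jobId = client.run(conf);
        System.out.println("Started Oozie workflow " + jobId);
    }
}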
Example 1
Source File: RunAppMojo.java From kite with Apache License 2.0
public void execute() throws MojoExecutionException, MojoFailureException {
    OozieClient oozieClient = new OozieClient(oozieUrl);
    Properties conf = oozieClient.createConfiguration();
    if (jobProperties != null) {
        conf.putAll(jobProperties);
    }
    if (hadoopConfiguration != null) {
        conf.putAll(hadoopConfiguration);
        String hadoopFs = hadoopConfiguration.getProperty("fs.default.name");
        if (hadoopFs == null) {
            throw new MojoExecutionException("Missing property 'fs.default.name' in "
                + "hadoopConfiguration");
        }
        String hadoopJobTracker = hadoopConfiguration.getProperty("mapred.job.tracker");
        if (hadoopJobTracker == null) {
            throw new MojoExecutionException("Missing property 'mapred.job.tracker' in "
                + "hadoopConfiguration");
        }
        conf.put(NAMENODE_PROPERTY, hadoopFs);
        conf.put(JOBTRACKER_PROPERTY, hadoopJobTracker);
    }
    String appPath = getAppPath().toString();
    conf.setProperty(getAppPathPropertyName(), appPath);
    conf.setProperty(APP_PATH_PROPERTY, appPath); // used in coordinator.xml
    getLog().info("App path: " + appPath);
    try {
        String jobId = oozieClient.run(conf);
        getLog().info("Running Oozie job " + jobId);
    } catch (OozieClientException e) {
        throw new MojoExecutionException("Error running Oozie job", e);
    }
}
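The NAMENODE_PROPERTY, JOBTRACKER_PROPERTY, and APP_PATH_PROPERTY constants are defined elsewhere in RunAppMojo and are not shown above. A hedged guess at their shape, assuming they name the ${nameNode}/${jobTracker}-style variables the workflow and coordinator definitions reference; the values here are hypothetical, not taken from the kite source:

// Hypothetical declarations -- the real values live elsewhere in RunAppMojo.
private static final String NAMENODE_PROPERTY = "nameNode";
private static final String JOBTRACKER_PROPERTY = "jobTracker";
private static final String APP_PATH_PROPERTY = "appPath"; // read by coordinator.xml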
Example 2
Source File: OozieLocalServerIntegrationTest.java From hadoop-mini-clusters with Apache License 2.0
@Test
public void testSubmitWorkflow() throws Exception {
    LOG.info("OOZIE: Test Submit Workflow Start");

    FileSystem hdfsFs = hdfsLocalCluster.getHdfsFileSystemHandle();
    OozieClient oozie = oozieLocalServer.getOozieClient();

    Path appPath = new Path(hdfsFs.getHomeDirectory(), "testApp");
    hdfsFs.mkdirs(new Path(appPath, "lib"));
    Path workflow = new Path(appPath, "workflow.xml");

    // Set up the input directory and file
    hdfsFs.mkdirs(new Path(TEST_INPUT_DIR));
    hdfsFs.copyFromLocalFile(
        new Path(getClass().getClassLoader().getResource(TEST_INPUT_FILE).toURI()),
        new Path(TEST_INPUT_DIR));

    // Write workflow.xml
    String wfApp = "<workflow-app name=\"sugar-option-decision\" xmlns=\"uri:oozie:workflow:0.5\">\n"
        + "    <global>\n"
        + "        <job-tracker>${jobTracker}</job-tracker>\n"
        + "        <name-node>${nameNode}</name-node>\n"
        + "        <configuration>\n"
        + "            <property>\n"
        + "                <name>mapreduce.output.fileoutputformat.outputdir</name>\n"
        + "                <value>" + TEST_OUTPUT_DIR + "</value>\n"
        + "            </property>\n"
        + "            <property>\n"
        + "                <name>mapreduce.input.fileinputformat.inputdir</name>\n"
        + "                <value>" + TEST_INPUT_DIR + "</value>\n"
        + "            </property>\n"
        + "        </configuration>\n"
        + "    </global>\n"
        + "    <start to=\"first\"/>\n"
        + "    <action name=\"first\">\n"
        + "        <map-reduce> <prepare><delete path=\"" + TEST_OUTPUT_DIR + "\"/></prepare></map-reduce>\n"
        + "        <ok to=\"decision-second-option\"/>\n"
        + "        <error to=\"kill\"/>\n"
        + "    </action>\n"
        + "    <decision name=\"decision-second-option\">\n"
        + "        <switch>\n"
        + "            <case to=\"option\">${doOption}</case>\n"
        + "            <default to=\"second\"/>\n"
        + "        </switch>\n"
        + "    </decision>\n"
        + "    <action name=\"option\">\n"
        + "        <map-reduce> <prepare><delete path=\"" + TEST_OUTPUT_DIR + "\"/></prepare></map-reduce>\n"
        + "        <ok to=\"second\"/>\n"
        + "        <error to=\"kill\"/>\n"
        + "    </action>\n"
        + "    <action name=\"second\">\n"
        + "        <map-reduce> <prepare><delete path=\"" + TEST_OUTPUT_DIR + "\"/></prepare></map-reduce>\n"
        + "        <ok to=\"end\"/>\n"
        + "        <error to=\"kill\"/>\n"
        + "    </action>\n"
        + "    <kill name=\"kill\">\n"
        + "        <message>\n"
        + "            Failed to workflow, error message[${wf:errorMessage(wf:lastErrorNode())}]\n"
        + "        </message>\n"
        + "    </kill>\n"
        + "    <end name=\"end\"/>\n"
        + "</workflow-app>";

    Writer writer = new OutputStreamWriter(hdfsFs.create(workflow));
    writer.write(wfApp);
    writer.close();

    // Write job.properties
    Properties conf = oozie.createConfiguration();
    conf.setProperty(OozieClient.APP_PATH, workflow.toString());
    conf.setProperty(OozieClient.USER_NAME, UserGroupInformation.getCurrentUser().getUserName());
    conf.setProperty("nameNode", "hdfs://localhost:" + hdfsLocalCluster.getHdfsNamenodePort());
    conf.setProperty("jobTracker", mrLocalCluster.getResourceManagerAddress());
    conf.setProperty("doOption", "true");

    // Submit and check
    final String jobId = oozie.run(conf);
    WorkflowJob wf = oozie.getJobInfo(jobId);
    assertNotNull(wf);
    assertEquals(WorkflowJob.Status.RUNNING, wf.getStatus());

    // Poll until the job reaches a terminal (or stalled) state
    while (true) {
        Thread.sleep(1000);
        wf = oozie.getJobInfo(jobId);
        if (wf.getStatus() == WorkflowJob.Status.FAILED
            || wf.getStatus() == WorkflowJob.Status.KILLED
            || wf.getStatus() == WorkflowJob.Status.PREP
            || wf.getStatus() == WorkflowJob.Status.SUCCEEDED) {
            break;
        }
    }

    wf = oozie.getJobInfo(jobId);
    assertEquals(WorkflowJob.Status.SUCCEEDED, wf.getStatus());
    LOG.info("OOZIE: Workflow: {}", wf.toString());
    hdfsFs.close();
}
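The unbounded while (true) poll above will hang the test suite if the job wedges in RUNNING. A bounded variant is a common hardening step; here is a minimal sketch of a hypothetical awaitTerminal helper, not part of hadoop-mini-clusters, with an arbitrary timeout assumed:

// Hypothetical helper: poll with a deadline so a stuck job fails the test
// instead of hanging it.
private static WorkflowJob awaitTerminal(OozieClient oozie, String jobId, long timeoutMs)
        throws Exception {
    long deadline = System.currentTimeMillis() + timeoutMs;
    while (System.currentTimeMillis() < deadline) {
        WorkflowJob wf = oozie.getJobInfo(jobId);
        switch (wf.getStatus()) {
            case SUCCEEDED:
            case FAILED:
            case KILLED:
                return wf; // terminal state reached
            default:
                Thread.sleep(1000); // still PREP/RUNNING/SUSPENDED -- keep polling
        }
    }
    throw new AssertionError("Oozie job " + jobId + " did not finish within " + timeoutMs + " ms");
}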
Example 3
Source File: OozieLocalServerIntegrationTest.java From hadoop-mini-clusters with Apache License 2.0
@Test
public void testSubmitCoordinator() throws Exception {
    LOG.info("OOZIE: Test Submit Coordinator Start");

    FileSystem hdfsFs = hdfsLocalCluster.getHdfsFileSystemHandle();
    OozieClient oozie = oozieLocalServer.getOozieCoordClient();

    Path appPath = new Path(hdfsFs.getHomeDirectory(), "testApp");
    hdfsFs.mkdirs(new Path(appPath, "lib"));
    Path workflow = new Path(appPath, "workflow.xml");
    Path coordinator = new Path(appPath, "coordinator.xml");

    // Write workflow.xml and coordinator.xml
    String wfApp = "<workflow-app xmlns='uri:oozie:workflow:0.1' name='test-wf'>"
        + "    <start to='end'/>"
        + "    <end name='end'/>"
        + "</workflow-app>";

    String coordApp = "<coordinator-app timezone='UTC' end='2016-07-26T02:26Z' start='2016-07-26T01:26Z'"
        + " frequency='${coord:hours(1)}' name='test-coordinator' xmlns='uri:oozie:coordinator:0.4'>"
        + "    <action>"
        + "        <workflow>"
        + "            <app-path>" + workflow.toString() + "</app-path>"
        + "        </workflow>"
        + "    </action>"
        + "</coordinator-app>";

    Writer writer = new OutputStreamWriter(hdfsFs.create(workflow));
    writer.write(wfApp);
    writer.close();

    Writer coordWriter = new OutputStreamWriter(hdfsFs.create(coordinator));
    coordWriter.write(coordApp);
    coordWriter.close();

    // Write job.properties
    Properties conf = oozie.createConfiguration();
    conf.setProperty(OozieClient.COORDINATOR_APP_PATH, coordinator.toString());
    conf.setProperty(OozieClient.USER_NAME, UserGroupInformation.getCurrentUser().getUserName());

    // Submit and check
    final String jobId = oozie.submit(conf);
    CoordinatorJob coord = oozie.getCoordJobInfo(jobId);
    assertNotNull(coord);
    assertEquals(Job.Status.PREP, coord.getStatus());

    LOG.info("OOZIE: Coordinator: {}", coord.toString());
    hdfsFs.close();
}
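Note the contrast with Example 2: submit() only registers the job on the server, which is why this test asserts Job.Status.PREP rather than RUNNING. To actually start a submitted job you call start() on the client; run() performs submit plus start in one call. A minimal sketch continuing the example above (the logging line is an assumption, not from the original test):

// Hypothetical continuation: move the submitted (PREP) coordinator toward RUNNING.
oozie.start(jobId);
CoordinatorJob started = oozie.getCoordJobInfo(jobId);
LOG.info("OOZIE: Coordinator after start: {}", started.getStatus());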
Example 4
Source File: OozieJobsServiceImpl.java From searchanalytics-bigdata with MIT License
private void submitTopQueriesBundleCoordJob(String workFlowRoot)
        throws OozieClientException, InterruptedException {
    // OozieClient client = LocalOozie.getCoordClient();
    String oozieURL = System.getProperty("oozie.base.url");
    LOG.debug("Oozie BaseURL is: {} ", oozieURL);
    OozieClient client = new OozieClient(oozieURL);

    Properties conf = client.createConfiguration();
    conf.setProperty(OozieClient.BUNDLE_APP_PATH, workFlowRoot
            + "/load-and-index-customerqueries-bundle-configuration.xml");
    conf.setProperty("coordAppPathLoadCustomerQueries", workFlowRoot
            + "/coord-app-load-customerqueries.xml");
    conf.setProperty("coordAppPathIndexTopQueriesES", workFlowRoot
            + "/coord-app-index-topqueries-es.xml");
    conf.setProperty("nameNode", hadoopClusterService.getHDFSUri());
    conf.setProperty("jobTracker", hadoopClusterService.getJobTRackerUri());
    conf.setProperty("workflowRoot", workFlowRoot);

    String userName = System.getProperty("user.name");
    String oozieWorkFlowRoot = hadoopClusterService.getHDFSUri() + "/usr/" + userName + "/oozie";
    conf.setProperty("oozieWorkflowRoot", oozieWorkFlowRoot);

    Date now = new Date();
    conf.setProperty("jobStart",
            DateUtils.formatDateOozieTZ(new DateTime(now).minusDays(1).toDate()));
    conf.setProperty("jobStartIndex",
            DateUtils.formatDateOozieTZ(new DateTime(now).minusDays(1).plusMinutes(1).toDate()));
    conf.setProperty("jobEnd", DateUtils.formatDateOozieTZ(new DateTime().plusDays(2).toDate()));
    conf.setProperty("initialDataset", DateUtils.formatDateOozieTZ(now));
    conf.setProperty("tzOffset", "2");

    // Submit and start the bundle job
    String jobId = client.submit(conf);
    LOG.debug("Bundle job submitted");

    // Wait for the job, printing the status every 60 seconds (up to 3 tries)
    int retries = 3;
    for (int i = 1; i <= retries; i++) {
        Thread.sleep(60 * 1000);
        BundleJob bundleJobInfo = client.getBundleJobInfo(jobId);
        LOG.debug("Bundle job running ...");
        LOG.debug("bundleJobInfo Try: {}", i);
        LOG.debug("bundleJobInfo StartTime: {}", bundleJobInfo.getStartTime());
        LOG.debug("bundleJobInfo EndTime: {}", bundleJobInfo.getEndTime());
        LOG.debug("bundleJobInfo ConsoleURL: {}", bundleJobInfo.getConsoleUrl());
        LOG.debug("bundleJobInfo Status: {}", bundleJobInfo.getStatus());
        for (CoordinatorJob coordinatorJob : bundleJobInfo.getCoordinators()) {
            LOG.debug("bundleJobInfo Coord StartTime: {}", coordinatorJob.getStartTime());
            LOG.debug("bundleJobInfo Coord EndTime: {}", coordinatorJob.getEndTime());
            LOG.debug("bundleJobInfo Coord NextMaterializedTime: {}",
                    coordinatorJob.getNextMaterializedTime());
            LOG.debug("bundleJobInfo Frequency: {}", coordinatorJob.getFrequency());
            LOG.debug("bundleJobInfo Coord Status: {}", coordinatorJob.getStatus());
            for (CoordinatorAction action : coordinatorJob.getActions()) {
                LOG.debug("bundleJobInfo Action Id: {}", action.getId());
                LOG.debug("bundleJobInfo Action NominalTime: {}", action.getNominalTime());
                LOG.debug("bundleJobInfo Action RunConf: {}", action.getRunConf());
                LOG.debug("bundleJobInfo Action Status: {}", action.getStatus());
                LOG.debug("bundleJobInfo ActionConsoleURL: {}", action.getConsoleUrl());
                LOG.debug("bundleJobInfo ActionErrorMessage: {}", action.getErrorMessage());
            }
        }
        if (bundleJobInfo.getStatus() == Job.Status.RUNNING) {
            // Seeing RUNNING on the last retry means the state is stable; the job
            // will keep running even if the Hive action fails.
            if (i == retries) {
                LOG.info("Bundle Job in running state! " + bundleJobInfo.getStatus());
                break;
            } else {
                continue;
            }
        } else if (bundleJobInfo.getStatus() == Job.Status.PREMATER
                || bundleJobInfo.getStatus() == Job.Status.PREP) {
            // Still preparing
            continue;
        } else {
            throw new RuntimeException("Error occurred while running customer top queries bundle job! "
                    + bundleJobInfo.getStatus());
        }
    }
}
Example 5
Source File: OozieJobsServiceImpl.java From searchanalytics-bigdata with MIT License
private void submitCoordJob(String workFlowRoot)
        throws OozieClientException, InterruptedException {
    // OozieClient client = LocalOozie.getCoordClient();
    String oozieURL = System.getProperty("oozie.base.url");
    LOG.debug("Oozie BaseURL is: {} ", oozieURL);
    OozieClient client = new OozieClient(oozieURL);

    Properties conf = client.createConfiguration();
    conf.setProperty(OozieClient.COORDINATOR_APP_PATH, workFlowRoot
            + "/coord-app-hive-add-partition.xml");
    conf.setProperty("nameNode", hadoopClusterService.getHDFSUri());
    conf.setProperty("jobTracker", hadoopClusterService.getJobTRackerUri());
    conf.setProperty("workflowRoot", workFlowRoot);

    Date nowMinusOneMin = new DateTime().minusMinutes(1).toDate();
    Date now = new DateTime().toDate();
    conf.setProperty("jobStart", DateUtils.formatDateOozieTZ(nowMinusOneMin));
    conf.setProperty("jobEnd", DateUtils.formatDateOozieTZ(new DateTime().plusHours(2).toDate()));
    conf.setProperty("initialDataset", DateUtils.formatDateOozieTZ(now));
    conf.setProperty("tzOffset", "2");

    // Submit and start the coordinator job
    String jobId = client.submit(conf);
    LOG.debug("Workflow job submitted");

    // Wait for the job, printing the status every 60 seconds (up to 2 tries)
    int retries = 2;
    for (int i = 1; i <= retries; i++) {
        Thread.sleep(60 * 1000);
        CoordinatorJob coordJobInfo = client.getCoordJobInfo(jobId);
        LOG.debug("Workflow job running ...");
        LOG.debug("coordJobInfo Try: {}", i);
        LOG.debug("coordJobInfo StartTime: {}", coordJobInfo.getStartTime());
        LOG.debug("coordJobInfo NextMaterializedTime: {}", coordJobInfo.getNextMaterializedTime());
        LOG.debug("coordJobInfo EndTime: {}", coordJobInfo.getEndTime());
        LOG.debug("coordJobInfo Frequency: {}", coordJobInfo.getFrequency());
        LOG.debug("coordJobInfo ConsoleURL: {}", coordJobInfo.getConsoleUrl());
        LOG.debug("coordJobInfo Status: {}", coordJobInfo.getStatus());
        for (CoordinatorAction action : coordJobInfo.getActions()) {
            LOG.debug("coordJobInfo Action Id: {}", action.getId());
            LOG.debug("coordJobInfo Action NominalTime: {}", action.getNominalTime());
            LOG.debug("coordJobInfo Action RunConf: {}", action.getRunConf());
            LOG.debug("coordJobInfo Action Status: {}", action.getStatus());
            LOG.debug("coordJobInfo ActionConsoleURL: {}", action.getConsoleUrl());
            LOG.debug("coordJobInfo ActionErrorMessage: {}", action.getErrorMessage());
        }
        if (coordJobInfo.getStatus() == Job.Status.RUNNING) {
            // Seeing RUNNING on the last retry means the state is stable; the job
            // will keep running even if the Hive action fails.
            if (i == retries) {
                LOG.info("Coord Job in running state!");
                break;
            } else {
                continue;
            }
        } else if (coordJobInfo.getStatus() == Job.Status.PREMATER
                || coordJobInfo.getStatus() == Job.Status.PREP) {
            // Still preparing
            continue;
        } else {
            throw new RuntimeException("Error occurred while running coord job!");
        }
    }
}
Example 6
Source File: OozieJobsServiceImpl.java From searchanalytics-bigdata with MIT License
private void submitWorkflowJob(String workFlowRoot)
        throws OozieClientException, InterruptedException {
    String oozieURL = System.getProperty("oozie.base.url");
    LOG.debug("Oozie BaseURL is: {} ", oozieURL);
    OozieClient client = new OozieClient(oozieURL);

    // Build zero-padded YEAR/MONTH/DAY/HOUR values for the partition
    DateTime now = new DateTime();
    int monthOfYear = now.getMonthOfYear();
    int dayOfMonth = now.getDayOfMonth();
    int hourOfDay = now.getHourOfDay();
    String year = String.valueOf(now.getYear());
    String month = monthOfYear < 10 ? "0" + String.valueOf(monthOfYear) : String.valueOf(monthOfYear);
    String day = dayOfMonth < 10 ? "0" + String.valueOf(dayOfMonth) : String.valueOf(dayOfMonth);
    String hour = hourOfDay < 10 ? "0" + String.valueOf(hourOfDay) : String.valueOf(hourOfDay);

    Properties conf = client.createConfiguration();
    conf.setProperty(OozieClient.APP_PATH, workFlowRoot + "/hive-action-add-partition.xml");
    conf.setProperty("nameNode", hadoopClusterService.getHDFSUri());
    conf.setProperty("jobTracker", hadoopClusterService.getJobTRackerUri());
    conf.setProperty("workflowRoot", workFlowRoot);
    conf.setProperty("YEAR", year);
    conf.setProperty("MONTH", month);
    conf.setProperty("DAY", day);
    conf.setProperty("HOUR", hour);
    conf.setProperty("oozie.use.system.libpath", "true");

    // Submit and start the workflow job
    client.setDebugMode(1);
    // client.dryrun(conf);
    String jobId = client.run(conf); // submit(conf);
    LOG.debug("Workflow job submitted");

    // Wait for the job, printing the status every 60 seconds (up to 3 tries)
    int retries = 3;
    for (int i = 1; i <= retries; i++) {
        Thread.sleep(60 * 1000);
        WorkflowJob jobInfo = client.getJobInfo(jobId);
        Status jobStatus = jobInfo.getStatus();
        LOG.debug("Workflow job running ...");
        LOG.debug("HiveActionWorkflowJob Status Try: {}", i);
        LOG.debug("HiveActionWorkflowJob Id: {}", jobInfo.getId());
        LOG.debug("HiveActionWorkflowJob StartTime: {}", jobInfo.getStartTime());
        LOG.debug("HiveActionWorkflowJob EndTime: {}", jobInfo.getEndTime());
        LOG.debug("HiveActionWorkflowJob ConsoleURL: {}", jobInfo.getConsoleUrl());
        LOG.debug("HiveActionWorkflowJob Status: {}", jobInfo.getStatus());

        WorkflowAction workflowAction = jobInfo.getActions().get(0);
        LOG.debug("HiveActionWorkflowJob Action consoleURL: {}", workflowAction.getConsoleUrl());
        LOG.debug("HiveActionWorkflowJob Action Name: {}", workflowAction.getName());
        LOG.debug("HiveActionWorkflowJob Action error message: {}", workflowAction.getErrorMessage());
        LOG.debug("HiveActionWorkflowJob Action Status: {}", workflowAction.getStatus());
        LOG.debug("HiveActionWorkflowJob Action data: {}", workflowAction.getData());
        LOG.debug("HiveActionWorkflowJob Action conf: {}", workflowAction.getConf());
        LOG.debug("HiveActionWorkflowJob Action retries: {}", workflowAction.getRetries());
        LOG.debug("HiveActionWorkflowJob Action id: {}", workflowAction.getId());
        LOG.debug("HiveActionWorkflowJob Action start time: {}", workflowAction.getStartTime());
        LOG.debug("HiveActionWorkflowJob Action end time: {}", workflowAction.getEndTime());
        LOG.debug("HiveActionWorkflowJob Oozie Url: {}", client.getOozieUrl());

        if (jobStatus == WorkflowJob.Status.SUCCEEDED) {
            LOG.info("Oozie workflow job was successful!" + jobStatus);
            break;
        } else if (jobStatus == WorkflowJob.Status.PREP || jobStatus == WorkflowJob.Status.RUNNING) {
            if (i == retries) {
                throw new RuntimeException("Error executing workflow job!" + jobStatus);
            } else {
                continue;
            }
        } else {
            throw new RuntimeException("Error executing workflow job!" + jobStatus);
        }
    }
}
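When the loop above throws because the workflow failed, the server-side log usually holds the real cause; OozieClient can fetch it directly with getJobLog(). A minimal sketch of what the error branch could do, assuming placement inside the final else of the example (this diagnostic is not in the original source):

// Hypothetical diagnostic: dump the server-side job log before failing.
if (jobStatus == WorkflowJob.Status.FAILED || jobStatus == WorkflowJob.Status.KILLED) {
    LOG.error("Oozie job log:\n{}", client.getJobLog(jobId));
}
throw new RuntimeException("Error executing workflow job!" + jobStatus);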