Java Code Examples for org.apache.oozie.client.OozieClient#submit()

The following examples show how to use org.apache.oozie.client.OozieClient#submit(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: OozieLocalServerIntegrationTest.java    From hadoop-mini-clusters with Apache License 2.0 4 votes vote down vote up
@Test
public void testSubmitCoordinator() throws Exception {

    LOG.info("OOZIE: Test Submit Coordinator Start");

    FileSystem hdfsFs = hdfsLocalCluster.getHdfsFileSystemHandle();
    try {
        OozieClient oozie = oozieLocalServer.getOozieCoordClient();

        // Stage the coordinator application directory on the mini-cluster HDFS.
        Path appPath = new Path(hdfsFs.getHomeDirectory(), "testApp");
        hdfsFs.mkdirs(new Path(appPath, "lib"));
        Path workflow = new Path(appPath, "workflow.xml");
        Path coordinator = new Path(appPath, "coordinator.xml");

        // Minimal no-op workflow: start transitions straight to end.
        String wfApp =
                "<workflow-app xmlns='uri:oozie:workflow:0.1' name='test-wf'>" +
                        "    <start to='end'/>" +
                        "    <end name='end'/>" +
                        "</workflow-app>";

        // Hourly coordinator that points at the workflow written above.
        String coordApp =
                "<coordinator-app timezone='UTC' end='2016-07-26T02:26Z' start='2016-07-26T01:26Z' frequency='${coord:hours(1)}' name='test-coordinator' xmlns='uri:oozie:coordinator:0.4'>" +
                        "    <action>" +
                        "        <workflow>" +
                        "            <app-path>" + workflow.toString() + "</app-path>" +
                        "        </workflow>" +
                        "    </action>" +
                        "</coordinator-app>";

        // try-with-resources guarantees the HDFS output streams are closed even
        // if a write fails (the original leaked them on exception paths).
        // NOTE(review): OutputStreamWriter falls back to the platform default
        // charset here — specify UTF-8 explicitly if the XML ever needs non-ASCII.
        try (Writer writer = new OutputStreamWriter(hdfsFs.create(workflow))) {
            writer.write(wfApp);
        }

        try (Writer coordWriter = new OutputStreamWriter(hdfsFs.create(coordinator))) {
            coordWriter.write(coordApp);
        }

        // Build the job configuration (the equivalent of job.properties).
        Properties conf = oozie.createConfiguration();
        conf.setProperty(OozieClient.COORDINATOR_APP_PATH, coordinator.toString());
        conf.setProperty(OozieClient.USER_NAME, UserGroupInformation.getCurrentUser().getUserName());

        // Submit without starting: a freshly submitted coordinator must report PREP.
        final String jobId = oozie.submit(conf);
        CoordinatorJob coord = oozie.getCoordJobInfo(jobId);
        assertNotNull(coord);
        assertEquals(Job.Status.PREP, coord.getStatus());

        LOG.info("OOZIE: Coordinator: {}", coord.toString());
    } finally {
        // Release the mini-cluster file system handle even when an assertion fails.
        hdfsFs.close();
    }
}
 
Example 2
Source File: OozieJobsServiceImpl.java    From searchanalytics-bigdata with MIT License 4 votes vote down vote up
/**
 * Submits the customer top-queries Oozie bundle (load + ES-index coordinators)
 * and polls its status until it is observed RUNNING, still preparing after the
 * last poll, or in a terminal error state.
 *
 * @param workFlowRoot HDFS directory holding the bundle/coordinator XML definitions
 * @throws OozieClientException if the Oozie server rejects the submission or a status query
 * @throws InterruptedException if the polling sleep is interrupted
 * @throws RuntimeException if the bundle reaches a state other than RUNNING/PREMATER/PREP
 */
private void submitTopQueriesBundleCoordJob(String workFlowRoot)
		throws OozieClientException, InterruptedException {
	// OozieClient client = LocalOozie.getCoordClient();
	String oozieURL = System.getProperty("oozie.base.url");
	LOG.debug("Oozie BaseURL is: {} ", oozieURL);
	OozieClient client = new OozieClient(oozieURL);
	Properties conf = client.createConfiguration();
	conf.setProperty(OozieClient.BUNDLE_APP_PATH, workFlowRoot
			+ "/load-and-index-customerqueries-bundle-configuration.xml");
	conf.setProperty("coordAppPathLoadCustomerQueries", workFlowRoot
			+ "/coord-app-load-customerqueries.xml");
	conf.setProperty("coordAppPathIndexTopQueriesES", workFlowRoot
			+ "/coord-app-index-topqueries-es.xml");

	conf.setProperty("nameNode", hadoopClusterService.getHDFSUri());
	conf.setProperty("jobTracker", hadoopClusterService.getJobTRackerUri());
	conf.setProperty("workflowRoot", workFlowRoot);
	String userName = System.getProperty("user.name");
	String oozieWorkFlowRoot = hadoopClusterService.getHDFSUri() + "/usr/"
			+ userName + "/oozie";
	conf.setProperty("oozieWorkflowRoot", oozieWorkFlowRoot);
	// Window: start one day in the past so the first action materializes
	// immediately; end two days out.
	Date now = new Date();
	conf.setProperty("jobStart", DateUtils.formatDateOozieTZ(new DateTime(
			now).minusDays(1).toDate()));
	conf.setProperty("jobStartIndex", DateUtils
			.formatDateOozieTZ(new DateTime(now).minusDays(1).plusMinutes(1).toDate()));
	conf.setProperty("jobEnd", DateUtils.formatDateOozieTZ(new DateTime()
			.plusDays(2).toDate()));
	conf.setProperty("initialDataset", DateUtils.formatDateOozieTZ(now));
	conf.setProperty("tzOffset", "2");

	// submit and start the bundle job
	String jobId = client.submit(conf);

	LOG.debug("Bundle job submitted");
	// Poll the bundle status up to `retries` times, 60 seconds apart,
	// logging the full coordinator/action detail on each pass.
	int retries = 3;
	for (int i = 1; i <= retries; i++) {
		// Poll interval: 60 seconds per attempt.
		Thread.sleep(60 * 1000);

		BundleJob bundleJobInfo = client.getBundleJobInfo(jobId);
		LOG.debug("Bundle job running ...");
		LOG.debug("bundleJobInfo Try: {}", i);
		LOG.debug("bundleJobInfo StartTime: {}",
				bundleJobInfo.getStartTime());
		LOG.debug("bundleJobInfo EndTime: {}", bundleJobInfo.getEndTime());
		LOG.debug("bundleJobInfo ConsoleURL: {}",
				bundleJobInfo.getConsoleUrl());
		LOG.debug("bundleJobInfo Status: {}", bundleJobInfo.getStatus());

		for (CoordinatorJob coordinatorJob : bundleJobInfo
				.getCoordinators()) {
			LOG.debug("bundleJobInfo Coord StartTime: {}",
					coordinatorJob.getStartTime());
			LOG.debug("bundleJobInfo Coord EndTime: {}",
					coordinatorJob.getEndTime());
			LOG.debug("bundleJobInfo Coord NextMaterizedTime: {}",
					coordinatorJob.getNextMaterializedTime());
			LOG.debug("bundleJobInfo Frequency: {}",
					coordinatorJob.getFrequency());
			LOG.debug("bundleJobInfo Coord Status: {}",
					coordinatorJob.getStatus());
			for (CoordinatorAction action : coordinatorJob.getActions()) {
				LOG.debug("bundleJobInfo Action Id: {}", action.getId());
				LOG.debug("bundleJobInfo Action NominalTimeL: {}",
						action.getNominalTime());
				LOG.debug("bundleJobInfo Action Runconf: {}",
						action.getRunConf());
				LOG.debug("bundleJobInfo Action Status: {}",
						action.getStatus());
				LOG.debug("bundleJobInfo ActionConsoleURL: {}",
						action.getConsoleUrl());
				LOG.debug("bundleJobInfo ActionErrorMessage: {}",
						action.getErrorMessage());
			}
		}

		if (bundleJobInfo.getStatus() == Job.Status.RUNNING) {
			// RUNNING observed on the final attempt is treated as stable/success.
			// Job will keep running even if hive action fails.
			if (i == retries) {
				// Parameterized logging instead of string concatenation (SLF4J idiom).
				LOG.info("Bundle Job in running state! {}",
						bundleJobInfo.getStatus());
				break;
			} else {
				continue;
			}
		} else if (bundleJobInfo.getStatus() == Job.Status.PREMATER
				|| bundleJobInfo.getStatus() == Job.Status.PREP) {
			// still preparing.
			continue;
		} else {
			// Any other status (KILLED, FAILED, SUSPENDED, ...) is an error.
			throw new RuntimeException(
					"Error occurred while running customer top queries bundle job! "
							+ bundleJobInfo.getStatus());
		}
	}
}
 
Example 3
Source File: OozieJobsServiceImpl.java    From searchanalytics-bigdata with MIT License 4 votes vote down vote up
/**
 * Submits the hive-add-partition coordinator job and polls its status until it
 * is observed RUNNING, still preparing after the last poll, or in a terminal
 * error state.
 *
 * @param workFlowRoot HDFS directory holding the coordinator XML definition
 * @throws OozieClientException if the Oozie server rejects the submission or a status query
 * @throws InterruptedException if the polling sleep is interrupted
 * @throws RuntimeException if the coordinator reaches a state other than RUNNING/PREMATER/PREP
 */
private void submitCoordJob(String workFlowRoot)
		throws OozieClientException, InterruptedException {
	// OozieClient client = LocalOozie.getCoordClient();
	String oozieURL = System.getProperty("oozie.base.url");
	LOG.debug("Oozie BaseURL is: {} ", oozieURL);
	OozieClient client = new OozieClient(oozieURL);
	Properties conf = client.createConfiguration();
	conf.setProperty(OozieClient.COORDINATOR_APP_PATH, workFlowRoot
			+ "/coord-app-hive-add-partition.xml");
	conf.setProperty("nameNode", hadoopClusterService.getHDFSUri());
	conf.setProperty("jobTracker", hadoopClusterService.getJobTRackerUri());
	conf.setProperty("workflowRoot", workFlowRoot);
	// Window: start one minute in the past so the first action materializes
	// immediately; end two hours out.
	Date nowMinusOneMin = new DateTime().minusMinutes(1).toDate();
	Date now = new DateTime().toDate();
	conf.setProperty("jobStart",
			DateUtils.formatDateOozieTZ(nowMinusOneMin));
	conf.setProperty("jobEnd", DateUtils.formatDateOozieTZ(new DateTime()
			.plusHours(2).toDate()));
	conf.setProperty("initialDataset", DateUtils.formatDateOozieTZ(now));
	conf.setProperty("tzOffset", "2");

	// submit and start the coordinator job
	String jobId = client.submit(conf);

	LOG.debug("Coordinator job submitted");
	// Poll the coordinator status up to `retries` times, 60 seconds apart,
	// logging the full action detail on each pass.
	int retries = 2;
	for (int i = 1; i <= retries; i++) {
		// Poll interval: 60 seconds per attempt.
		Thread.sleep(60 * 1000);

		CoordinatorJob coordJobInfo = client.getCoordJobInfo(jobId);
		LOG.debug("Coordinator job running ...");
		LOG.debug("coordJobInfo Try: {}", i);
		LOG.debug("coordJobInfo StartTime: {}", coordJobInfo.getStartTime());
		LOG.debug("coordJobInfo NextMaterizedTime: {}",
				coordJobInfo.getNextMaterializedTime());
		LOG.debug("coordJobInfo EndTime: {}", coordJobInfo.getEndTime());
		LOG.debug("coordJobInfo Frequency: {}", coordJobInfo.getFrequency());
		LOG.debug("coordJobInfo ConsoleURL: {}",
				coordJobInfo.getConsoleUrl());
		LOG.debug("coordJobInfo Status: {}", coordJobInfo.getStatus());
		for (CoordinatorAction action : coordJobInfo.getActions()) {
			LOG.debug("coordJobInfo Action Id: {}", action.getId());
			LOG.debug("coordJobInfo Action NominalTimeL: {}",
					action.getNominalTime());
			LOG.debug("coordJobInfo Action Runconf: {}",
					action.getRunConf());
			LOG.debug("coordJobInfo Action Status: {}", action.getStatus());
			LOG.debug("coordJobInfo ActionConsoleURL: {}",
					action.getConsoleUrl());
			LOG.debug("coordJobInfo ActionErrorMessage: {}",
					action.getErrorMessage());
		}
		if (coordJobInfo.getStatus() == Job.Status.RUNNING) {
			// RUNNING observed on the final attempt is treated as stable/success.
			// Job will keep running even if hive action fails.
			if (i == retries) {
				LOG.info("Coord Job in running state!");
				break;
			} else {
				continue;
			}
		} else if (coordJobInfo.getStatus() == Job.Status.PREMATER
				|| coordJobInfo.getStatus() == Job.Status.PREP) {
			// still preparing.
			continue;
		} else {
			// Any other status (KILLED, FAILED, SUSPENDED, ...) is an error.
			// Include the observed status for diagnosability, matching the
			// bundle-submission path in this class.
			throw new RuntimeException(
					"Error occurred while running coord job! "
							+ coordJobInfo.getStatus());
		}
	}
}