org.apache.oozie.client.OozieClientException Java Examples
The following examples show how to use
org.apache.oozie.client.OozieClientException.
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: OozieUtil.java From EasyML with Apache License 2.0 | 6 votes |
/** * Submit Oozie Job * * @param app_path * @throws OozieClientException * @throws IOException */ public static String submit(String app_path) throws OozieClientException, IOException { // create a workflow job configuration and set the workflow application path Properties conf = wc.createConfiguration(); conf.setProperty(OozieClient.APP_PATH, app_path); // setting workflow parameters conf.setProperty("queueName", Constants.QUEUE_NAME); conf.setProperty("nameNode", Constants.NAME_NODE); conf.setProperty("jobTracker", Constants.JOB_TRACKER); conf.setProperty("appPath", app_path); String jobId = wc.run(conf); logger.info("submit workflow job:" + jobId); return jobId; }
Example #2
Source File: OozieJobsServiceImplTest.java From searchanalytics-bigdata with MIT License | 6 votes |
/** The hive add-partition coordinator job must load rows into the search_clicks table. */
@Test
public void startHiveAddPartitionCoordJob() throws OozieClientException,
    InterruptedException, IllegalArgumentException, IOException {
  prepareHiveData();
  int hdfsItems = printAndCountHdfsFileDirData(
      hadoopClusterService.getHDFSUri() + "/searchevents",
      "searchevents", false, true);
  assertEquals(200, hdfsItems);
  hiveSearchClicksService.printHivePartitions(DBNAME_SEARCH, TBNAME_SEARCH_CLICKS);
  oozieJobsService.startHiveAddPartitionCoordJob();
  System.out.println("Coord job completed ...");
  hiveSearchClicksService.printHivePartitions(DBNAME_SEARCH, TBNAME_SEARCH_CLICKS);
  int rowCount = hiveSearchClicksService.getTotalRowCount(
      DBNAME_SEARCH, TBNAME_SEARCH_CLICKS);
  System.out.println("totalRowCount : " + rowCount);
  assertTrue(rowCount > 0);
}
Example #3
Source File: DownloadRequestServiceImpl.java From occurrence with Apache License 2.0 | 6 votes |
/**
 * Cancels a download: marks it CANCELLED and kills the backing Oozie workflow.
 * A download that exists but is not in a running status is left untouched.
 *
 * @throws NotFoundException if no download exists for the key
 * @throws ServiceUnavailableException if Oozie cannot be reached
 */
@Override
public void cancel(String downloadKey) {
  try {
    Download download = occurrenceDownloadService.get(downloadKey);
    if (download == null) {
      throw new NotFoundException(String.format("Download %s not found", downloadKey));
    }
    if (RUNNING_STATUSES.contains(download.getStatus())) {
      updateDownloadStatus(download, Download.Status.CANCELLED);
      client.kill(DownloadUtils.downloadToWorkflowId(downloadKey));
      LOG.info("Download {} cancelled", downloadKey);
    }
  } catch (OozieClientException e) {
    throw new ServiceUnavailableException("Failed to cancel download " + downloadKey, e);
  }
}
Example #4
Source File: OozieUtil.java From EasyML with Apache License 2.0 | 6 votes |
/**
 * Resume a suspended Oozie job, then refresh and persist its status.
 * (The original javadoc incorrectly read "Kill oozie job".)
 *
 * @param jobID id of the Oozie job to resume
 * @throws OozieClientException if the Oozie server rejects the resume request
 * @throws IOException on communication failure
 */
public static void resume(String jobID) throws OozieClientException, IOException {
  wc.resume(jobID);
  OozieJob job = new OozieJob();
  job.setId(jobID);
  try {
    // Prefer the persisted record when one exists so its other columns are preserved.
    OozieJob temp = SecureDao.getObject(job);
    if ( temp != null ) {
      job = temp;
    }
    // Re-read the live status from Oozie and store it back.
    job.setStatus(getJob(jobID).getStatus());
    System.out.println("[resume Status]" + job.getStatus());
    String[] setFields = {"status"};
    String[] condFields = {"jobid"};
    SecureDao.update(job, setFields, condFields);
  } catch (Exception e) {
    // Best-effort persistence: the job itself has already been resumed.
    e.printStackTrace();
  }
}
Example #5
Source File: OozieUtil.java From EasyML with Apache License 2.0 | 6 votes |
/**
 * Suspend a running Oozie job, then refresh and persist its status.
 *
 * @param jobID id of the Oozie job to suspend
 * @throws OozieClientException if the Oozie server rejects the request
 * @throws IOException on communication failure
 */
public static void suspend(String jobID) throws OozieClientException, IOException {
  wc.suspend(jobID);
  OozieJob record = new OozieJob();
  record.setId(jobID);
  try {
    // Prefer the persisted record when one exists so its other columns are kept.
    OozieJob stored = SecureDao.getObject(record);
    if (stored != null) {
      record = stored;
    }
    record.setStatus(getJob(jobID).getStatus());
    System.out.println("[suspend Status]" + record.getStatus());
    String[] setFields = {"status"};
    String[] condFields = {"jobid"};
    SecureDao.update(record, setFields, condFields);
  } catch (Exception e) {
    e.printStackTrace();
  }
}
Example #6
Source File: OozieUtil.java From EasyML with Apache License 2.0 | 6 votes |
/**
 * Kill an Oozie job, then refresh and persist its status.
 *
 * @param jobID id of the Oozie job to kill
 * @throws OozieClientException if the Oozie server rejects the request
 * @throws IOException on communication failure
 */
public static void kill(String jobID) throws OozieClientException, IOException {
  wc.kill(jobID);
  OozieJob record = new OozieJob();
  record.setId(jobID);
  try {
    // Prefer the persisted record when one exists so its other columns are kept.
    OozieJob stored = SecureDao.getObject(record);
    if (stored != null) {
      record = stored;
    }
    record.setStatus(getJob(jobID).getStatus());
    System.out.println("[kill Status]" + record.getStatus());
    String[] setFields = {"status"};
    String[] condFields = {"jobid"};
    SecureDao.update(record, setFields, condFields);
  } catch (Exception e) {
    e.printStackTrace();
  }
}
Example #7
Source File: DownloadServiceImplTest.java From occurrence with Apache License 2.0 | 5 votes |
/** An Oozie submission failure must surface as ServiceUnavailableException. */
@Test
public void testFailedCreate() throws OozieClientException {
  doThrow(new OozieClientException("foo", "bar")).when(oozieClient).run(any(Properties.class));
  DownloadRequest request =
      new PredicateDownloadRequest(DEFAULT_TEST_PREDICATE, "markus", null, true, DownloadFormat.DWCA);
  try {
    requestService.create(request);
    fail();
  } catch (ServiceUnavailableException expected) {
    // expected outcome: the Oozie error was translated
  }
}
Example #8
Source File: DownloadServiceImplTest.java From occurrence with Apache License 2.0 | 5 votes |
/** A successful Oozie run must yield the mapped download id. */
@Test
public void testCreate() throws OozieClientException {
  when(oozieClient.run(any(Properties.class))).thenReturn(JOB_ID);
  DownloadRequest request =
      new PredicateDownloadRequest(DEFAULT_TEST_PREDICATE, "markus", null, true, DownloadFormat.DWCA);
  String downloadKey = requestService.create(request);
  assertThat(downloadKey, equalTo(DOWNLOAD_ID));
}
Example #9
Source File: DownloadRequestServiceImpl.java From occurrence with Apache License 2.0 | 5 votes |
/**
 * Creates a download: enforces complexity and simultaneous-download limits,
 * submits the Oozie workflow, persists the download record and returns its id.
 *
 * @param request the download request; must not be null
 * @return the download id derived from the Oozie workflow id
 * @throws WebApplicationException 413 when complexity limits are exceeded,
 *         420 when the creator already has too many simultaneous downloads
 * @throws ServiceUnavailableException if the Oozie submission fails
 */
@Override
public String create(DownloadRequest request) {
  LOG.debug("Trying to create download from request [{}]", request);
  Preconditions.checkNotNull(request);
  try {
    // Refuse requests whose predicate is too complex to execute.
    String exceedComplexityLimit = downloadLimitsService.exceedsDownloadComplexity(request);
    if (exceedComplexityLimit != null) {
      LOG.info("Download request refused as it would exceed complexity limits");
      Response tooBig = Response
          .status(GbifResponseStatus.PAYLOAD_TOO_LARGE)
          .entity("A download limitation is exceeded:\n" + exceedComplexityLimit + "\n")
          .type("text/plain")
          .build();
      throw new WebApplicationException(tooBig);
    }
    // Refuse requests when the user already runs too many downloads at once.
    String exceedSimultaneousLimit = downloadLimitsService.exceedsSimultaneousDownloadLimit(request.getCreator());
    if (exceedSimultaneousLimit != null) {
      LOG.info("Download request refused as it would exceed simultaneous limits");
      Response calm = Response
          .status(GbifResponseStatus.ENHANCE_YOUR_CALM)
          .entity("A download limitation is exceeded:\n" + exceedSimultaneousLimit + "\n")
          .type("text/plain")
          .build();
      throw new WebApplicationException(calm);
    }
    // Submit the workflow and persist the mapping before returning.
    String jobId = client.run(parametersBuilder.buildWorkflowParameters(request));
    LOG.debug("Oozie job id is: [{}]", jobId);
    String downloadId = DownloadUtils.workflowToDownloadId(jobId);
    persistDownload(request, downloadId);
    return downloadId;
  } catch (OozieClientException e) {
    LOG.error("Failed to create download job", e);
    throw new ServiceUnavailableException("Failed to create download job", e);
  }
}
Example #10
Source File: DownloadServiceImplTest.java From occurrence with Apache License 2.0 | 5 votes |
/** A notification address on the request must be passed through to the Oozie job properties. */
@Test
public void testNotification() throws OozieClientException {
  when(oozieClient.run(any(Properties.class))).thenReturn(JOB_ID);
  DownloadRequest request = new PredicateDownloadRequest(DEFAULT_TEST_PREDICATE, "markus",
      Lists.newArrayList(TEST_EMAIL), true, DownloadFormat.DWCA);
  String downloadKey = requestService.create(request);
  assertThat(downloadKey, equalTo(DOWNLOAD_ID));
  // Capture the properties handed to Oozie and verify the email made it in.
  ArgumentCaptor<Properties> captor = ArgumentCaptor.forClass(Properties.class);
  verify(oozieClient).run(captor.capture());
  assertThat(captor.getValue().getProperty(Constants.NOTIFICATION_PROPERTY), equalTo(TEST_EMAIL));
}
Example #11
Source File: RunAppMojo.java From kite with Apache License 2.0 | 5 votes |
public void execute() throws MojoExecutionException, MojoFailureException { OozieClient oozieClient = new OozieClient(oozieUrl); Properties conf = oozieClient.createConfiguration(); if (jobProperties != null) { conf.putAll(jobProperties); } if (hadoopConfiguration != null) { conf.putAll(hadoopConfiguration); String hadoopFs = hadoopConfiguration.getProperty("fs.default.name"); if (hadoopFs == null) { throw new MojoExecutionException("Missing property 'fs.default.name' in " + "hadoopConfiguration"); } String hadoopJobTracker = hadoopConfiguration.getProperty("mapred.job.tracker"); if (hadoopJobTracker == null) { throw new MojoExecutionException("Missing property 'mapred.job.tracker' in " + "hadoopConfiguration"); } conf.put(NAMENODE_PROPERTY, hadoopFs); conf.put(JOBTRACKER_PROPERTY, hadoopJobTracker); } String appPath = getAppPath().toString(); conf.setProperty(getAppPathPropertyName(), appPath); conf.setProperty(APP_PATH_PROPERTY, appPath); // used in coordinator.xml getLog().info("App path: " + appPath); try { String jobId = oozieClient.run(conf); getLog().info("Running Oozie job " + jobId); } catch (OozieClientException e) { throw new MojoExecutionException("Error running Oozie job", e); } }
Example #12
Source File: OozieExternalService.java From celos with Apache License 2.0 | 5 votes |
/** Looks up the Oozie workflow for the given job id and wraps its status string. */
@Override
public ExternalStatus getStatus(SlotID unused, String jobId) throws ExternalServiceException {
  try {
    WorkflowJob info = client.getJobInfo(jobId);
    return new OozieExternalStatus(info.getStatus().toString());
  } catch (OozieClientException e) {
    throw new ExternalServiceException(e);
  }
}
Example #13
Source File: OozieExternalService.java From celos with Apache License 2.0 | 5 votes |
/** Kills the external Oozie job, translating client failures into ExternalServiceException. */
@Override
public void kill(SlotID unused, String externalID) throws ExternalServiceException {
  try {
    client.kill(externalID);
  } catch (OozieClientException oce) {
    throw new ExternalServiceException(oce);
  }
}
Example #14
Source File: OozieExternalService.java From celos with Apache License 2.0 | 5 votes |
/** Starts the external Oozie job, translating client failures into ExternalServiceException. */
@Override
public void start(SlotID unused, String externalID) throws ExternalServiceException {
  try {
    client.start(externalID);
  } catch (OozieClientException oce) {
    throw new ExternalServiceException(oce);
  }
}
Example #15
Source File: HadoopClientServicesImpl.java From pentaho-hadoop-shims with Apache License 2.0 | 5 votes |
/** Verifies the Oozie web-service version is supported, wrapping failures with the error code. */
public void validateOozieWSVersion() throws HadoopClientServicesException {
  try {
    oozieClient.validateWSVersion();
  } catch ( OozieClientException oce ) {
    throw new HadoopClientServicesException( oce, oce.getErrorCode() );
  }
}
Example #16
Source File: HadoopClientServicesImpl.java From pentaho-hadoop-shims with Apache License 2.0 | 5 votes |
/** Runs an Oozie job from the given properties and returns a delegate around the job handle. */
public OozieJobInfo runOozie( Properties props ) throws HadoopClientServicesException {
  try {
    return new OozieJobInfoDelegate( new OozieJobInfoImpl( oozieClient.run( props ), oozieClient ) );
  } catch ( OozieClientException oce ) {
    throw new HadoopClientServicesException( oce, oce.getErrorCode() );
  }
}
Example #17
Source File: CallbackServiceTest.java From occurrence with Apache License 2.0 | 5 votes |
/**
 * Drives the callback for a SUCCEEDED job whose configuration XML carries
 * user, notification and filter properties; verification presumably happens
 * via the mocked collaborators — TODO confirm against the enclosing test class.
 */
@Test
public void testNotificationSent() throws OozieClientException, MessagingException {
  WorkflowJob job = mock(WorkflowJob.class);
  when(oozieClient.getJobInfo(JOB_ID)).thenReturn(job);
  when(job.getId()).thenReturn(JOB_ID);
  when(job.getCreatedTime()).thenReturn(new Date());
  // Minimal workflow configuration XML the callback is expected to parse.
  when(job.getConf())
      .thenReturn(
          "<configuration>" + "<property><name>" + Constants.USER_PROPERTY + "</name>"
              + "<value>test</value></property>" + "<property><name>"
              + Constants.NOTIFICATION_PROPERTY + "</name>" + "<value>[email protected]</value></property>"
              + "<property><name>" + Constants.FILTER_PROPERTY + "</name>"
              + "<value>{\"type\":\"equals\",\"key\":\"DATASET_KEY\",\"value\":\"8575f23e-f762-11e1-a439-00145eb45e9a\"}</value></property>"
              + "</configuration>");
  service.processCallback(JOB_ID, SUCCEEDED);
}
Example #18
Source File: HadoopClientServicesImpl.java From pentaho-hadoop-shims with Apache License 2.0 | 5 votes |
/** Returns the Oozie protocol URL, wrapping client failures with the error code. */
public String getOozieProtocolUrl() throws HadoopClientServicesException {
  try {
    return oozieClient.getProtocolUrl();
  } catch ( OozieClientException oce ) {
    throw new HadoopClientServicesException( oce, oce.getErrorCode() );
  }
}
Example #19
Source File: OozieJobsServiceImplTest.java From searchanalytics-bigdata with MIT License | 5 votes |
@Test @Ignore public void startIndexTopCustomerQueryBundleCoordJob() throws OozieClientException, InterruptedException, IllegalArgumentException, IOException { //Make sure search_clicks data exists. prepareHiveData(); int itemsCount = printAndCountHdfsFileDirData( hadoopClusterService.getHDFSUri() + "/searchevents", "searchevents", false, true); assertEquals(200, itemsCount); hiveSearchClicksService.printHivePartitions(DBNAME_SEARCH, TBNAME_SEARCH_CLICKS); oozieJobsService.startHiveAddPartitionCoordJob(); System.out.println("Coord job completed ..."); hiveSearchClicksService.printHivePartitions(DBNAME_SEARCH, TBNAME_SEARCH_CLICKS); int totalRowCount = hiveSearchClicksService.getTotalRowCount( DBNAME_SEARCH, TBNAME_SEARCH_CLICKS); System.out.println("totalRowCount : " + totalRowCount); assertTrue(totalRowCount > 0); oozieJobsService.startIndexTopCustomerQueryBundleCoordJob(); System.out.println("Bundle job completed ..."); int totalQueryRowCount = hiveSearchClicksService.getTotalRowCount( DBNAME_SEARCH, "search_customerquery"); System.out.println("totalRowCount : " + totalRowCount); assertTrue(totalQueryRowCount > 0); }
Example #20
Source File: OozieJobsServiceImplTest.java From searchanalytics-bigdata with MIT License | 5 votes |
/** Running the hive add-partition workflow job must leave rows in the clicks table. */
@Test
public void testAddHiveActionWorkflowJob() throws OozieClientException,
    InterruptedException, IllegalArgumentException, IOException {
  prepareHiveData();
  oozieJobsService.runHiveAddPartitionWorkflowJob();
  System.out.println("Workflow job completed ...");
  int rowCount = hiveSearchClicksService.getTotalRowCount(
      DBNAME_SEARCH, TBNAME_SEARCH_CLICKS);
  System.out.println("totalRowCount : " + rowCount);
  assertTrue(rowCount > 0);
}
Example #21
Source File: OozieJobsServiceImpl.java From searchanalytics-bigdata with MIT License | 5 votes |
/** Sets up the bundle job application and submits it; any failure becomes a RuntimeException. */
@Override
public void startIndexTopCustomerQueryBundleCoordJob() {
  try {
    submitTopQueriesBundleCoordJob(setupTopCustomerQueryBundleJobApp());
  } catch (OozieClientException | InterruptedException | IllegalArgumentException | IOException e) {
    String errMsg = "Error occured while starting bundle job!";
    LOG.error(errMsg, e);
    throw new RuntimeException(errMsg, e);
  }
}
Example #22
Source File: OozieJobsServiceImpl.java From searchanalytics-bigdata with MIT License | 5 votes |
/** Sets up the add-partition workflow app and submits it; any failure becomes a RuntimeException. */
@Override
public void runHiveAddPartitionWorkflowJob() {
  try {
    submitWorkflowJob(setupHiveAddPartitionWorkflowApp());
  } catch (OozieClientException | InterruptedException | IllegalArgumentException | IOException e) {
    String errMsg = "Error occured while starting hive add partition Workflow job!";
    LOG.error(errMsg, e);
    throw new RuntimeException(errMsg, e);
  }
}
Example #23
Source File: OozieJobsServiceImpl.java From searchanalytics-bigdata with MIT License | 5 votes |
/** Sets up the add-partition workflow app and submits the coord job; failures become RuntimeException. */
@Override
public void startHiveAddPartitionCoordJob() {
  try {
    submitCoordJob(setupHiveAddPartitionWorkflowApp());
  } catch (OozieClientException | InterruptedException | IllegalArgumentException | IOException e) {
    String errMsg = "Error occured while starting hive add partition coord job!";
    LOG.error(errMsg, e);
    throw new RuntimeException(errMsg, e);
  }
}
Example #24
Source File: OozieUtil.java From EasyML with Apache License 2.0 | 5 votes |
/**
 * Rerun an Oozie workflow job, reusing its original application path, then
 * refresh and persist the job's status and created time.
 *
 * @param jobID id of the Oozie job to rerun
 * @throws OozieClientException if the Oozie server rejects the rerun request
 * @throws IOException on communication failure
 */
public static void reRun(String jobID) throws OozieClientException, IOException {
  logger.info("rerun job:" + jobID);
  // create a workflow job configuration and set the workflow application path
  Properties conf = wc.createConfiguration();
  String app_path = wc.getJobInfo(jobID).getAppPath();
  // Need to analyze conf_o
  conf.setProperty(OozieClient.APP_PATH, app_path);
  conf.setProperty("queueName", Constants.QUEUE_NAME);
  conf.setProperty("nameNode", Constants.NAME_NODE);
  conf.setProperty("jobTracker", Constants.JOB_TRACKER);
  conf.setProperty("appPath", app_path);
  // "false" reruns all nodes, not only the previously failed ones.
  conf.setProperty("oozie.wf.rerun.failnodes", "false");
  wc.reRun(jobID, conf);
  OozieJob job = new OozieJob();
  job.setId(jobID);
  try {
    // Prefer the persisted record when one exists so its other columns are kept.
    OozieJob temp = SecureDao.getObject(job);
    if ( temp != null ) {
      job = temp;
    }
    job.setStatus(getJob(jobID).getStatus());
    job.setCreatedTime(TimeUtils.getTime());
    logger.info("[reRun Status]" + job.getStatus());
    String[] setFields = {"status", "createtime", "endtime"};
    String[] condFields = {"jobid"};
    SecureDao.update(job, setFields, condFields);
  } catch (Exception e) {
    // Best-effort persistence: the rerun request itself already went through.
    e.printStackTrace();
  }
}
Example #25
Source File: OozieJobsServiceImpl.java From searchanalytics-bigdata with MIT License | 4 votes |
/**
 * Submits the hive add-partition workflow job to the Oozie server named by the
 * "oozie.base.url" system property, parameterized with the current date/hour,
 * then polls its status up to 3 times (60s apart) until it succeeds.
 *
 * @param workFlowRoot HDFS root holding hive-action-add-partition.xml
 * @throws OozieClientException on Oozie communication failure
 * @throws InterruptedException if interrupted while sleeping between polls
 * @throws RuntimeException if the job fails or is still not done after all retries
 */
private void submitWorkflowJob(String workFlowRoot) throws OozieClientException, InterruptedException {
  String oozieURL = System.getProperty("oozie.base.url");
  LOG.debug("Oozie BaseURL is: {} ", oozieURL);
  OozieClient client = new OozieClient(oozieURL);
  // Zero-pad month/day/hour so they match the partition naming scheme.
  DateTime now = new DateTime();
  int monthOfYear = now.getMonthOfYear();
  int dayOfMonth = now.getDayOfMonth();
  int hourOfDay = now.getHourOfDay();
  String year = String.valueOf(now.getYear());
  String month = monthOfYear < 10 ? "0" + String.valueOf(monthOfYear)
      : String.valueOf(monthOfYear);
  String day = dayOfMonth < 10 ? "0" + String.valueOf(dayOfMonth)
      : String.valueOf(dayOfMonth);
  String hour = hourOfDay < 10 ? "0" + String.valueOf(hourOfDay) : String
      .valueOf(hourOfDay);
  Properties conf = client.createConfiguration();
  conf.setProperty(OozieClient.APP_PATH, workFlowRoot
      + "/hive-action-add-partition.xml");
  conf.setProperty("nameNode", hadoopClusterService.getHDFSUri());
  conf.setProperty("jobTracker", hadoopClusterService.getJobTRackerUri());
  conf.setProperty("workflowRoot", workFlowRoot);
  conf.setProperty("YEAR", year);
  conf.setProperty("MONTH", month);
  conf.setProperty("DAY", day);
  conf.setProperty("HOUR", hour);
  conf.setProperty("oozie.use.system.libpath", "true");
  // submit and start the workflow job
  client.setDebugMode(1);
  // client.dryrun(conf);
  String jobId = client.run(conf);// submit(conf);
  LOG.debug("Workflow job submitted");
  // wait until the workflow job finishes printing the status every 10
  // seconds
  int retries = 3;
  for (int i = 1; i <= retries; i++) {
    // Sleep 60 sec./ 3 mins
    Thread.sleep(60 * 1000);
    WorkflowJob jobInfo = client.getJobInfo(jobId);
    Status jobStatus = jobInfo.getStatus();
    LOG.debug("Workflow job running ...");
    LOG.debug("HiveActionWorkflowJob Status Try: {}", i);
    LOG.debug("HiveActionWorkflowJob Id: {}", jobInfo.getId());
    LOG.debug("HiveActionWorkflowJob StartTime: {}", jobInfo.getStartTime());
    LOG.debug("HiveActionWorkflowJob EndTime: {}", jobInfo.getEndTime());
    LOG.debug("HiveActionWorkflowJob ConsoleURL: {}", jobInfo.getConsoleUrl());
    LOG.debug("HiveActionWorkflowJob Status: {}", jobInfo.getStatus());
    WorkflowAction workflowAction = jobInfo.getActions().get(0);
    LOG.debug("HiveActionWorkflowJob Action consoleURL: {}", workflowAction.getConsoleUrl());
    LOG.debug("HiveActionWorkflowJob Action Name: {}", workflowAction.getName());
    LOG.debug("HiveActionWorkflowJob Action error message: {}", workflowAction.getErrorMessage());
    // NOTE(review): this logs getStats() under a "Status" label — looks like a
    // getStats/getStatus mix-up in the original; left unchanged here.
    LOG.debug("HiveActionWorkflowJob Action Status: {}", workflowAction.getStats());
    LOG.debug("HiveActionWorkflowJob Action data: {}", workflowAction.getData());
    LOG.debug("HiveActionWorkflowJob Action conf: {}", workflowAction.getConf());
    LOG.debug("HiveActionWorkflowJob Action retries: {}", workflowAction.getRetries());
    LOG.debug("HiveActionWorkflowJob Action id: {}", workflowAction.getId());
    LOG.debug("HiveActionWorkflowJob Action start time: {}", workflowAction.getStartTime());
    LOG.debug("HiveActionWorkflowJob Action end time: {}", workflowAction.getEndTime());
    LOG.debug("HiveActionWorkflowJob Oozie Url: {}", client.getOozieUrl());
    if (jobStatus == WorkflowJob.Status.SUCCEEDED) {
      LOG.info("Oozie workflow job was successful!" + jobStatus);
      break;
    } else if (jobStatus == WorkflowJob.Status.PREP
        || jobStatus == WorkflowJob.Status.RUNNING) {
      // Still in progress: fail only when out of retries.
      if (i == retries) {
        throw new RuntimeException("Error executing workflow job!" + jobStatus);
      } else {
        continue;
      }
    } else {
      // Any other status (KILLED, FAILED, SUSPENDED) is terminal failure.
      throw new RuntimeException("Error executing workflow job!" + jobStatus);
    }
  }
}
Example #26
Source File: OozieJobInfoDelegateTest.java From pentaho-hadoop-shims with Apache License 2.0 | 4 votes |
/** Creates the delegate under test around the mocked Oozie workflow job. */
@Before
public void before() throws OozieClientException {
  oozieJobInfoDelegate = new OozieJobInfoDelegate( job );
}
Example #27
Source File: OozieJobInfoImplTest.java From pentaho-hadoop-shims with Apache License 2.0 | 4 votes |
/** Creates the job-info under test and stubs the mocked client to return the mocked workflow job. */
@Before
public void before() throws OozieClientException {
  oozieJobInfo = new OozieJobInfoImpl( id, client );
  when( client.getJobInfo( id ) ).thenReturn( workflowJob );
}
Example #28
Source File: OozieJobsServiceImpl.java From searchanalytics-bigdata with MIT License | 4 votes |
/**
 * Submits the hive add-partition coordinator job to the Oozie server named by
 * the "oozie.base.url" system property, then polls up to 2 times (60s apart)
 * until the coordinator reaches a stable RUNNING state.
 *
 * @param workFlowRoot HDFS root holding coord-app-hive-add-partition.xml
 * @throws OozieClientException on Oozie communication failure
 * @throws InterruptedException if interrupted while sleeping between polls
 * @throws RuntimeException if the coordinator enters a failure state
 */
private void submitCoordJob(String workFlowRoot) throws OozieClientException, InterruptedException {
  // OozieClient client = LocalOozie.getCoordClient();
  String oozieURL = System.getProperty("oozie.base.url");
  LOG.debug("Oozie BaseURL is: {} ", oozieURL);
  OozieClient client = new OozieClient(oozieURL);
  Properties conf = client.createConfiguration();
  conf.setProperty(OozieClient.COORDINATOR_APP_PATH, workFlowRoot
      + "/coord-app-hive-add-partition.xml");
  conf.setProperty("nameNode", hadoopClusterService.getHDFSUri());
  conf.setProperty("jobTracker", hadoopClusterService.getJobTRackerUri());
  conf.setProperty("workflowRoot", workFlowRoot);
  // Window: starts one minute ago, ends two hours from now.
  Date nowMinusOneMin = new DateTime().minusMinutes(1).toDate();
  Date now = new DateTime().toDate();
  conf.setProperty("jobStart", DateUtils.formatDateOozieTZ(nowMinusOneMin));
  conf.setProperty("jobEnd", DateUtils.formatDateOozieTZ(new DateTime()
      .plusHours(2).toDate()));
  conf.setProperty("initialDataset", DateUtils.formatDateOozieTZ(now));
  conf.setProperty("tzOffset", "2");
  // submit and start the workflow job
  String jobId = client.submit(conf);
  LOG.debug("Workflow job submitted");
  // wait until the workflow job finishes printing the status every 10
  // seconds
  int retries = 2;
  for (int i = 1; i <= retries; i++) {
    // Sleep 60 sec./ 3 mins
    Thread.sleep(60 * 1000);
    CoordinatorJob coordJobInfo = client.getCoordJobInfo(jobId);
    LOG.debug("Workflow job running ...");
    LOG.debug("coordJobInfo Try: {}", i);
    LOG.debug("coordJobInfo StartTime: {}", coordJobInfo.getStartTime());
    LOG.debug("coordJobInfo NextMaterizedTime: {}", coordJobInfo.getNextMaterializedTime());
    LOG.debug("coordJobInfo EndTime: {}", coordJobInfo.getEndTime());
    LOG.debug("coordJobInfo Frequency: {}", coordJobInfo.getFrequency());
    LOG.debug("coordJobInfo ConsoleURL: {}", coordJobInfo.getConsoleUrl());
    LOG.debug("coordJobInfo Status: {}", coordJobInfo.getStatus());
    for (CoordinatorAction action : coordJobInfo.getActions()) {
      LOG.debug("coordJobInfo Action Id: {}", action.getId());
      LOG.debug("coordJobInfo Action NominalTimeL: {}", action.getNominalTime());
      LOG.debug("coordJobInfo Action Runconf: {}", action.getRunConf());
      LOG.debug("coordJobInfo Action Status: {}", action.getStatus());
      LOG.debug("coordJobInfo ActionConsoleURL: {}", action.getConsoleUrl());
      LOG.debug("coordJobInfo ActionErrorMessage: {}", action.getErrorMessage());
    }
    if (coordJobInfo.getStatus() == Job.Status.RUNNING) {
      // Wait three times to see the running state is stable..then it
      // is fine.
      // Job will keep running even if hive action fails.
      if (i == retries) {
        LOG.info("Coord Job in running state!");
        break;
      } else {
        continue;
      }
    } else if (coordJobInfo.getStatus() == Job.Status.PREMATER
        || coordJobInfo.getStatus() == Job.Status.PREP) {
      // still preparing.
      continue;
    } else {
      throw new RuntimeException(
          "Error occured while running coord job!");
    }
  }
}
Example #29
Source File: OozieJobsServiceImpl.java From searchanalytics-bigdata with MIT License | 4 votes |
/**
 * Submits the customer-top-queries bundle job (two coordinators: load customer
 * queries, index top queries in ES) to the Oozie server named by the
 * "oozie.base.url" system property, then polls up to 3 times (60s apart) until
 * the bundle reaches a stable RUNNING state.
 *
 * @param workFlowRoot HDFS root holding the bundle and coordinator XML files
 * @throws OozieClientException on Oozie communication failure
 * @throws InterruptedException if interrupted while sleeping between polls
 * @throws RuntimeException if the bundle enters a failure state
 */
private void submitTopQueriesBundleCoordJob(String workFlowRoot) throws OozieClientException, InterruptedException {
  // OozieClient client = LocalOozie.getCoordClient();
  String oozieURL = System.getProperty("oozie.base.url");
  LOG.debug("Oozie BaseURL is: {} ", oozieURL);
  OozieClient client = new OozieClient(oozieURL);
  Properties conf = client.createConfiguration();
  conf.setProperty(OozieClient.BUNDLE_APP_PATH, workFlowRoot
      + "/load-and-index-customerqueries-bundle-configuration.xml");
  conf.setProperty("coordAppPathLoadCustomerQueries", workFlowRoot
      + "/coord-app-load-customerqueries.xml");
  conf.setProperty("coordAppPathIndexTopQueriesES", workFlowRoot
      + "/coord-app-index-topqueries-es.xml");
  conf.setProperty("nameNode", hadoopClusterService.getHDFSUri());
  conf.setProperty("jobTracker", hadoopClusterService.getJobTRackerUri());
  conf.setProperty("workflowRoot", workFlowRoot);
  String userName = System.getProperty("user.name");
  String oozieWorkFlowRoot = hadoopClusterService.getHDFSUri() + "/usr/"
      + userName + "/oozie";
  conf.setProperty("oozieWorkflowRoot", oozieWorkFlowRoot);
  // Window: starts a day ago, ends two days from now; index coord lags by one minute.
  Date now = new Date();
  conf.setProperty("jobStart", DateUtils.formatDateOozieTZ(new DateTime(
      now).minusDays(1).toDate()));
  conf.setProperty("jobStartIndex", DateUtils
      .formatDateOozieTZ(new DateTime(now).minusDays(1).plusMinutes(1).toDate()));
  conf.setProperty("jobEnd", DateUtils.formatDateOozieTZ(new DateTime()
      .plusDays(2).toDate()));
  conf.setProperty("initialDataset", DateUtils.formatDateOozieTZ(now));
  conf.setProperty("tzOffset", "2");
  // submit and start the workflow job
  String jobId = client.submit(conf);
  LOG.debug("Bundle job submitted");
  // wait until the workflow job finishes printing the status every 10
  // seconds
  int retries = 3;
  for (int i = 1; i <= retries; i++) {
    // Sleep 60 sec./ 3 mins
    Thread.sleep(60 * 1000);
    BundleJob bundleJobInfo = client.getBundleJobInfo(jobId);
    LOG.debug("Bundle job running ...");
    LOG.debug("bundleJobInfo Try: {}", i);
    LOG.debug("bundleJobInfo StartTime: {}", bundleJobInfo.getStartTime());
    LOG.debug("bundleJobInfo EndTime: {}", bundleJobInfo.getEndTime());
    LOG.debug("bundleJobInfo ConsoleURL: {}", bundleJobInfo.getConsoleUrl());
    LOG.debug("bundleJobInfo Status: {}", bundleJobInfo.getStatus());
    for (CoordinatorJob coordinatorJob : bundleJobInfo
        .getCoordinators()) {
      LOG.debug("bundleJobInfo Coord StartTime: {}", coordinatorJob.getStartTime());
      LOG.debug("bundleJobInfo Coord EndTime: {}", coordinatorJob.getEndTime());
      LOG.debug("bundleJobInfo Coord NextMaterizedTime: {}", coordinatorJob.getNextMaterializedTime());
      LOG.debug("bundleJobInfo Frequency: {}", coordinatorJob.getFrequency());
      LOG.debug("bundleJobInfo Coord Status: {}", coordinatorJob.getStatus());
      for (CoordinatorAction action : coordinatorJob.getActions()) {
        LOG.debug("bundleJobInfo Action Id: {}", action.getId());
        LOG.debug("bundleJobInfo Action NominalTimeL: {}", action.getNominalTime());
        LOG.debug("bundleJobInfo Action Runconf: {}", action.getRunConf());
        LOG.debug("bundleJobInfo Action Status: {}", action.getStatus());
        LOG.debug("bundleJobInfo ActionConsoleURL: {}", action.getConsoleUrl());
        LOG.debug("bundleJobInfo ActionErrorMessage: {}", action.getErrorMessage());
      }
    }
    if (bundleJobInfo.getStatus() == Job.Status.RUNNING) {
      // Wait three times to see the running state is stable..then it
      // is fine.
      // Job will keep running even if hive action fails.
      if (i == retries) {
        LOG.info("Bundle Job in running state! "
            + bundleJobInfo.getStatus());
        break;
      } else {
        continue;
      }
    } else if (bundleJobInfo.getStatus() == Job.Status.PREMATER
        || bundleJobInfo.getStatus() == Job.Status.PREP) {
      // still preparing.
      continue;
    } else {
      throw new RuntimeException(
          "Error occured while running customer top queries bundle job! "
              + bundleJobInfo.getStatus());
    }
  }
}
Example #30
Source File: OozieUtil.java From EasyML with Apache License 2.0 | 3 votes |
/** * Get oozie's Url * * @param jobID * @return url string * @throws OozieClientException * @throws IOException */ public static String getUrl(String jobID) throws OozieClientException, IOException { // get a OozieClient for local Oozie String url = wc.getJobInfo(jobID).getAppPath() + "/"; return url; }