Java Code Examples for org.apache.helix.task.TaskDriver#getWorkflowConfig()
The following examples show how to use org.apache.helix.task.TaskDriver#getWorkflowConfig(). Each example lists its source file, originating project, and license.
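Before the examples, here is a minimal, self-contained sketch of the pattern they all share: connect a HelixManager, create a TaskDriver, look up a workflow's WorkflowConfig, and handle the null that getWorkflowConfig returns when the workflow does not exist. The ZooKeeper address and the cluster and workflow names below are hypothetical placeholders, not values from any of the projects quoted here.

import org.apache.helix.HelixManager;
import org.apache.helix.HelixManagerFactory;
import org.apache.helix.InstanceType;
import org.apache.helix.task.TaskDriver;
import org.apache.helix.task.WorkflowConfig;

public class GetWorkflowConfigSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical placeholders; point these at your own deployment.
    String zkAddress = "localhost:2181";
    String clusterName = "MyCluster";
    String workflowName = "MyWorkflow";

    HelixManager manager = HelixManagerFactory.getZKHelixManager(clusterName, "config-reader",
        InstanceType.ADMINISTRATOR, zkAddress);
    manager.connect();
    try {
      TaskDriver driver = new TaskDriver(manager);
      WorkflowConfig config = driver.getWorkflowConfig(workflowName);
      if (config == null) {
        // getWorkflowConfig returns null when no such workflow exists.
        System.out.println("Workflow not found: " + workflowName);
      } else {
        System.out.println("Jobs in DAG: " + config.getJobDag().getAllNodes());
      }
    } finally {
      manager.disconnect();
    }
  }
}

The null check is the key detail: several of the examples below rely on the null return to detect that a workflow is missing or has been cleaned up.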
Example 1
Source File: TestBatchAddJobs.java From helix with Apache License 2.0
@Test
public void testBatchAddJobs() throws Exception {
  TaskDriver driver = new TaskDriver(_gZkClient, CLUSTER_NAME);
  driver.createQueue(new JobQueue.Builder(QUEUE_NAME).build());
  for (int i = 0; i < 10; i++) {
    _submitJobTasks.add(new SubmitJobTask(ZK_ADDR, i));
    _submitJobTasks.get(i).start();
  }

  WorkflowConfig workflowConfig = driver.getWorkflowConfig(QUEUE_NAME);
  while (workflowConfig.getJobDag().getAllNodes().size() < 100) {
    Thread.sleep(50);
    // Re-read the config so the loop condition sees newly enqueued jobs.
    workflowConfig = driver.getWorkflowConfig(QUEUE_NAME);
  }

  JobDag dag = workflowConfig.getJobDag();
  String currentJob = dag.getAllNodes().iterator().next();
  while (dag.getDirectChildren(currentJob).size() > 0) {
    String childJob = dag.getDirectChildren(currentJob).iterator().next();
    if (!getPrefix(currentJob).equals(getPrefix(childJob))
        && currentJob.charAt(currentJob.length() - 1) != '9') {
      Assert.fail();
    }
    currentJob = childJob;
  }
}
Example 2
Source File: JobAccessor.java From helix with Apache License 2.0
@GET
public Response getJobs(@PathParam("clusterId") String clusterId,
    @PathParam("workflowName") String workflowName) {
  TaskDriver driver = getTaskDriver(clusterId);
  WorkflowConfig workflowConfig = driver.getWorkflowConfig(workflowName);
  ObjectNode root = JsonNodeFactory.instance.objectNode();

  if (workflowConfig == null) {
    return badRequest(String.format("Workflow %s is not found!", workflowName));
  }

  Set<String> jobs = workflowConfig.getJobDag().getAllNodes();
  root.put(Properties.id.name(), JobProperties.Jobs.name());
  ArrayNode jobsNode = root.putArray(JobProperties.Jobs.name());

  if (jobs != null) {
    jobsNode.addAll((ArrayNode) OBJECT_MAPPER.valueToTree(jobs));
  }
  return JSONRepresentation(root);
}
Example 3
Source File: TestJobAccessor.java From helix with Apache License 2.0
@Test(dependsOnMethods = "testInvalidGetAndUpdateJobContentStore") public void testDeleteJob() throws InterruptedException { System.out.println("Start test :" + TestHelper.getTestMethodName()); TaskDriver driver = getTaskDriver(CLUSTER_NAME); driver.waitToStop(TEST_QUEUE_NAME, 5000); delete("clusters/" + CLUSTER_NAME + "/workflows/" + TEST_QUEUE_NAME + "/jobs/" + TEST_JOB_NAME, Response.Status.OK.getStatusCode()); String jobName = TaskUtil.getNamespacedJobName(TEST_QUEUE_NAME, TEST_JOB_NAME); JobConfig jobConfig = driver.getJobConfig(jobName); Assert.assertNull(jobConfig); WorkflowConfig workflowConfig = driver.getWorkflowConfig(TEST_QUEUE_NAME); Assert.assertTrue(!workflowConfig.getJobDag().getAllNodes().contains(jobName)); System.out.println("End test :" + TestHelper.getTestMethodName()); }
Example 4
Source File: HelixUtils.java From incubator-gobblin with Apache License 2.0
/**
 * Returns the currently running Helix Workflow Ids given an {@link Iterable} of Gobblin job
 * names. The method returns a {@link java.util.Map} from Gobblin job name to the corresponding
 * Helix Workflow Id. This method iterates over all Helix workflows, and obtains the jobs of each
 * workflow from its jobDag.
 *
 * NOTE: This call is expensive as it results in listing of znodes and subsequently, multiple ZK
 * calls to get the job configuration for each HelixJob. Ideally, this method should be called
 * infrequently e.g. when a job is deleted/cancelled.
 *
 * @param jobNames a list of Gobblin job names.
 * @return a map from jobNames to their Helix Workflow Ids.
 */
public static Map<String, String> getWorkflowIdsFromJobNames(HelixManager helixManager,
    Collection<String> jobNames) {
  Map<String, String> jobNameToWorkflowId = new HashMap<>();
  TaskDriver taskDriver = new TaskDriver(helixManager);
  Map<String, WorkflowConfig> workflowConfigMap = taskDriver.getWorkflows();
  for (String workflow : workflowConfigMap.keySet()) {
    WorkflowConfig workflowConfig = taskDriver.getWorkflowConfig(workflow);
    // Filter out any stale Helix workflows which are not running.
    if (workflowConfig.getTargetState() != TargetState.START) {
      continue;
    }
    Set<String> helixJobs = workflowConfig.getJobDag().getAllNodes();
    for (String helixJob : helixJobs) {
      Iterator<TaskConfig> taskConfigIterator =
          taskDriver.getJobConfig(helixJob).getTaskConfigMap().values().iterator();
      if (taskConfigIterator.hasNext()) {
        TaskConfig taskConfig = taskConfigIterator.next();
        String jobName = taskConfig.getConfigMap().get(ConfigurationKeys.JOB_NAME_KEY);
        if (jobNames.contains(jobName)) {
          if (!jobNameToWorkflowId.containsKey(jobName)) {
            jobNameToWorkflowId.put(jobName, workflow);
          } else {
            log.warn("JobName {} previously found to have WorkflowId {}; found "
                + " a different WorkflowId {} for the job; " + "Skipping this entry", jobName,
                jobNameToWorkflowId.get(jobName), workflow);
          }
          break;
        }
      }
    }
  }
  return jobNameToWorkflowId;
}
Example 5
Source File: GobblinHelixJobLauncherTest.java From incubator-gobblin with Apache License 2.0
private void waitForWorkFlowCleanup(TaskDriver taskDriver, String queueName) {
  for (int i = 0; i < 60; i++) {
    WorkflowConfig workflowConfig = taskDriver.getWorkflowConfig(queueName);

    // A null config means the workflow has been deleted.
    if (workflowConfig == null) {
      break;
    }

    try {
      Thread.sleep(1000);
    } catch (InterruptedException e) {
      // Ignore and keep polling until the workflow disappears or we time out.
    }
  }
}
Example 6
Source File: GobblinHelixJobLauncherTest.java From incubator-gobblin with Apache License 2.0
private void waitForWorkFlowStartup(TaskDriver taskDriver, String workflow) {
  for (int i = 0; i < 5; i++) {
    WorkflowConfig workflowConfig = taskDriver.getWorkflowConfig(workflow);

    // A non-null config means the workflow has been created.
    if (workflowConfig != null) {
      break;
    }

    try {
      Thread.sleep(1000);
    } catch (InterruptedException e) {
      // Ignore and keep polling until the workflow appears or we time out.
    }
  }
}
Example 7
Source File: WorkflowAccessor.java From helix with Apache License 2.0
@GET @Path("{workflowId}") public Response getWorkflow(@PathParam("clusterId") String clusterId, @PathParam("workflowId") String workflowId) { TaskDriver taskDriver = getTaskDriver(clusterId); WorkflowConfig workflowConfig = taskDriver.getWorkflowConfig(workflowId); WorkflowContext workflowContext = taskDriver.getWorkflowContext(workflowId); ObjectNode root = JsonNodeFactory.instance.objectNode(); TextNode id = JsonNodeFactory.instance.textNode(workflowId); root.put(Properties.id.name(), id); ObjectNode workflowConfigNode = JsonNodeFactory.instance.objectNode(); ObjectNode workflowContextNode = JsonNodeFactory.instance.objectNode(); if (workflowConfig != null) { getWorkflowConfigNode(workflowConfigNode, workflowConfig.getRecord()); } if (workflowContext != null) { getWorkflowContextNode(workflowContextNode, workflowContext.getRecord()); } root.put(WorkflowProperties.WorkflowConfig.name(), workflowConfigNode); root.put(WorkflowProperties.WorkflowContext.name(), workflowContextNode); JobDag jobDag = workflowConfig.getJobDag(); ArrayNode jobs = OBJECT_MAPPER.valueToTree(jobDag.getAllNodes()); ObjectNode parentJobs = OBJECT_MAPPER.valueToTree(jobDag.getChildrenToParents()); root.put(WorkflowProperties.Jobs.name(), jobs); root.put(WorkflowProperties.ParentJobs.name(), parentJobs); root.put(WorkflowProperties.LastScheduledTask.name(), OBJECT_MAPPER.valueToTree(taskDriver.getLastScheduledTaskExecutionInfo(workflowId))); return JSONRepresentation(root); }
Example 8
Source File: WorkflowAccessor.java From helix with Apache License 2.0
@GET @Path("{workflowId}/configs") public Response getWorkflowConfig(@PathParam("clusterId") String clusterId, @PathParam("workflowId") String workflowId) { TaskDriver taskDriver = getTaskDriver(clusterId); WorkflowConfig workflowConfig = taskDriver.getWorkflowConfig(workflowId); ObjectNode workflowConfigNode = JsonNodeFactory.instance.objectNode(); if (workflowConfig != null) { getWorkflowConfigNode(workflowConfigNode, workflowConfig.getRecord()); } return JSONRepresentation(workflowConfigNode); }
Example 9
Source File: TestJobAccessor.java From helix with Apache License 2.0
@Test(dependsOnMethods = "testGetJobContext") public void testCreateJob() throws IOException { System.out.println("Start test :" + TestHelper.getTestMethodName()); TaskDriver driver = getTaskDriver(CLUSTER_NAME); // Create JobQueue JobQueue.Builder jobQueue = new JobQueue.Builder(TEST_QUEUE_NAME) .setWorkflowConfig(driver.getWorkflowConfig(WORKFLOW_NAME)); Entity entity = Entity.entity(OBJECT_MAPPER.writeValueAsString(Collections .singletonMap(WorkflowAccessor.WorkflowProperties.WorkflowConfig.name(), jobQueue.build().getWorkflowConfig().getRecord().getSimpleFields())), MediaType.APPLICATION_JSON_TYPE); put("clusters/" + CLUSTER_NAME + "/workflows/" + TEST_QUEUE_NAME, null, entity, Response.Status.OK.getStatusCode()); // Test enqueue job entity = Entity.entity(JOB_INPUT, MediaType.APPLICATION_JSON_TYPE); put("clusters/" + CLUSTER_NAME + "/workflows/" + TEST_QUEUE_NAME + "/jobs/" + TEST_JOB_NAME, null, entity, Response.Status.OK.getStatusCode()); String jobName = TaskUtil.getNamespacedJobName(TEST_QUEUE_NAME, TEST_JOB_NAME); JobConfig jobConfig = driver.getJobConfig(jobName); Assert.assertNotNull(jobConfig); WorkflowConfig workflowConfig = driver.getWorkflowConfig(TEST_QUEUE_NAME); Assert.assertTrue(workflowConfig.getJobDag().getAllNodes().contains(jobName)); System.out.println("End test :" + TestHelper.getTestMethodName()); }
Example 10
Source File: TestWorkflowAccessor.java From helix with Apache License 2.0
@Test(dependsOnMethods = "testGetWorkflowContext") public void testCreateWorkflow() throws IOException { System.out.println("Start test :" + TestHelper.getTestMethodName()); TaskDriver driver = getTaskDriver(CLUSTER_NAME); // Create one time workflow Entity entity = Entity.entity(WORKFLOW_INPUT, MediaType.APPLICATION_JSON_TYPE); put("clusters/" + CLUSTER_NAME + "/workflows/" + TEST_WORKFLOW_NAME, null, entity, Response.Status.OK.getStatusCode()); WorkflowConfig workflowConfig = driver.getWorkflowConfig(TEST_WORKFLOW_NAME); Assert.assertNotNull(workflowConfig); Assert.assertEquals(workflowConfig.getJobDag().getAllNodes().size(), 2); // Create JobQueue JobQueue.Builder jobQueue = new JobQueue.Builder(TEST_QUEUE_NAME) .setWorkflowConfig(driver.getWorkflowConfig(TEST_WORKFLOW_NAME)); entity = Entity.entity(OBJECT_MAPPER.writeValueAsString(Collections .singletonMap(WorkflowAccessor.WorkflowProperties.WorkflowConfig.name(), jobQueue.build().getWorkflowConfig().getRecord().getSimpleFields())), MediaType.APPLICATION_JSON_TYPE); put("clusters/" + CLUSTER_NAME + "/workflows/" + TEST_QUEUE_NAME, null, entity, Response.Status.OK.getStatusCode()); workflowConfig = driver.getWorkflowConfig(TEST_QUEUE_NAME); Assert.assertNotNull(workflowConfig); Assert.assertTrue(workflowConfig.isJobQueue()); Assert.assertEquals(workflowConfig.getJobDag().getAllNodes().size(), 0); System.out.println("End test :" + TestHelper.getTestMethodName()); }
Example 11
Source File: GobblinHelixJobLauncherTest.java From incubator-gobblin with Apache License 2.0
public void testJobCleanup() throws Exception {
  final ConcurrentHashMap<String, Boolean> runningMap = new ConcurrentHashMap<>();

  final Properties properties = generateJobProperties(this.baseConfig, "3", "_1504201348473");
  final GobblinHelixJobLauncher gobblinHelixJobLauncher =
      new GobblinHelixJobLauncher(properties, this.helixManager, this.appWorkDir,
          ImmutableList.<Tag<?>>of(), runningMap, java.util.Optional.empty());

  final Properties properties2 = generateJobProperties(this.baseConfig, "33", "_1504201348474");
  final GobblinHelixJobLauncher gobblinHelixJobLauncher2 =
      new GobblinHelixJobLauncher(properties2, this.helixManager, this.appWorkDir,
          ImmutableList.<Tag<?>>of(), runningMap, java.util.Optional.empty());

  gobblinHelixJobLauncher.launchJob(null);
  gobblinHelixJobLauncher2.launchJob(null);

  final TaskDriver taskDriver = new TaskDriver(this.helixManager);
  final String jobIdKey1 = properties.getProperty(ConfigurationKeys.JOB_ID_KEY);
  final String jobIdKey2 = properties2.getProperty(ConfigurationKeys.JOB_ID_KEY);

  org.apache.helix.task.JobContext jobContext1 = taskDriver.getJobContext(jobIdKey1);
  org.apache.helix.task.JobContext jobContext2 = taskDriver.getJobContext(jobIdKey2);

  waitForWorkFlowStartup(taskDriver, jobIdKey1);
  waitForWorkFlowStartup(taskDriver, jobIdKey2);

  // job context should be present until close
  Assert.assertNotNull(jobContext1);
  Assert.assertNotNull(jobContext2);

  gobblinHelixJobLauncher.close();

  // workflow deleted asynchronously after close
  waitForWorkFlowCleanup(taskDriver, jobIdKey1);

  jobContext1 = taskDriver.getJobContext(jobIdKey1);

  // job context should have been deleted
  Assert.assertNull(jobContext1);

  // workflow should have been deleted
  WorkflowConfig workflowConfig = taskDriver.getWorkflowConfig(jobIdKey1);
  Assert.assertNull(workflowConfig);

  WorkflowContext workflowContext = taskDriver.getWorkflowContext(jobIdKey1);
  Assert.assertNull(workflowContext);

  // second workflow with shared prefix should not be deleted when the first workflow is cleaned up
  workflowConfig = taskDriver.getWorkflowConfig(jobIdKey2);
  Assert.assertNotNull(workflowConfig);

  gobblinHelixJobLauncher2.close();

  // workflow deleted asynchronously after close
  waitForWorkFlowCleanup(taskDriver, jobIdKey2);

  workflowConfig = taskDriver.getWorkflowConfig(jobIdKey2);
  Assert.assertNull(workflowConfig);

  // check that workunit and taskstate directory for the job are cleaned up
  final File workunitsDir = new File(this.appWorkDir + File.separator
      + GobblinClusterConfigurationKeys.INPUT_WORK_UNIT_DIR_NAME + File.separator + jobIdKey1);
  final File taskstatesDir = new File(this.appWorkDir + File.separator
      + GobblinClusterConfigurationKeys.OUTPUT_TASK_STATE_DIR_NAME + File.separator + jobIdKey1);
  Assert.assertFalse(workunitsDir.exists());
  Assert.assertFalse(taskstatesDir.exists());

  // check that job.state file is cleaned up
  final File jobStateFile = new File(
      GobblinClusterUtils.getJobStateFilePath(true, this.appWorkDir, jobIdKey1).toString());
  Assert.assertFalse(jobStateFile.exists());
}
Example 12
Source File: TaskAdmin.java From helix with Apache License 2.0
private static void list(TaskDriver taskDriver, String workflow) {
  WorkflowConfig wCfg = taskDriver.getWorkflowConfig(workflow);
  if (wCfg == null) {
    LOG.error("Workflow " + workflow + " does not exist!");
    return;
  }
  WorkflowContext wCtx = taskDriver.getWorkflowContext(workflow);

  LOG.info("Workflow " + workflow + " consists of the following tasks: "
      + wCfg.getJobDag().getAllNodes());
  String workflowState =
      (wCtx != null) ? wCtx.getWorkflowState().name() : TaskState.NOT_STARTED.name();
  LOG.info("Current state of workflow is " + workflowState);
  LOG.info("Job states are: ");
  LOG.info("-------");
  for (String job : wCfg.getJobDag().getAllNodes()) {
    TaskState jobState = (wCtx != null) ? wCtx.getJobState(job) : TaskState.NOT_STARTED;
    LOG.info("Job " + job + " is " + jobState);

    // fetch job information
    JobConfig jCfg = taskDriver.getJobConfig(job);
    JobContext jCtx = taskDriver.getJobContext(job);
    if (jCfg == null || jCtx == null) {
      LOG.info("-------");
      continue;
    }

    // calculate taskPartitions
    List<Integer> partitions = Lists.newArrayList(jCtx.getPartitionSet());
    Collections.sort(partitions);

    // report status
    for (Integer partition : partitions) {
      String taskId = jCtx.getTaskIdForPartition(partition);
      taskId = (taskId != null) ? taskId : jCtx.getTargetForPartition(partition);
      LOG.info("Task: " + taskId);
      TaskConfig taskConfig = jCfg.getTaskConfig(taskId);
      if (taskConfig != null) {
        LOG.info("Configuration: " + taskConfig.getConfigMap());
      }
      TaskPartitionState state = jCtx.getPartitionState(partition);
      state = (state != null) ? state : TaskPartitionState.INIT;
      LOG.info("State: " + state);
      String assignedParticipant = jCtx.getAssignedParticipant(partition);
      if (assignedParticipant != null) {
        LOG.info("Assigned participant: " + assignedParticipant);
      }
      LOG.info("-------");
    }
    LOG.info("-------");
  }
}