Java Code Examples for org.apache.helix.task.JobQueue#Builder
The following examples show how to use org.apache.helix.task.JobQueue#Builder.
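All of the examples share one basic pattern: construct a JobQueue.Builder, attach a WorkflowConfig, enqueue one or more JobConfig.Builder instances, and submit the built queue through a TaskDriver. The short sketch below illustrates just that skeleton; the queue name, job name, command string, and the pre-existing helixManager are hypothetical placeholders, not taken from any example.

// Minimal sketch of the pattern used throughout the examples below.
// "myQueue", "myJob", "Reindex", and helixManager are hypothetical placeholders.
JobQueue.Builder queueBuilder = new JobQueue.Builder("myQueue")
    .setWorkflowConfig(new WorkflowConfig.Builder("myQueue").setParallelJobs(1).build());
JobConfig.Builder jobBuilder = new JobConfig.Builder().setCommand("Reindex");
queueBuilder.enqueueJob("myJob", jobBuilder); // append the job to the queue's DAG
TaskDriver driver = new TaskDriver(helixManager); // assumes an existing HelixManager
driver.start(queueBuilder.build()); // persist and start the queue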
Example 1
Source File: TestZkConnectionLost.java From helix with Apache License 2.0
private List<String> createAndEnqueueJob(JobQueue.Builder queueBuild, int jobCount) {
  List<String> currentJobNames = new ArrayList<>();
  for (int i = 0; i < jobCount; i++) {
    String targetPartition = (i == 0) ? "MASTER" : "SLAVE";
    JobConfig.Builder jobConfig = new JobConfig.Builder().setCommand(MockTask.TASK_COMMAND)
        .setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
        .setTargetPartitionStates(Sets.newHashSet(targetPartition))
        .setJobCommandConfigMap(ImmutableMap.of(MockTask.JOB_DELAY, "100"));
    String jobName = targetPartition.toLowerCase() + "Job" + i;
    queueBuild.enqueueJob(jobName, jobConfig);
    currentJobNames.add(jobName);
  }
  Assert.assertEquals(currentJobNames.size(), jobCount);
  return currentJobNames;
}
Example 2
Source File: TestStopAndResumeQueue.java From helix with Apache License 2.0
@Test
public void testStopAndResumeQueue() throws Exception {
  String jobQueueName = TestHelper.getTestMethodName();
  JobConfig.Builder jobBuilder0 =
      new JobConfig.Builder().setWorkflow(jobQueueName).setTargetResource(DATABASE)
          .setTargetPartitionStates(Sets.newHashSet(MasterSlaveSMD.States.MASTER.name()))
          .setCommand(MockTask.TASK_COMMAND)
          .setJobCommandConfigMap(ImmutableMap.of(MockTask.JOB_DELAY, "1000"));
  JobQueue.Builder jobQueue = TaskTestUtil.buildJobQueue(jobQueueName);
  jobQueue.enqueueJob("JOB0", jobBuilder0);
  _driver.start(jobQueue.build());
  _driver.pollForWorkflowState(jobQueueName, TaskState.IN_PROGRESS);
  _driver.pollForJobState(jobQueueName, TaskUtil.getNamespacedJobName(jobQueueName, "JOB0"),
      TaskState.COMPLETED);
  _driver.waitToStop(jobQueueName, 50000L);
  _driver.resume(jobQueueName);
  // Resume should change the workflow context's state to IN_PROGRESS even when there is no job
  // running
  _driver.pollForWorkflowState(jobQueueName, TaskState.IN_PROGRESS);
}
Example 3
Source File: TestScheduleDelayTask.java From helix with Apache License 2.0
@Test
public void testJobQueueDelay() throws InterruptedException {
  String workflowName = TestHelper.getTestMethodName();
  JobQueue.Builder queueBuild = TaskTestUtil.buildJobQueue(workflowName);
  JobConfig.Builder jobBuilder =
      new JobConfig.Builder().setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
          .setCommand(MockTask.TASK_COMMAND).setMaxAttemptsPerTask(2)
          .setJobCommandConfigMap(WorkflowGenerator.DEFAULT_COMMAND_CONFIG);
  for (int i = 1; i < 4; i++) {
    queueBuild.enqueueJob("Job" + i, jobBuilder);
  }
  queueBuild.enqueueJob("Job4", jobBuilder.setExecutionDelay(2000L));
  _driver.start(queueBuild.build());
  _driver.pollForJobState(workflowName, TaskUtil.getNamespacedJobName(workflowName, "Job4"),
      TaskState.COMPLETED);
  long jobThreeFinishTime =
      _driver.getJobContext(TaskUtil.getNamespacedJobName(workflowName, "Job3")).getFinishTime();
  long jobFourStartTime = _driver.getWorkflowContext(workflowName)
      .getJobStartTime(TaskUtil.getNamespacedJobName(workflowName, "Job4"));
  // Job4 has a 2000 ms execution delay, so it must start at least 2000 ms after Job3 finishes
  Assert.assertTrue(jobFourStartTime - jobThreeFinishTime >= 2000L);
}
Example 4
Source File: TestDeleteWorkflow.java From helix with Apache License 2.0
@Test
public void testDeleteWorkflow() throws InterruptedException {
  String jobQueueName = TestHelper.getTestMethodName();
  JobConfig.Builder jobBuilder = JobConfig.Builder.fromMap(WorkflowGenerator.DEFAULT_JOB_CONFIG)
      .setMaxAttemptsPerTask(1).setWorkflow(jobQueueName)
      .setJobCommandConfigMap(ImmutableMap.of(MockTask.JOB_DELAY, "100000"));
  JobQueue.Builder jobQueue = TaskTestUtil.buildJobQueue(jobQueueName);
  jobQueue.enqueueJob("job1", jobBuilder);
  _driver.start(jobQueue.build());
  _driver.pollForJobState(jobQueueName, TaskUtil.getNamespacedJobName(jobQueueName, "job1"),
      TaskState.IN_PROGRESS);

  // Check that WorkflowConfig, WorkflowContext, and IdealState are indeed created for this job
  // queue
  Assert.assertNotNull(_driver.getWorkflowConfig(jobQueueName));
  Assert.assertNotNull(_driver.getWorkflowContext(jobQueueName));
  Assert.assertNotNull(admin.getResourceIdealState(CLUSTER_NAME, jobQueueName));

  // Pause the Controller so that the job queue won't get deleted
  admin.enableCluster(CLUSTER_NAME, false);
  Thread.sleep(1000);

  // Attempt the deletion and time out
  try {
    _driver.deleteAndWaitForCompletion(jobQueueName, DELETE_DELAY);
    Assert.fail(
        "Delete must time out and throw a HelixException with the Controller paused, but did not!");
  } catch (HelixException e) {
    // Pass
  }

  // Resume the Controller and call delete again
  admin.enableCluster(CLUSTER_NAME, true);
  _driver.deleteAndWaitForCompletion(jobQueueName, DELETE_DELAY);

  // Check that the deletion operation completed
  Assert.assertNull(_driver.getWorkflowConfig(jobQueueName));
  Assert.assertNull(_driver.getWorkflowContext(jobQueueName));
  Assert.assertNull(admin.getResourceIdealState(CLUSTER_NAME, jobQueueName));
}
Example 5
Source File: TestUserContentStore.java From helix with Apache License 2.0
@Test
public void testJobContentPutAndGetWithDependency() throws InterruptedException {
  String queueName = TestHelper.getTestMethodName();
  JobQueue.Builder queueBuilder = TaskTestUtil.buildJobQueue(queueName, 0, 100);

  List<TaskConfig> taskConfigs1 = Lists.newArrayListWithCapacity(1);
  List<TaskConfig> taskConfigs2 = Lists.newArrayListWithCapacity(1);
  Map<String, String> taskConfigMap1 = Maps.newHashMap();
  Map<String, String> taskConfigMap2 = Maps.newHashMap();
  TaskConfig taskConfig1 = new TaskConfig("TaskOne", taskConfigMap1);
  TaskConfig taskConfig2 = new TaskConfig("TaskTwo", taskConfigMap2);
  taskConfigs1.add(taskConfig1);
  taskConfigs2.add(taskConfig2);
  Map<String, String> jobCommandMap = Maps.newHashMap();
  jobCommandMap.put("Timeout", "1000");

  JobConfig.Builder jobBuilder1 =
      new JobConfig.Builder().setCommand("DummyCommand").addTaskConfigs(taskConfigs1)
          .setJobCommandConfigMap(jobCommandMap).setWorkflow(queueName);
  JobConfig.Builder jobBuilder2 =
      new JobConfig.Builder().setCommand("DummyCommand").addTaskConfigs(taskConfigs2)
          .setJobCommandConfigMap(jobCommandMap).setWorkflow(queueName);
  queueBuilder.enqueueJob(queueName + 0, jobBuilder1);
  queueBuilder.enqueueJob(queueName + 1, jobBuilder2);

  _driver.start(queueBuilder.build());
  _driver.pollForJobState(queueName, TaskUtil.getNamespacedJobName(queueName, queueName + 1),
      TaskState.COMPLETED);
  Assert.assertEquals(
      _driver.getWorkflowContext(queueName)
          .getJobState(TaskUtil.getNamespacedJobName(queueName, queueName + 1)),
      TaskState.COMPLETED);
}
Example 6
Source File: TestUpdateWorkflow.java From helix with Apache License 2.0
private JobQueue createDefaultRecurrentJobQueue(String queueName, int numJobs) {
  JobQueue.Builder queueBuild = TaskTestUtil.buildRecurrentJobQueue(queueName, 0, 600000);
  for (int i = 0; i <= numJobs; i++) {
    String targetPartition = (i == 0) ? "MASTER" : "SLAVE";
    JobConfig.Builder jobConfig = new JobConfig.Builder().setCommand(MockTask.TASK_COMMAND)
        .setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
        .setTargetPartitionStates(Sets.newHashSet(targetPartition));
    String jobName = targetPartition.toLowerCase() + "Job" + i;
    queueBuild.enqueueJob(jobName, jobConfig);
  }
  return queueBuild.build();
}
Example 7
Source File: TestJobFailureDependence.java From helix with Apache License 2.0
@Test
public void testJobDependantFailure() throws Exception {
  String queueName = TestHelper.getTestMethodName();

  // Create a queue
  LOG.info("Starting job-queue: " + queueName);
  JobQueue.Builder queueBuilder = TaskTestUtil.buildJobQueue(queueName, 0, 100);
  // Create and enqueue jobs
  List<String> currentJobNames = new ArrayList<String>();
  for (int i = 0; i < _numDbs; i++) {
    JobConfig.Builder jobConfig = new JobConfig.Builder().setCommand(MockTask.TASK_COMMAND)
        .setTargetResource(_testDbs.get(i))
        .setTargetPartitionStates(Sets.newHashSet("SLAVE"));
    String jobName = "job" + _testDbs.get(i);
    queueBuilder.enqueueJob(jobName, jobConfig);
    currentJobNames.add(jobName);
  }

  _driver.start(queueBuilder.build());
  _gSetupTool.dropResourceFromCluster(CLUSTER_NAME, _testDbs.get(2));

  // All jobs after the failed job should fail too
  for (int i = 2; i < _numDbs; i++) {
    String namedSpaceJob = String.format("%s_%s", queueName, currentJobNames.get(i));
    _driver.pollForJobState(queueName, namedSpaceJob, TaskState.FAILED);
  }
}
Example 8
Source File: TestTaskThreadLeak.java From helix with Apache License 2.0
@Test
public void testTaskThreadCount() throws InterruptedException {
  String queueName = "myTestJobQueue";
  JobQueue.Builder queueBuilder = new JobQueue.Builder(queueName);
  String lastJob = null;
  for (int i = 0; i < 5; i++) {
    String db = TestHelper.getTestMethodName() + "_" + i;
    _gSetupTool.addResourceToCluster(CLUSTER_NAME, db, 20, MASTER_SLAVE_STATE_MODEL,
        IdealState.RebalanceMode.FULL_AUTO.name());
    _gSetupTool.rebalanceStorageCluster(CLUSTER_NAME, db, 1);
    JobConfig.Builder jobBuilder =
        new JobConfig.Builder().setCommand(MockTask.TASK_COMMAND).setTargetResource(db)
            .setNumConcurrentTasksPerInstance(100);
    queueBuilder.addJob(db + "_job", jobBuilder);
    lastJob = db + "_job";
  }

  queueBuilder
      .setWorkflowConfig(new WorkflowConfig.Builder(queueName).setParallelJobs(10).build());
  _driver.start(queueBuilder.build());

  String nameSpacedJob = TaskUtil.getNamespacedJobName(queueName, lastJob);
  _driver.pollForJobState(queueName, nameSpacedJob, TaskState.COMPLETED);

  int threadCountAfter = getThreadCount("TaskStateModelFactory");
  Assert.assertTrue(
      (threadCountAfter - _threadCountBefore) <= TaskStateModelFactory.TASK_THREADPOOL_SIZE + 1);
}
Example 9
Source File: TestJobAccessor.java From helix with Apache License 2.0
@Test(dependsOnMethods = "testGetJobContext")
public void testCreateJob() throws IOException {
  System.out.println("Start test :" + TestHelper.getTestMethodName());
  TaskDriver driver = getTaskDriver(CLUSTER_NAME);
  // Create JobQueue
  JobQueue.Builder jobQueue = new JobQueue.Builder(TEST_QUEUE_NAME)
      .setWorkflowConfig(driver.getWorkflowConfig(WORKFLOW_NAME));
  Entity entity = Entity.entity(
      OBJECT_MAPPER.writeValueAsString(Collections.singletonMap(
          WorkflowAccessor.WorkflowProperties.WorkflowConfig.name(),
          jobQueue.build().getWorkflowConfig().getRecord().getSimpleFields())),
      MediaType.APPLICATION_JSON_TYPE);
  put("clusters/" + CLUSTER_NAME + "/workflows/" + TEST_QUEUE_NAME, null, entity,
      Response.Status.OK.getStatusCode());

  // Test enqueue job
  entity = Entity.entity(JOB_INPUT, MediaType.APPLICATION_JSON_TYPE);
  put("clusters/" + CLUSTER_NAME + "/workflows/" + TEST_QUEUE_NAME + "/jobs/" + TEST_JOB_NAME,
      null, entity, Response.Status.OK.getStatusCode());

  String jobName = TaskUtil.getNamespacedJobName(TEST_QUEUE_NAME, TEST_JOB_NAME);
  JobConfig jobConfig = driver.getJobConfig(jobName);
  Assert.assertNotNull(jobConfig);

  WorkflowConfig workflowConfig = driver.getWorkflowConfig(TEST_QUEUE_NAME);
  Assert.assertTrue(workflowConfig.getJobDag().getAllNodes().contains(jobName));
  System.out.println("End test :" + TestHelper.getTestMethodName());
}
Example 10
Source File: TestTaskRebalancerParallel.java From helix with Apache License 2.0
/**
 * This test starts 4 jobs in a job queue where all jobs get stuck, and verifies that
 * (1) the number of running jobs does not exceed the configured max allowed parallel jobs
 * (2) one instance can only be assigned to one job in the workflow
 */
@Test
public void testWhenDisallowOverlapJobAssignment() throws Exception {
  String queueName = TestHelper.getTestMethodName();

  WorkflowConfig.Builder cfgBuilder = new WorkflowConfig.Builder(queueName);
  cfgBuilder.setParallelJobs(PARALLEL_COUNT);
  cfgBuilder.setAllowOverlapJobAssignment(false);

  JobQueue.Builder queueBuild =
      new JobQueue.Builder(queueName).setWorkflowConfig(cfgBuilder.build());
  JobQueue queue = queueBuild.build();
  _driver.createQueue(queue);

  List<JobConfig.Builder> jobConfigBuilders = new ArrayList<JobConfig.Builder>();
  for (String testDbName : _testDbs) {
    jobConfigBuilders.add(
        new JobConfig.Builder().setCommand(MockTask.TASK_COMMAND).setTargetResource(testDbName)
            .setTargetPartitionStates(Collections.singleton("SLAVE"))
            .setJobCommandConfigMap(Collections.singletonMap(MockTask.JOB_DELAY, "1000")));
  }

  _driver.stop(queueName);
  for (int i = 0; i < jobConfigBuilders.size(); ++i) {
    _driver.enqueueJob(queueName, "job_" + (i + 1), jobConfigBuilders.get(i));
  }
  _driver.resume(queueName);
  Thread.sleep(1000L);
  Assert.assertTrue(TaskTestUtil.pollForWorkflowParallelState(_driver, queueName));
}
Example 11
Source File: JobQueuesResource.java From helix with Apache License 2.0
/**
 * Add a new job queue
 * <p>
 * Usage:
 * <code>curl -d @'{jobQueueConfig.yaml}'
 * -H 'Content-Type: application/json' http://{host:port}/clusters/{clusterName}/jobQueues</code>
 * <p>
 * For jobQueueConfig.yaml, see {@link Workflow#parse(String)}
 */
@Override
public Representation post(Representation entity) {
  try {
    String clusterName =
        ResourceUtil.getAttributeFromRequest(getRequest(), ResourceUtil.RequestKey.CLUSTER_NAME);
    ZkClient zkClient =
        ResourceUtil.getAttributeFromCtx(getContext(), ResourceUtil.ContextKey.ZKCLIENT);

    Form form = new Form(entity);
    // Get the job queue and submit it
    if (form.size() < 1) {
      throw new HelixException("Yaml job queue config is required!");
    }
    Parameter payload = form.get(0);
    String yamlPayload = payload.getName();
    if (yamlPayload == null) {
      throw new HelixException("Yaml job queue config is required!");
    }

    Workflow workflow = Workflow.parse(yamlPayload);
    JobQueue.Builder jobQueueCfgBuilder = new JobQueue.Builder(workflow.getName());
    jobQueueCfgBuilder.fromMap(workflow.getWorkflowConfig().getResourceConfigMap());
    TaskDriver driver = new TaskDriver(zkClient, clusterName);
    driver.createQueue(jobQueueCfgBuilder.build());

    getResponse().setEntity(getHostedEntitiesRepresentation(clusterName));
    getResponse().setStatus(Status.SUCCESS_OK);
  } catch (Exception e) {
    getResponse().setEntity(ClusterRepresentationUtil.getErrorAsJsonStringFromException(e),
        MediaType.APPLICATION_JSON);
    getResponse().setStatus(Status.SUCCESS_OK);
    LOG.error("Exception in posting job queue: " + entity, e);
  }
  return null;
}
Example 12
Source File: TestDisableJobExternalView.java From helix with Apache License 2.0
/**
 * This test is no longer valid since Helix no longer computes ExternalView for Task Framework
 * resources. Contexts effectively serve as ExternalView for task resources.
 * **This test has been modified to test that there are no job-related resources appearing in
 * ExternalView**
 * @throws Exception
 */
@Test
public void testJobsDisableExternalView() throws Exception {
  String queueName = TestHelper.getTestMethodName();

  ExternviewChecker externviewChecker = new ExternviewChecker();
  _manager.addExternalViewChangeListener(externviewChecker);

  // Create a queue
  LOG.info("Starting job-queue: " + queueName);
  JobQueue.Builder queueBuilder = TaskTestUtil.buildJobQueue(queueName);

  JobConfig.Builder job1 = new JobConfig.Builder().setCommand(MockTask.TASK_COMMAND)
      .setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
      .setTargetPartitionStates(Sets.newHashSet("SLAVE"));

  JobConfig.Builder job2 = new JobConfig.Builder().setCommand(MockTask.TASK_COMMAND)
      .setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
      .setTargetPartitionStates(Sets.newHashSet("SLAVE")).setDisableExternalView(true);

  JobConfig.Builder job3 = new JobConfig.Builder().setCommand(MockTask.TASK_COMMAND)
      .setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
      .setTargetPartitionStates(Sets.newHashSet("MASTER")).setDisableExternalView(false);

  // Enqueue jobs
  queueBuilder.enqueueJob("job1", job1);
  queueBuilder.enqueueJob("job2", job2);
  queueBuilder.enqueueJob("job3", job3);

  _driver.createQueue(queueBuilder.build());

  // Ensure all jobs are completed
  String namedSpaceJob3 = String.format("%s_%s", queueName, "job3");
  _driver.pollForJobState(queueName, namedSpaceJob3, TaskState.COMPLETED);

  Set<String> seenExternalViews = externviewChecker.getSeenExternalViews();
  String namedSpaceJob1 = String.format("%s_%s", queueName, "job1");
  String namedSpaceJob2 = String.format("%s_%s", queueName, "job2");

  Assert.assertTrue(!seenExternalViews.contains(namedSpaceJob1),
      "ExternalView found for " + namedSpaceJob1 + ". Jobs shouldn't be in EV!");
  Assert.assertTrue(!seenExternalViews.contains(namedSpaceJob2),
      "External View for " + namedSpaceJob2 + " should not exist!");
  Assert.assertTrue(!seenExternalViews.contains(namedSpaceJob3),
      "ExternalView found for " + namedSpaceJob3 + ". Jobs shouldn't be in EV!");

  _manager
      .removeListener(new PropertyKey.Builder(CLUSTER_NAME).externalViews(), externviewChecker);
}
Example 13
Source File: TestDeleteWorkflow.java From helix with Apache License 2.0
@Test
public void testDeleteWorkflowForcefully() throws InterruptedException {
  String jobQueueName = TestHelper.getTestMethodName();
  JobConfig.Builder jobBuilder = JobConfig.Builder.fromMap(WorkflowGenerator.DEFAULT_JOB_CONFIG)
      .setMaxAttemptsPerTask(1).setWorkflow(jobQueueName)
      .setJobCommandConfigMap(ImmutableMap.of(MockTask.JOB_DELAY, "1000000"));
  JobQueue.Builder jobQueue = TaskTestUtil.buildJobQueue(jobQueueName);
  jobQueue.enqueueJob("job1", jobBuilder);
  _driver.start(jobQueue.build());
  _driver.pollForJobState(jobQueueName, TaskUtil.getNamespacedJobName(jobQueueName, "job1"),
      TaskState.IN_PROGRESS);

  // Check that WorkflowConfig, WorkflowContext, and IdealState are indeed created for this job
  // queue
  Assert.assertNotNull(_driver.getWorkflowConfig(jobQueueName));
  Assert.assertNotNull(_driver.getWorkflowContext(jobQueueName));
  Assert.assertNotNull(_driver.getJobConfig(TaskUtil.getNamespacedJobName(jobQueueName, "job1")));
  Assert
      .assertNotNull(_driver.getJobContext(TaskUtil.getNamespacedJobName(jobQueueName, "job1")));
  Assert.assertNotNull(admin.getResourceIdealState(CLUSTER_NAME, jobQueueName));

  // Delete the IdealState of the workflow
  HelixDataAccessor accessor = _manager.getHelixDataAccessor();
  PropertyKey.Builder keyBuild = accessor.keyBuilder();
  accessor.removeProperty(keyBuild.idealStates(jobQueueName));
  Assert.assertNull(admin.getResourceIdealState(CLUSTER_NAME, jobQueueName));

  // Attempt the deletion; it should time out since the IdealState no longer exists
  try {
    _driver.deleteAndWaitForCompletion(jobQueueName, DELETE_DELAY);
    Assert.fail(
        "Delete must time out and throw a HelixException with the IdealState removed, but did not!");
  } catch (HelixException e) {
    // Pass
  }

  // Delete forcefully
  _driver.delete(jobQueueName, true);

  Assert.assertNull(_driver.getWorkflowConfig(jobQueueName));
  Assert.assertNull(_driver.getWorkflowContext(jobQueueName));
  Assert.assertNull(_driver.getJobConfig(TaskUtil.getNamespacedJobName(jobQueueName, "job1")));
  Assert.assertNull(_driver.getJobContext(TaskUtil.getNamespacedJobName(jobQueueName, "job1")));
  Assert.assertNull(admin.getResourceIdealState(CLUSTER_NAME, jobQueueName));
}
Example 14
Source File: TaskTestUtil.java From helix with Apache License 2.0
public static JobQueue.Builder buildJobQueue(String jobQueueName, int delayStart,
    int failureThreshold) {
  return buildJobQueue(jobQueueName, delayStart, failureThreshold, 500);
}
Example 15
Source File: TestEnqueueJobs.java From helix with Apache License 2.0
@Test
public void testQueueParallelJobs() throws InterruptedException {
  final int parallelJobs = 3;
  final int numberOfJobsAddedBeforeControllerSwitch = 4;
  final int totalNumberOfJobs = 7;
  String queueName = TestHelper.getTestMethodName();
  JobQueue.Builder builder = TaskTestUtil.buildJobQueue(queueName);
  WorkflowConfig.Builder workflowCfgBuilder = new WorkflowConfig.Builder()
      .setWorkflowId(queueName).setParallelJobs(parallelJobs).setAllowOverlapJobAssignment(true);
  _driver.start(builder.setWorkflowConfig(workflowCfgBuilder.build()).build());
  JobConfig.Builder jobBuilder =
      new JobConfig.Builder().setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
          .setCommand(MockTask.TASK_COMMAND).setMaxAttemptsPerTask(2)
          .setJobCommandConfigMap(Collections.singletonMap(MockTask.JOB_DELAY, "10000"));

  // Add 4 jobs to the queue
  for (int i = 0; i < numberOfJobsAddedBeforeControllerSwitch; i++) {
    _driver.enqueueJob(queueName, "JOB" + i, jobBuilder);
  }

  // Wait until all of the enqueued jobs (Job0 to Job3) are finished
  for (int i = 0; i < numberOfJobsAddedBeforeControllerSwitch; i++) {
    _driver.pollForJobState(queueName, TaskUtil.getNamespacedJobName(queueName, "JOB" + i),
        TaskState.COMPLETED);
  }

  // Stop the Controller
  _controller.syncStop();

  // Add 3 more jobs to the queue which should run in parallel after the Controller is started
  for (int i = numberOfJobsAddedBeforeControllerSwitch; i < totalNumberOfJobs; i++) {
    _driver.enqueueJob(queueName, "JOB" + i, jobBuilder);
  }

  // Start the Controller
  String controllerName = CONTROLLER_PREFIX + "_0";
  _controller = new ClusterControllerManager(ZK_ADDR, CLUSTER_NAME, controllerName);
  _controller.syncStart();

  // Wait until all of the newly added jobs (Job4 to Job6) are finished
  for (int i = numberOfJobsAddedBeforeControllerSwitch; i < totalNumberOfJobs; i++) {
    _driver.pollForJobState(queueName, TaskUtil.getNamespacedJobName(queueName, "JOB" + i),
        TaskState.COMPLETED);
  }

  // Make sure the jobs have been running in parallel by checking the jobs start time and finish
  // time
  long maxStartTime = Long.MIN_VALUE;
  long minFinishTime = Long.MAX_VALUE;
  for (int i = numberOfJobsAddedBeforeControllerSwitch; i < totalNumberOfJobs; i++) {
    JobContext jobContext =
        _driver.getJobContext(TaskUtil.getNamespacedJobName(queueName, "JOB" + i));
    maxStartTime = Long.max(maxStartTime, jobContext.getStartTime());
    minFinishTime = Long.min(minFinishTime, jobContext.getFinishTime());
  }
  Assert.assertTrue(minFinishTime > maxStartTime);
}
Example 16
Source File: TaskTestUtil.java From helix with Apache License 2.0
public static JobQueue.Builder buildRecurrentJobQueue(String jobQueueName, int delayStart,
    int recurrenceInSeconds) {
  return buildRecurrentJobQueue(jobQueueName, delayStart, recurrenceInSeconds, null);
}
Example 17
Source File: TaskTestUtil.java From helix with Apache License 2.0
public static JobQueue.Builder buildRecurrentJobQueue(String jobQueueName, int delayStart) {
  return buildRecurrentJobQueue(jobQueueName, delayStart, 60);
}
Example 18
Source File: TestTaskStopQueue.java From helix with Apache License 2.0
@Test
public void testStopRunningQueue() throws InterruptedException {
  String jobQueueName = TestHelper.getTestMethodName();

  JobConfig.Builder jobBuilder0 = JobConfig.Builder.fromMap(WorkflowGenerator.DEFAULT_JOB_CONFIG)
      .setTimeoutPerTask(TIMEOUT).setMaxAttemptsPerTask(10).setWorkflow(jobQueueName)
      .setJobCommandConfigMap(ImmutableMap.of(MockTask.JOB_DELAY, EXECUTION_TIME));

  JobQueue.Builder jobQueue = TaskTestUtil.buildJobQueue(jobQueueName);
  jobQueue.enqueueJob("JOB0", jobBuilder0);

  _driver.start(jobQueue.build());
  _driver.pollForWorkflowState(jobQueueName, TaskState.IN_PROGRESS);
  _driver.pollForJobState(jobQueueName, TaskUtil.getNamespacedJobName(jobQueueName, "JOB0"),
      TaskState.IN_PROGRESS);

  _controller.syncStop();
  _driver.stop(jobQueueName);

  String namespacedJobName = TaskUtil.getNamespacedJobName(jobQueueName, "JOB0");

  for (int i = 0; i < _numNodes; i++) {
    String instance = PARTICIPANT_PREFIX + "_" + (_startPort + i);
    ZkClient client = (ZkClient) _participants[i].getZkClient();
    String sessionId = ZkTestHelper.getSessionId(client);
    String currentStatePath = "/" + CLUSTER_NAME + "/INSTANCES/" + instance + "/CURRENTSTATES/"
        + sessionId + "/" + namespacedJobName;
    _manager.getHelixDataAccessor().getBaseDataAccessor().remove(currentStatePath,
        AccessOption.PERSISTENT);
    Assert.assertFalse(_manager.getHelixDataAccessor().getBaseDataAccessor()
        .exists(currentStatePath, AccessOption.PERSISTENT));
  }

  String previousAssignment = "/" + CLUSTER_NAME + "/PROPERTYSTORE/TaskRebalancer/"
      + namespacedJobName + "/PreviousResourceAssignment";
  _manager.getHelixDataAccessor().getBaseDataAccessor().remove(previousAssignment,
      AccessOption.PERSISTENT);
  Assert.assertFalse(_manager.getHelixDataAccessor().getBaseDataAccessor()
      .exists(previousAssignment, AccessOption.PERSISTENT));

  // Start the Controller
  String controllerName = CONTROLLER_PREFIX + "_1";
  _controller = new ClusterControllerManager(ZK_ADDR, CLUSTER_NAME, controllerName);
  _controller.syncStart();

  _driver.pollForWorkflowState(jobQueueName, TaskState.STOPPED);
  _driver.pollForJobState(jobQueueName, TaskUtil.getNamespacedJobName(jobQueueName, "JOB0"),
      TaskState.STOPPED);
}
Example 19
Source File: TestRecurringJobQueue.java From helix with Apache License 2.0
@Test
public void deleteJobFromRecurrentQueueNotStarted() throws Exception {
  String queueName = TestHelper.getTestMethodName();

  // Create a queue
  LOG.info("Starting job-queue: " + queueName);
  JobQueue.Builder queueBuilder = TaskTestUtil.buildRecurrentJobQueue(queueName);

  // Create jobs
  List<JobConfig.Builder> jobs = new ArrayList<JobConfig.Builder>();
  List<String> jobNames = new ArrayList<String>();
  Map<String, String> commandConfig = ImmutableMap.of(MockTask.JOB_DELAY, String.valueOf(500));

  final int JOB_COUNTS = 3;
  for (int i = 0; i < JOB_COUNTS; i++) {
    String targetPartition = (i == 0) ? "MASTER" : "SLAVE";
    JobConfig.Builder job = new JobConfig.Builder().setCommand(MockTask.TASK_COMMAND)
        .setJobCommandConfigMap(commandConfig)
        .setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
        .setTargetPartitionStates(Sets.newHashSet(targetPartition));
    jobs.add(job);
    jobNames.add(targetPartition.toLowerCase() + "Job" + i);
  }

  // Enqueue all jobs except the last one
  for (int i = 0; i < JOB_COUNTS - 1; ++i) {
    LOG.info("Enqueuing job: " + jobNames.get(i));
    queueBuilder.enqueueJob(jobNames.get(i), jobs.get(i));
  }
  _driver.createQueue(queueBuilder.build());

  String currentLastJob = jobNames.get(JOB_COUNTS - 2);

  WorkflowContext wCtx = TaskTestUtil.pollForWorkflowContext(_driver, queueName);
  String scheduledQueue = wCtx.getLastScheduledSingleWorkflow();

  // Ensure all jobs are finished
  String namedSpaceJob = String.format("%s_%s", scheduledQueue, currentLastJob);
  _driver.pollForJobState(scheduledQueue, namedSpaceJob, TaskState.COMPLETED);

  // Enqueue the last job
  LOG.info("Enqueuing job: " + jobNames.get(JOB_COUNTS - 1));
  _driver.enqueueJob(queueName, jobNames.get(JOB_COUNTS - 1), jobs.get(JOB_COUNTS - 1));
  _driver.stop(queueName);

  // Remove the last job
  _driver.deleteJob(queueName, jobNames.get(JOB_COUNTS - 1));

  // Verify the job was deleted
  verifyJobDeleted(queueName,
      String.format("%s_%s", scheduledQueue, jobNames.get(JOB_COUNTS - 1)));
}
Example 20
Source File: TestJobQueueCleanUp.java From helix with Apache License 2.0
@Test
public void testJobQueueAutoCleanUp() throws InterruptedException {
  int capacity = 10;
  String queueName = TestHelper.getTestMethodName();
  JobQueue.Builder builder = TaskTestUtil.buildJobQueue(queueName, capacity);
  WorkflowConfig.Builder cfgBuilder = new WorkflowConfig.Builder(builder.getWorkflowConfig());
  cfgBuilder.setJobPurgeInterval(1000);
  builder.setWorkflowConfig(cfgBuilder.build());

  JobConfig.Builder jobBuilder =
      new JobConfig.Builder().setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
          .setCommand(MockTask.TASK_COMMAND).setMaxAttemptsPerTask(2).setJobCommandConfigMap(
              ImmutableMap.of(MockTask.SUCCESS_COUNT_BEFORE_FAIL, String.valueOf(capacity / 2)))
          .setExpiry(200L);
  Set<String> deletedJobs = new HashSet<String>();
  Set<String> remainJobs = new HashSet<String>();
  for (int i = 0; i < capacity; i++) {
    builder.enqueueJob("JOB" + i, jobBuilder);
    if (i < capacity / 2) {
      deletedJobs.add("JOB" + i);
    } else {
      remainJobs.add(TaskUtil.getNamespacedJobName(queueName, "JOB" + i));
    }
  }
  _driver.start(builder.build());
  _driver.pollForJobState(queueName,
      TaskUtil.getNamespacedJobName(queueName, "JOB" + (capacity - 1)), TaskState.FAILED);
  Thread.sleep(2000);

  WorkflowConfig config = _driver.getWorkflowConfig(queueName);
  Assert.assertEquals(config.getJobDag().getAllNodes(), remainJobs);

  WorkflowContext context = _driver.getWorkflowContext(queueName);
  Assert.assertEquals(context.getJobStates().keySet(), remainJobs);
  Assert.assertTrue(remainJobs.containsAll(context.getJobStartTimes().keySet()));

  for (String job : deletedJobs) {
    JobConfig cfg = _driver.getJobConfig(job);
    JobContext ctx = _driver.getJobContext(job);
    Assert.assertNull(cfg);
    Assert.assertNull(ctx);
  }
}