org.apache.kylin.job.execution.Output Java Examples

The following examples show how to use org.apache.kylin.job.execution.Output. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: FetcherRunner.java    From kylin-on-parquet-v2 with Apache License 2.0 6 votes vote down vote up
/**
 * Tallies the job with the given id into the scheduler's per-state counters
 * (nSUCCEED / nError / nDiscarded / nStopped / nOthers).
 *
 * @param id job id whose output digest is inspected
 */
protected void jobStateCount(String id) {
    final Output outputDigest = getExecutableManager().getOutputDigest(id);
    // Read the state once instead of re-fetching it for every comparison.
    final ExecutableState state = outputDigest.getState();
    switch (state) {
    case SUCCEED:
        succeedJobs.add(id);
        nSUCCEED++;
        break;
    case ERROR:
        nError++;
        break;
    case DISCARDED:
        nDiscarded++;
        break;
    case STOPPED:
        nStopped++;
        break;
    default:
        // Non-terminal state: if an earlier metadata fetch failed, the job's
        // status is unreliable - force-kill it and count it as an error.
        if (fetchFailed) {
            getExecutableManager().forceKillJob(id);
            nError++;
        } else {
            nOthers++;
        }
        break;
    }
}
 
Example #2
Source File: JobInstanceExtractor.java    From kylin-on-parquet-v2 with Apache License 2.0 6 votes vote down vote up
/**
 * Lists job instances for cubing jobs whose executables fall inside the given
 * time window, optionally filtered by cube name and project.
 *
 * @param project   project name filter; null matches any project
 * @param cube      cube name filter (case-insensitive); null matches any cube
 * @param startTime window start in millis (passed to getAllExecutables)
 * @param endTime   window end in millis
 * @return matching jobs converted to {@code JobInstance} objects
 */
private List<JobInstance> listJobInstances(String project, String cube, long startTime, long endTime) {
    final List<JobInstance> result = Lists.newArrayList();
    final List<AbstractExecutable> executables = executableManager.getAllExecutables(startTime, endTime);
    // Fetch all outputs up front so each conversion does a map lookup instead of
    // an individual metadata read per job.
    final Map<String, Output> allOutputs = executableManager.getAllOutputs();
    for (AbstractExecutable executable : executables) {
        // Only cubing jobs are reported; other executable types are skipped.
        if (executable instanceof CubingJob) {
            String cubeName = CubingExecutableUtil.getCubeName(executable.getParams());
            boolean shouldExtract = false;
            if (cube == null || cube.equalsIgnoreCase(cubeName)) {
                if (project == null) {
                    shouldExtract = true;
                } else {
                    // Match the project only when it actually contains this cube realization.
                    ProjectInstance projectInstance = projectManager.getProject(project);
                    if (projectInstance != null && projectInstance.containsRealization(RealizationType.CUBE, cubeName)) {
                        shouldExtract = true;
                    }
                }
            }

            if (shouldExtract) {
                result.add(parseToJobInstance((CubingJob) executable, allOutputs));
            }
        }
    }
    return result;
}
 
Example #3
Source File: JobInstanceExtractor.java    From kylin with Apache License 2.0 6 votes vote down vote up
/**
 * Converts one task of a job into a {@code JobInstance.JobStep} view object
 * using the step's pre-fetched output.
 *
 * @param task       the executable task backing this step
 * @param i          the step's sequence number within its parent job
 * @param stepOutput the task's output; must not be null
 * @return the populated step
 */
private JobInstance.JobStep parseToJobStep(AbstractExecutable task, int i, Output stepOutput) {
    Preconditions.checkNotNull(stepOutput);
    JobInstance.JobStep result = new JobInstance.JobStep();
    result.setId(task.getId());
    result.setName(task.getName());
    result.setSequenceID(i);
    result.setStatus(parseToJobStepStatus(stepOutput.getState()));
    // Copy extra-info entries, skipping null keys and values.
    for (Map.Entry<String, String> entry : stepOutput.getExtra().entrySet()) {
        if (entry.getKey() != null && entry.getValue() != null) {
            result.putInfo(entry.getKey(), entry.getValue());
        }
    }
    result.setExecStartTime(AbstractExecutable.getStartTime(stepOutput));
    result.setExecEndTime(AbstractExecutable.getEndTime(stepOutput));
    // The exec command / wait time depend on the concrete task type.
    if (task instanceof ShellExecutable) {
        result.setExecCmd(((ShellExecutable) task).getCmd());
    }
    if (task instanceof MapReduceExecutable) {
        result.setExecCmd(((MapReduceExecutable) task).getMapReduceParams());
        // MAP_REDUCE_WAIT_TIME is stored in millis; report seconds.
        result.setExecWaitTime(AbstractExecutable.getExtraInfoAsLong(stepOutput, MapReduceExecutable.MAP_REDUCE_WAIT_TIME, 0L) / 1000);
    }
    if (task instanceof HadoopShellExecutable) {
        result.setExecCmd(((HadoopShellExecutable) task).getJobParams());
    }
    return result;
}
 
Example #4
Source File: JobInstanceExtractor.java    From kylin with Apache License 2.0 6 votes vote down vote up
/**
 * Collects cubing jobs in the given time window that pass the cube-name and
 * project filters, converted to {@code JobInstance} objects.
 *
 * @param project   project name filter; null matches any project
 * @param cube      cube name filter (case-insensitive); null matches any cube
 * @param startTime window start in millis
 * @param endTime   window end in millis
 * @return the matching job instances
 */
private List<JobInstance> listJobInstances(String project, String cube, long startTime, long endTime) {
    final List<JobInstance> instances = Lists.newArrayList();
    final List<AbstractExecutable> candidates = executableManager.getAllExecutables(startTime, endTime);
    // One bulk read of all outputs; conversions below only do map lookups.
    final Map<String, Output> outputsById = executableManager.getAllOutputs();
    for (AbstractExecutable candidate : candidates) {
        if (!(candidate instanceof CubingJob)) {
            continue; // only cubing jobs are reported
        }
        String cubeName = CubingExecutableUtil.getCubeName(candidate.getParams());
        if (cube != null && !cube.equalsIgnoreCase(cubeName)) {
            continue; // cube-name filter rejected this job
        }
        boolean matchesProject;
        if (project == null) {
            matchesProject = true;
        } else {
            // Accept only when the named project exists and contains this cube realization.
            ProjectInstance projectInstance = projectManager.getProject(project);
            matchesProject = projectInstance != null
                    && projectInstance.containsRealization(RealizationType.CUBE, cubeName);
        }
        if (matchesProject) {
            instances.add(parseToJobInstance((CubingJob) candidate, outputsById));
        }
    }
    return instances;
}
 
Example #5
Source File: JobService.java    From kylin-on-parquet-v2 with Apache License 2.0 6 votes vote down vote up
/**
 * Searches checkpoint jobs matching the given filters and converts each match
 * to a {@code JobInstance}.
 *
 * @param cubeName    cube name filter
 * @param jobName     job name filter
 * @param projectName project name filter
 * @param statusList  job statuses to include; converted to executable states
 * @param timeFilter  bounds the start of the search window
 * @return matching checkpoint jobs as job instances
 */
public List<JobInstance> innerSearchCheckpointJobs(final String cubeName, final String jobName,
        final String projectName, final List<JobStatusEnum> statusList, final JobTimeFilterEnum timeFilter) {
    // TODO: use cache of jobs for this method
    // prepare time range
    Calendar calendar = Calendar.getInstance(TimeZone.getDefault(), Locale.ROOT);
    calendar.setTime(new Date());
    long timeStartInMillis = getTimeStartInMillis(calendar, timeFilter);
    // Only the start of the window is bounded by the time filter; the end is open.
    long timeEndInMillis = Long.MAX_VALUE;
    Set<ExecutableState> states = convertStatusEnumToStates(statusList);
    // Bulk-load outputs once; the conversion below reuses this map per job.
    final Map<String, Output> allOutputs = getExecutableManager().getAllOutputs(timeStartInMillis, timeEndInMillis);

    return Lists
            .newArrayList(FluentIterable
                    .from(innerSearchCheckpointJobs(cubeName, jobName, states, timeStartInMillis, timeEndInMillis,
                            allOutputs, false, projectName))
                    .transform(new Function<CheckpointExecutable, JobInstance>() {
                        @Override
                        public JobInstance apply(CheckpointExecutable checkpointExecutable) {
                            // Quiet variant: a job that fails to parse yields null instead of throwing.
                            return JobInfoConverter.parseToJobInstanceQuietly(checkpointExecutable, allOutputs);
                        }
                    }));
}
 
Example #6
Source File: JobInfoConverterTest.java    From kylin with Apache License 2.0 6 votes vote down vote up
@Test
public void testParseToJobInstance4CuboidJob() {
    // A cubing job whose output is in READY state should convert to a
    // PENDING BUILD instance carrying the job id and cube name.
    TestJob task = new TestJob();
    String jobId = UUID.randomUUID().toString();
    String cubeName = "cube1";
    task.setId(jobId);
    task.setParam(CubingExecutableUtil.CUBE_NAME, cubeName);
    Map<String, Output> outPutMap = Maps.newHashMap();
    DefaultOutput executeOutput = new DefaultOutput();
    executeOutput.setState(ExecutableState.READY);
    Map<String, String> extraMap = Maps.newHashMap();
    executeOutput.setExtra(extraMap);
    outPutMap.put(jobId, executeOutput);

    JobInstance instance3 = JobInfoConverter.parseToJobInstanceQuietly(task, outPutMap);
    // no exception thrown is expected
    assertEquals(jobId, instance3.getId());
    assertEquals(CubeBuildTypeEnum.BUILD, instance3.getType());
    assertEquals(cubeName, instance3.getRelatedCube());
    assertEquals(JobStatusEnum.PENDING, instance3.getStatus());
}
 
Example #7
Source File: JobInfoConverterTest.java    From kylin with Apache License 2.0 6 votes vote down vote up
@Test
public void testParseToJobInstance4CheckpointJob() {
    // A checkpoint job whose output is in READY state should convert to a
    // PENDING CHECKPOINT instance carrying the job id and cube name.
    Test2Job task = new Test2Job();
    String jobId = UUID.randomUUID().toString();
    String cubeName = "cube1";
    task.setId(jobId);
    task.setParam(CubingExecutableUtil.CUBE_NAME, cubeName);
    Map<String, Output> outPutMap = Maps.newHashMap();
    DefaultOutput executeOutput = new DefaultOutput();
    executeOutput.setState(ExecutableState.READY);
    Map<String, String> extraMap = Maps.newHashMap();
    executeOutput.setExtra(extraMap);
    outPutMap.put(jobId, executeOutput);

    JobInstance instance3 = JobInfoConverter.parseToJobInstanceQuietly(task, outPutMap);
    // no exception thrown is expected
    assertEquals(jobId, instance3.getId());
    assertEquals(CubeBuildTypeEnum.CHECKPOINT, instance3.getType());
    assertEquals(cubeName, instance3.getRelatedCube());
    assertEquals(JobStatusEnum.PENDING, instance3.getStatus());
}
 
Example #8
Source File: DistributedScheduler.java    From kylin with Apache License 2.0 6 votes vote down vote up
/**
 * On scheduler start-up, force-resumes every chained job recorded as RUNNING
 * that no server currently holds a lock for, then wakes the fetcher.
 */
private void resumeAllRunningJobs() {
    for (final String id : executableManager.getAllJobIds()) {
        final Output output = executableManager.getOutput(id);
        AbstractExecutable executable = executableManager.getJob(id);
        if (output.getState() == ExecutableState.RUNNING && executable instanceof DefaultChainedExecutable) {
            try {
                // No lock held means no server is driving this job; take it over.
                if (!jobLock.isLocked(getLockPath(executable.getId()))) {
                    executableManager.resumeRunningJobForce(executable.getId());
                    fetcherPool.schedule(fetcher, 0, TimeUnit.SECONDS);
                }
            } catch (Exception e) {
                // Keep iterating: one failed resume must not block the others.
                logger.error("resume the job " + id + " fail in server: " + serverName, e);
            }
        }
    }
}
 
Example #9
Source File: SparkExecutable.java    From kylin-on-parquet-v2 with Apache License 2.0 6 votes vote down vote up
@Override
protected void onExecuteStart(ExecutableContext executableContext) {
    // Decide how to (re)start this Spark step. A recorded START_TIME means a
    // previous attempt ran: keep RUNNING when the old Spark app is still alive,
    // otherwise fall back to a clean start via super.
    final Output output = getOutput();
    if (output.getExtra().containsKey(START_TIME)) {
        final String sparkJobID = output.getExtra().get(ExecutableConstants.SPARK_JOB_ID);
        if (StringUtils.isEmpty(sparkJobID)) {
            // Started before but never got a Spark job id; just mark RUNNING again.
            getManager().updateJobOutput(getId(), ExecutableState.RUNNING, null, null);
            return;
        }
        try {
            String status = getAppState(sparkJobID);
            if (status == null || status.equals("FAILED") || status.equals("KILLED")) {
                //remove previous mr job info
                super.onExecuteStart(executableContext);
            } else {
                getManager().updateJobOutput(getId(), ExecutableState.RUNNING, null, null);
            }
        } catch (IOException e) {
            // Pass the exception to the logger so the cause of the status
            // lookup failure is not silently lost.
            logger.warn("error get hadoop status", e);
            super.onExecuteStart(executableContext);
        }
    } else {
        super.onExecuteStart(executableContext);
    }
}
 
Example #10
Source File: FetcherRunner.java    From kylin with Apache License 2.0 6 votes vote down vote up
/**
 * Tallies the job with the given id into the scheduler's per-state counters
 * (nSUCCEED / nError / nDiscarded / nStopped / nOthers).
 *
 * @param id job id whose output digest is inspected
 */
protected void jobStateCount(String id) {
    final Output outputDigest = getExecutableManager().getOutputDigest(id);
    // Read the state once instead of re-fetching it for every comparison.
    final ExecutableState state = outputDigest.getState();
    switch (state) {
    case SUCCEED:
        succeedJobs.add(id);
        nSUCCEED++;
        break;
    case ERROR:
        nError++;
        break;
    case DISCARDED:
        nDiscarded++;
        break;
    case STOPPED:
        nStopped++;
        break;
    default:
        // Non-terminal state: if an earlier metadata fetch failed, the job's
        // status is unreliable - force-kill it and count it as an error.
        if (fetchFailed) {
            getExecutableManager().forceKillJob(id);
            nError++;
        } else {
            nOthers++;
        }
        break;
    }
}
 
Example #11
Source File: DistributedScheduler.java    From kylin-on-parquet-v2 with Apache License 2.0 6 votes vote down vote up
/**
 * On scheduler start-up, force-resumes every RUNNING chained job that no
 * server currently holds a lock for, then wakes the fetcher.
 */
private void resumeAllRunningJobs() {
    for (final String jobId : executableManager.getAllJobIds()) {
        final Output jobOutput = executableManager.getOutput(jobId);
        final AbstractExecutable job = executableManager.getJob(jobId);
        // Only RUNNING chained jobs are candidates for takeover.
        if (jobOutput.getState() != ExecutableState.RUNNING || !(job instanceof DefaultChainedExecutable)) {
            continue;
        }
        try {
            // An unheld lock means no server is driving this job; resume it here.
            if (!jobLock.isLocked(getLockPath(job.getId()))) {
                executableManager.resumeRunningJobForce(job.getId());
                fetcherPool.schedule(fetcher, 0, TimeUnit.SECONDS);
            }
        } catch (Exception e) {
            // One failed resume must not stop the remaining jobs from resuming.
            logger.error("resume the job " + jobId + " fail in server: " + serverName, e);
        }
    }
}
 
Example #12
Source File: JobService.java    From kylin with Apache License 2.0 6 votes vote down vote up
/**
 * Searches checkpoint jobs matching the given filters and converts each match
 * to a {@code JobInstance}.
 *
 * @param cubeName    cube name filter
 * @param jobName     job name filter
 * @param projectName project name filter
 * @param statusList  job statuses to include; converted to executable states
 * @param timeFilter  bounds the start of the search window
 * @return matching checkpoint jobs as job instances
 */
public List<JobInstance> innerSearchCheckpointJobs(final String cubeName, final String jobName,
        final String projectName, final List<JobStatusEnum> statusList, final JobTimeFilterEnum timeFilter) {
    // TODO: use cache of jobs for this method
    // prepare time range
    Calendar calendar = Calendar.getInstance(TimeZone.getDefault(), Locale.ROOT);
    calendar.setTime(new Date());
    long timeStartInMillis = getTimeStartInMillis(calendar, timeFilter);
    // Only the start of the window is bounded by the time filter; the end is open.
    long timeEndInMillis = Long.MAX_VALUE;
    Set<ExecutableState> states = convertStatusEnumToStates(statusList);
    // Bulk-load outputs once; the conversion below reuses this map per job.
    final Map<String, Output> allOutputs = getExecutableManager().getAllOutputs(timeStartInMillis, timeEndInMillis);

    return Lists
            .newArrayList(FluentIterable
                    .from(innerSearchCheckpointJobs(cubeName, jobName, states, timeStartInMillis, timeEndInMillis,
                            allOutputs, false, projectName))
                    .transform(new Function<CheckpointExecutable, JobInstance>() {
                        @Override
                        public JobInstance apply(CheckpointExecutable checkpointExecutable) {
                            // Quiet variant: a job that fails to parse yields null instead of throwing.
                            return JobInfoConverter.parseToJobInstanceQuietly(checkpointExecutable, allOutputs);
                        }
                    }));
}
 
Example #13
Source File: JobService.java    From Kylin with Apache License 2.0 6 votes vote down vote up
/**
 * Converts one task of a job into a {@code JobInstance.JobStep} view object,
 * reading the task's output directly from the executable manager.
 *
 * @param task the executable task backing this step
 * @param i    the step's sequence number within its parent job
 * @return the populated step
 */
private JobInstance.JobStep parseToJobStep(AbstractExecutable task, int i) {
    JobInstance.JobStep result = new JobInstance.JobStep();
    result.setId(task.getId());
    result.setName(task.getName());
    result.setSequenceID(i);
    result.setStatus(parseToJobStepStatus(task.getStatus()));
    // Per-task output read; NOTE(review): callers iterating many steps pay one
    // metadata read per step here.
    final Output output = getExecutableManager().getOutput(task.getId());
    // Copy extra-info entries, skipping null keys and values.
    for (Map.Entry<String, String> entry : output.getExtra().entrySet()) {
        if (entry.getKey() != null && entry.getValue() != null) {
            result.putInfo(entry.getKey(), entry.getValue());
        }
    }
    result.setExecStartTime(task.getStartTime());
    result.setExecEndTime(task.getEndTime());
    // The exec command / wait time depend on the concrete task type.
    if (task instanceof ShellExecutable) {
        result.setExecCmd(((ShellExecutable) task).getCmd());
    }
    if (task instanceof MapReduceExecutable) {
        result.setExecCmd(((MapReduceExecutable) task).getMapReduceParams());
        // Wait time is in millis; report seconds.
        result.setExecWaitTime(((MapReduceExecutable) task).getMapReduceWaitTime() / 1000);
    }
    if (task instanceof HadoopShellExecutable) {
        result.setExecCmd(((HadoopShellExecutable) task).getJobParams());
    }
    return result;
}
 
Example #14
Source File: JobInstanceExtractor.java    From kylin-on-parquet-v2 with Apache License 2.0 6 votes vote down vote up
/**
 * Converts one task of a job into a {@code JobInstance.JobStep} view object
 * using the step's pre-fetched output.
 *
 * @param task       the executable task backing this step
 * @param i          the step's sequence number within its parent job
 * @param stepOutput the task's output; must not be null
 * @return the populated step
 */
private JobInstance.JobStep parseToJobStep(AbstractExecutable task, int i, Output stepOutput) {
    Preconditions.checkNotNull(stepOutput);
    JobInstance.JobStep result = new JobInstance.JobStep();
    result.setId(task.getId());
    result.setName(task.getName());
    result.setSequenceID(i);
    result.setStatus(parseToJobStepStatus(stepOutput.getState()));
    // Copy extra-info entries, skipping null keys and values.
    for (Map.Entry<String, String> entry : stepOutput.getExtra().entrySet()) {
        if (entry.getKey() != null && entry.getValue() != null) {
            result.putInfo(entry.getKey(), entry.getValue());
        }
    }
    result.setExecStartTime(AbstractExecutable.getStartTime(stepOutput));
    result.setExecEndTime(AbstractExecutable.getEndTime(stepOutput));
    // The exec command / wait time depend on the concrete task type.
    if (task instanceof ShellExecutable) {
        result.setExecCmd(((ShellExecutable) task).getCmd());
    }
    if (task instanceof MapReduceExecutable) {
        result.setExecCmd(((MapReduceExecutable) task).getMapReduceParams());
        // MAP_REDUCE_WAIT_TIME is stored in millis; report seconds.
        result.setExecWaitTime(AbstractExecutable.getExtraInfoAsLong(stepOutput, MapReduceExecutable.MAP_REDUCE_WAIT_TIME, 0L) / 1000);
    }
    if (task instanceof HadoopShellExecutable) {
        result.setExecCmd(((HadoopShellExecutable) task).getJobParams());
    }
    return result;
}
 
Example #15
Source File: SparkExecutable.java    From kylin with Apache License 2.0 6 votes vote down vote up
@Override
protected void onExecuteStart(ExecutableContext executableContext) {
    // Decide how to (re)start this Spark step. A recorded START_TIME means a
    // previous attempt ran: keep RUNNING when the old Spark app is still alive,
    // otherwise fall back to a clean start via super.
    final Output output = getOutput();
    if (output.getExtra().containsKey(START_TIME)) {
        final String sparkJobID = output.getExtra().get(ExecutableConstants.SPARK_JOB_ID);
        if (StringUtils.isEmpty(sparkJobID)) {
            // Started before but never got a Spark job id; just mark RUNNING again.
            getManager().updateJobOutput(getId(), ExecutableState.RUNNING, null, null);
            return;
        }
        try {
            String status = getAppState(sparkJobID);
            if (status == null || status.equals("FAILED") || status.equals("KILLED")) {
                //remove previous mr job info
                super.onExecuteStart(executableContext);
            } else {
                getManager().updateJobOutput(getId(), ExecutableState.RUNNING, null, null);
            }
        } catch (IOException e) {
            // Pass the exception to the logger so the cause of the status
            // lookup failure is not silently lost.
            logger.warn("error get hadoop status", e);
            super.onExecuteStart(executableContext);
        }
    } else {
        super.onExecuteStart(executableContext);
    }
}
 
Example #16
Source File: JobService.java    From kylin with Apache License 2.0 5 votes vote down vote up
/**
 * Converts a lookup-snapshot build job into a {@code JobInstance} view object,
 * including one step per task.
 *
 * @param job the job to convert; may be null
 * @return the populated instance, or null if {@code job} is null
 */
protected JobInstance getLookupSnapshotBuildJobInstance(LookupSnapshotBuildJob job) {
    if (job == null) {
        return null;
    }
    Output output = job.getOutput();
    final JobInstance result = new JobInstance();
    result.setName(job.getName());
    result.setProjectName(job.getProjectName());
    result.setRelatedCube(CubingExecutableUtil.getCubeName(job.getParams()));
    result.setRelatedSegment(CubingExecutableUtil.getSegmentId(job.getParams()));
    result.setRelatedSegmentName(CubingExecutableUtil.getSegmentName(job.getParams()));
    result.setLastModified(job.getLastModified());
    result.setSubmitter(job.getSubmitter());
    result.setUuid(job.getId());
    result.setExecStartTime(job.getStartTime());
    result.setExecEndTime(job.getEndTime());
    result.setExecInterruptTime(job.getInterruptTime());
    result.setType(CubeBuildTypeEnum.BUILD);
    result.setStatus(JobInfoConverter.parseToJobStatus(job.getStatus()));
    result.setBuildInstance(AbstractExecutable.getBuildInstance(output));
    // Duration is tracked in millis; expose seconds.
    result.setDuration(job.getDuration() / 1000);
    // One step per task; each step's output is read individually.
    for (int i = 0; i < job.getTasks().size(); ++i) {
        AbstractExecutable task = job.getTasks().get(i);
        result.addStep(JobInfoConverter.parseToJobStep(task, i, getExecutableManager().getOutput(task.getId())));
    }
    return result;
}
 
Example #17
Source File: JobService.java    From kylin with Apache License 2.0 5 votes vote down vote up
/**
 * Converts a checkpoint job into a {@code JobInstance} view object, including
 * one step per task.
 *
 * @param job the job to convert; may be null
 * @return the populated instance, or null if {@code job} is null
 * @throws BadRequestException if {@code job} is not a CheckpointExecutable
 */
protected JobInstance getCheckpointJobInstance(AbstractExecutable job) {
    Message msg = MsgPicker.getMsg();

    if (job == null) {
        return null;
    }
    // Only checkpoint executables can be rendered by this method.
    if (!(job instanceof CheckpointExecutable)) {
        throw new BadRequestException(String.format(Locale.ROOT, msg.getILLEGAL_JOB_TYPE(), job.getId()));
    }

    CheckpointExecutable checkpointExecutable = (CheckpointExecutable) job;
    Output output = checkpointExecutable.getOutput();
    final JobInstance result = new JobInstance();
    result.setName(job.getName());
    result.setProjectName(checkpointExecutable.getProjectName());
    result.setRelatedCube(CubingExecutableUtil.getCubeName(job.getParams()));
    result.setDisplayCubeName(CubingExecutableUtil.getCubeName(job.getParams()));
    result.setLastModified(job.getLastModified());
    result.setSubmitter(job.getSubmitter());
    result.setUuid(job.getId());
    result.setExecStartTime(job.getStartTime());
    result.setExecEndTime(job.getEndTime());
    result.setExecInterruptTime(job.getInterruptTime());
    result.setType(CubeBuildTypeEnum.CHECKPOINT);
    result.setStatus(JobInfoConverter.parseToJobStatus(job.getStatus()));
    result.setBuildInstance(AbstractExecutable.getBuildInstance(output));
    // Duration is tracked in millis; expose seconds.
    result.setDuration(job.getDuration() / 1000);
    // One step per task; each step's output is read individually.
    for (int i = 0; i < checkpointExecutable.getTasks().size(); ++i) {
        AbstractExecutable task = checkpointExecutable.getTasks().get(i);
        result.addStep(JobInfoConverter.parseToJobStep(task, i, getExecutableManager().getOutput(task.getId())));
    }
    return result;
}
 
Example #18
Source File: JobService.java    From kylin with Apache License 2.0 5 votes vote down vote up
/**
 * Searches cubing jobs matching the given filters and converts each match to
 * a {@code JobInstance}, after verifying the caller's ACL permissions.
 *
 * @param cubeName    cube name filter
 * @param jobName     job name filter
 * @param projectName project name filter; null requires global-admin rights
 * @param statusList  job statuses to include; converted to executable states
 * @param timeFilter  bounds the start of the search window
 * @return matching cubing jobs as job instances (nulls from failed parses filtered out)
 */
public List<JobInstance> innerSearchCubingJobs(final String cubeName, final String jobName,
        final String projectName, final List<JobStatusEnum> statusList, final JobTimeFilterEnum timeFilter) {
    // ACL: project-scoped searches need operation permission; global searches need admin.
    if (null == projectName) {
        aclEvaluate.checkIsGlobalAdmin();
    } else {
        aclEvaluate.checkProjectOperationPermission(projectName);
    }
    // prepare time range
    Calendar calendar = Calendar.getInstance(TimeZone.getDefault(), Locale.ROOT);
    calendar.setTime(new Date());
    long timeStartInMillis = getTimeStartInMillis(calendar, timeFilter);
    // Only the start of the window is bounded by the time filter; the end is open.
    long timeEndInMillis = Long.MAX_VALUE;
    Set<ExecutableState> states = convertStatusEnumToStates(statusList);
    // Bulk-load outputs once; the conversion below reuses this map per job.
    final Map<String, Output> allOutputs = getExecutableManager().getAllOutputs(timeStartInMillis, timeEndInMillis);

    return Lists
            .newArrayList(
                    FluentIterable
                            .from(innerSearchCubingJobs(cubeName, jobName, states, timeStartInMillis,
                                    timeEndInMillis, allOutputs, false, projectName))
                            .transform(new Function<CubingJob, JobInstance>() {
                                @Override
                                public JobInstance apply(CubingJob cubingJob) {
                                    // Quiet variant returns null on parse failure...
                                    return JobInfoConverter.parseToJobInstanceQuietly(cubingJob, allOutputs);
                                }
                            }).filter(new Predicate<JobInstance>() {
                                @Override
                                public boolean apply(@Nullable JobInstance input) {
                                    // ...so nulls are dropped here.
                                    return input != null;
                                }
                            }));
}
 
Example #19
Source File: JobInfoConverterTest.java    From kylin with Apache License 2.0 5 votes vote down vote up
@Test
public void testParseToJobInstanceQuietlyUsingNullCubingJob() {
    // A null CubingJob must be tolerated quietly and yield a null instance.
    Map<String, Output> outputs = new ConcurrentHashMap<>();
    JobInstance parsed = JobInfoConverter.parseToJobInstanceQuietly((CubingJob) null, outputs);

    assertNull(parsed);
}
 
Example #20
Source File: JobInfoConverterTest.java    From kylin with Apache License 2.0 5 votes vote down vote up
@Test
public void testParseToJobInstanceQuietlyUsingNullCheckpointExecutable() {
    // A null CheckpointExecutable must be tolerated quietly and yield a null instance.
    Map<String, Output> outputs = new TreeMap<>();
    JobInstance parsed = JobInfoConverter.parseToJobInstanceQuietly((CheckpointExecutable) null, outputs);

    assertNull(parsed);
}
 
Example #21
Source File: JobInfoConverter.java    From kylin with Apache License 2.0 5 votes vote down vote up
/**
 * Same as {@code parseToJobInstance} but never propagates an exception:
 * failures are logged and null is returned instead.
 *
 * @param job     the cubing job to convert; may be null
 * @param outputs map of executable id to its output
 * @return the parsed instance, or null when parsing failed
 */
public static JobInstance parseToJobInstanceQuietly(CubingJob job, Map<String, Output> outputs) {
    try {
        return parseToJobInstance(job, outputs);
    } catch (Exception e) {
        // Log the job id - the "uuid" the message promises - not the whole job object.
        logger.error("Failed to parse job instance: uuid={}", job == null ? null : job.getId(), e);
        return null;
    }
}
 
Example #22
Source File: ExecutableManager.java    From Kylin with Apache License 2.0 5 votes vote down vote up
/**
 * Loads the persisted output of a job and adapts it to the {@code Output}
 * interface.
 *
 * @param uuid the job id
 * @return the job's output, never null
 * @throws IllegalArgumentException if no output exists for the given id
 * @throws RuntimeException wrapping any persistence failure
 */
public Output getOutput(String uuid) {
    try {
        final ExecutableOutputPO jobOutput = executableDao.getJobOutput(uuid);
        Preconditions.checkArgument(jobOutput != null, "there is no related output for job id:" + uuid);
        // Adapt the persisted PO into the in-memory Output representation.
        final DefaultOutput result = new DefaultOutput();
        result.setExtra(jobOutput.getInfo());
        result.setState(ExecutableState.valueOf(jobOutput.getStatus()));
        result.setVerboseMsg(jobOutput.getContent());
        result.setLastModified(jobOutput.getLastModified());
        return result;
    } catch (PersistentException e) {
        // Store access failures are surfaced as unchecked, with the cause preserved.
        logger.error("fail to get job output:" + uuid, e);
        throw new RuntimeException(e);
    }
}
 
Example #23
Source File: JobInfoConverter.java    From kylin with Apache License 2.0 5 votes vote down vote up
/**
 * Converts one task of a job into a {@code JobInstance.JobStep} view object
 * using the step's pre-fetched output.
 *
 * @param task       the executable task backing this step
 * @param i          the step's sequence number within its parent job
 * @param stepOutput the task's output; may be null, in which case only
 *                   id/name/sequence are populated
 * @return the populated step, never null
 */
public static JobInstance.JobStep parseToJobStep(AbstractExecutable task, int i, Output stepOutput) {
    JobInstance.JobStep result = new JobInstance.JobStep();
    result.setId(task.getId());
    result.setName(task.getName());
    result.setSequenceID(i);

    // Missing output (e.g. lost metadata) is tolerated: return a minimal step.
    if (stepOutput == null) {
        logger.warn("Cannot found output for task: id={}", task.getId());
        return result;
    }

    result.setStatus(parseToJobStepStatus(stepOutput.getState()));
    // Copy extra-info entries, skipping null keys and values.
    for (Map.Entry<String, String> entry : stepOutput.getExtra().entrySet()) {
        if (entry.getKey() != null && entry.getValue() != null) {
            result.putInfo(entry.getKey(), entry.getValue());
        }
    }
    result.setExecStartTime(AbstractExecutable.getStartTime(stepOutput));
    result.setExecEndTime(AbstractExecutable.getEndTime(stepOutput));
    // The exec command / wait time depend on the concrete task type.
    if (task instanceof ShellExecutable) {
        result.setExecCmd(((ShellExecutable) task).getCmd());
    }
    if (task instanceof MapReduceExecutable) {
        result.setExecCmd(((MapReduceExecutable) task).getMapReduceParams());
        // MAP_REDUCE_WAIT_TIME is stored in millis; report seconds.
        result.setExecWaitTime(
                AbstractExecutable.getExtraInfoAsLong(stepOutput, MapReduceExecutable.MAP_REDUCE_WAIT_TIME, 0L)
                        / 1000);
    }
    if (task instanceof HadoopShellExecutable) {
        result.setExecCmd(((HadoopShellExecutable) task).getJobParams());
    }
    return result;
}
 
Example #24
Source File: CubingJob.java    From Kylin with Apache License 2.0 5 votes vote down vote up
/**
 * Builds the (title, content) pair for the job-status notification email, or
 * returns null when the output state does not warrant a notification.
 */
@Override
protected Pair<String, String> formatNotifications(ExecutableState state) {
    final Output output = jobService.getOutput(getId());
    String logMsg;
    switch (output.getState()) {
        case ERROR:
            // Guard against a null verbose message so replaceAll below cannot NPE.
            logMsg = output.getVerboseMsg() == null ? "" : output.getVerboseMsg();
            break;
        case DISCARDED:
        case SUCCEED:
            // Both notify with an empty error log.
            logMsg = "";
            break;
        default:
            return null;
    }
    String content = ExecutableConstants.NOTIFY_EMAIL_TEMPLATE;
    // Matcher.quoteReplacement escapes '$' and '\' in the dynamic values; left
    // unescaped, String.replaceAll would treat them as group references and
    // corrupt the email or throw IllegalArgumentException.
    content = content.replaceAll("\\$\\{job_name\\}", java.util.regex.Matcher.quoteReplacement(getName()));
    content = content.replaceAll("\\$\\{result\\}", state.toString());
    content = content.replaceAll("\\$\\{cube_name\\}", java.util.regex.Matcher.quoteReplacement(getCubeName()));
    content = content.replaceAll("\\$\\{start_time\\}", new Date(getStartTime()).toString());
    content = content.replaceAll("\\$\\{duration\\}", getDuration() / 60000 + "mins");
    content = content.replaceAll("\\$\\{mr_waiting\\}", getMapReduceWaitTime() / 60000 + "mins");
    content = content.replaceAll("\\$\\{last_update_time\\}", new Date(getLastModified()).toString());
    content = content.replaceAll("\\$\\{submitter\\}", java.util.regex.Matcher.quoteReplacement(getSubmitter()));
    content = content.replaceAll("\\$\\{error_log\\}", java.util.regex.Matcher.quoteReplacement(logMsg));

    try {
        InetAddress inetAddress = InetAddress.getLocalHost();
        content = content.replaceAll("\\$\\{job_engine\\}", inetAddress.getCanonicalHostName());
    } catch (UnknownHostException e) {
        logger.warn(e.getLocalizedMessage(), e);
    }

    String title = "["+ state.toString() + "] - [Kylin Cube Build Job]-" + getCubeName();
    return Pair.of(title, content);
}
 
Example #25
Source File: DefaultScheduler.java    From Kylin with Apache License 2.0 5 votes vote down vote up
/**
 * Fetcher pass: scans all known job ids and submits every READY job to the
 * pool, up to the configured concurrent-job limit.
 */
@Override
public void run() {
    // logger.debug("Job Fetcher is running...");
    Map<String, Executable> runningJobs = context.getRunningJobs();
    // Back off entirely when at capacity; the next scheduled pass retries.
    if (runningJobs.size() >= jobEngineConfig.getMaxConcurrentJobLimit()) {
        logger.warn("There are too many jobs running, Job Fetch will wait until next schedule time");
        return;
    }

    int nRunning = 0, nReady = 0, nOthers = 0;
    for (final String id : executableManager.getAllJobIds()) {
        if (runningJobs.containsKey(id)) {
            // logger.debug("Job id:" + id + " is already running");
            nRunning++;
            continue;
        }
        final Output output = executableManager.getOutput(id);
        if ((output.getState() != ExecutableState.READY)) {
            // logger.debug("Job id:" + id + " not runnable");
            nOthers++;
            continue;
        }
        nReady++;
        AbstractExecutable executable = executableManager.getJob(id);
        String jobDesc = executable.toString();
        logger.info(jobDesc + " prepare to schedule");
        try {
            // Register as running BEFORE submitting so the capacity check above
            // sees it; roll back the registration if submission fails.
            context.addRunningJob(executable);
            jobPool.execute(new JobRunner(executable));
            logger.info(jobDesc + " scheduled");
        } catch (Exception ex) {
            context.removeRunningJob(executable);
            logger.warn(jobDesc + " fail to schedule", ex);
        }
    }
    logger.info("Job Fetcher: " + nRunning + " running, " + runningJobs.size() + " actual running, " + nReady + " ready, " + nOthers + " others");
}
 
Example #26
Source File: JobInfoConverter.java    From kylin with Apache License 2.0 5 votes vote down vote up
/**
 * Converts a checkpoint job plus its pre-fetched outputs into a
 * {@code JobInstance} view object, including one step per task.
 *
 * @param job     the checkpoint job to convert; may be null
 * @param outputs map of executable id to output; must contain the job's output
 * @return the populated instance, or null when the job or its output is missing
 */
public static JobInstance parseToJobInstance(CheckpointExecutable job, Map<String, Output> outputs) {
    if (job == null) {
        logger.warn("job is null.");
        return null;
    }

    Output output = outputs.get(job.getId());
    if (output == null) {
        logger.warn("job output is null.");
        return null;
    }

    final JobInstance result = new JobInstance();
    result.setName(job.getName());
    result.setProjectName(job.getProjectName());
    result.setRelatedCube(CubingExecutableUtil.getCubeName(job.getParams()));
    result.setDisplayCubeName(CubingExecutableUtil.getCubeName(job.getParams()));
    result.setLastModified(output.getLastModified());
    result.setSubmitter(job.getSubmitter());
    result.setUuid(job.getId());
    result.setType(CubeBuildTypeEnum.CHECKPOINT);
    result.setStatus(parseToJobStatus(output.getState()));
    result.setBuildInstance(AbstractExecutable.getBuildInstance(output));
    result.setExecStartTime(AbstractExecutable.getStartTime(output));
    result.setExecEndTime(AbstractExecutable.getEndTime(output));
    result.setExecInterruptTime(AbstractExecutable.getInterruptTime(output));
    // Duration (millis) is derived from the start/end/interrupt times just set; expose seconds.
    result.setDuration(AbstractExecutable.getDuration(result.getExecStartTime(), result.getExecEndTime(),
            result.getExecInterruptTime()) / 1000);
    // One step per task; a task's output may legitimately be absent from the map.
    for (int i = 0; i < job.getTasks().size(); ++i) {
        AbstractExecutable task = job.getTasks().get(i);
        result.addStep(parseToJobStep(task, i, outputs.get(task.getId())));
    }
    return result;
}
 
Example #27
Source File: SparkExecutableLivy.java    From kylin with Apache License 2.0 5 votes vote down vote up
@Override
protected void onExecuteStart(ExecutableContext executableContext) {
    // Decide how to (re)start this Livy-submitted Spark step. A recorded
    // START_TIME means a previous attempt ran: keep RUNNING when the old Livy
    // session is still alive, otherwise fall back to a clean start via super.
    final Output output = getOutput();
    if (output.getExtra().containsKey(START_TIME)) {
        final String sparkJobID = output.getExtra().get(ExecutableConstants.SPARK_JOB_ID);
        if (sparkJobID == null) {
            // Started before but never got a Spark job id; just mark RUNNING again.
            getManager().updateJobOutput(getId(), ExecutableState.RUNNING, null, null);
            return;
        }
        try {
            String status = getAppState(sparkJobID);
            // dead / error / shutting_down (or unknown) all mean the previous
            // session is gone and the step must restart cleanly.
            if (Strings.isNullOrEmpty(status) || LivyStateEnum.dead.name().equalsIgnoreCase(status)
                    || LivyStateEnum.error.name().equalsIgnoreCase(status)
                    || LivyStateEnum.shutting_down.name().equalsIgnoreCase(status)) {
                //remove previous mr job info
                super.onExecuteStart(executableContext);
            } else {
                getManager().updateJobOutput(getId(), ExecutableState.RUNNING, null, null);
            }
        } catch (IOException e) {
            // Pass the exception to the logger so the cause of the status
            // lookup failure is not silently lost.
            logger.warn("error get hadoop status", e);
            super.onExecuteStart(executableContext);
        }
    } else {
        super.onExecuteStart(executableContext);
    }
}
 
Example #28
Source File: JobInfoConverter.java    From kylin with Apache License 2.0 5 votes vote down vote up
/**
 * Same as {@code parseToJobInstance} but never propagates an exception:
 * failures are logged and null is returned instead.
 *
 * @param job     the checkpoint job to convert; may be null
 * @param outputs map of executable id to its output
 * @return the parsed instance, or null when parsing failed
 */
public static JobInstance parseToJobInstanceQuietly(CheckpointExecutable job, Map<String, Output> outputs) {
    try {
        return parseToJobInstance(job, outputs);
    } catch (Exception e) {
        // Log the job id - the "uuid" the message promises - not the whole job object.
        logger.error("Failed to parse job instance: uuid={}", job == null ? null : job.getId(), e);
        return null;
    }
}
 
Example #29
Source File: JobServiceTest.java    From kylin with Apache License 2.0 5 votes vote down vote up
@Test
public void testExceptionOnLostJobOutput() {
    // Searching with an empty output map (simulating lost job output) must not
    // throw; the affected job is simply absent from the result.
    ExecutableManager manager = ExecutableManager.getInstance(jobService.getConfig());
    AbstractExecutable executable = new TestJob();
    manager.addJob(executable);
    List<CubingJob> jobs = jobService.innerSearchCubingJobs("cube", "jobName",
            Collections.<ExecutableState> emptySet(), 0, Long.MAX_VALUE, Collections.<String, Output> emptyMap(),
            true, "project");
    Assert.assertEquals(0, jobs.size());
}
 
Example #30
Source File: JobInfoConverterTest.java    From kylin-on-parquet-v2 with Apache License 2.0 5 votes vote down vote up
@Test
public void testParseToJobInstanceQuietlyUsingNullCubingJob() {
    // A null CubingJob must be tolerated quietly and yield a null instance.
    Map<String, Output> outputs = new ConcurrentHashMap<>();
    JobInstance parsed = JobInfoConverter.parseToJobInstanceQuietly((CubingJob) null, outputs);

    assertNull(parsed);
}