org.apache.spark.SparkJobInfo Java Examples

The following examples show how to use org.apache.spark.SparkJobInfo. They are drawn from open source projects; the originating project, source file, and license are noted above each example.
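
As background, a SparkJobInfo comes from the context's status tracker and exposes the job id, the ids of the stages it ran, and its JobExecutionStatus. A minimal sketch of that core API, assuming an existing JavaSparkContext named jsc and a hypothetical helper method; note that getJobInfo returns null once the tracker no longer retains the job:

import java.util.Arrays;
import org.apache.spark.SparkJobInfo;
import org.apache.spark.api.java.JavaSparkContext;

// Hypothetical helper: report a job's current status, if the status
// tracker still knows about it.
static void printJobStatus(JavaSparkContext jsc, int jobId) {
  SparkJobInfo info = jsc.statusTracker().getJobInfo(jobId);
  if (info == null) {
    System.out.println("Job " + jobId + " is unknown to the tracker");
    return;
  }
  System.out.println("Job " + info.jobId() + " is " + info.status()
      + ", stages: " + Arrays.toString(info.stageIds()));
}
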
Example #1
Source File: LocalSparkJobStatus.java    From sqoop-on-spark with Apache License 2.0
@Override
public JobExecutionStatus getState() {
	SparkJobInfo sparkJobInfo = getJobInfo();
	// A Spark job with an empty source dataset is never actually submitted,
	// so JobStateListener never receives its JobStart/JobEnd events; in that
	// case, fall back to the JavaFutureAction to determine the current state.
	if (sparkJobInfo == null && future.isDone()) {
		try {
			future.get();
		} catch (Exception e) {
			LOG.error("Failed to run job " + jobId, e);
			return JobExecutionStatus.FAILED;
		}
		return JobExecutionStatus.SUCCEEDED;
	}
	return sparkJobInfo == null ? null : sparkJobInfo.status();
}
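
The future and jobId fields used above are populated when the job is launched asynchronously. A hedged sketch of that wiring, with illustrative names (the actual sqoop-on-spark setup is not shown here):

// Illustrative only: a JavaFutureAction exposes the ids of the jobs it
// spawned, which is where a jobId like the one logged above comes from.
JavaFutureAction<List<Integer>> future = rdd.collectAsync();
List<Integer> jobIds = future.jobIds();  // empty until the job is submitted
int jobId = jobIds.isEmpty() ? -1 : jobIds.get(0);
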
 
Example #2
Source File: CompactionJob.java    From spliceengine with GNU Affero General Public License v3.0
private boolean compactionRunning(List<Integer> jobIds) {
    if (jobIds.isEmpty()) {
        return false;
    }
    Integer jobId = jobIds.get(0);
    SparkStatusTracker statusTracker = SpliceSpark.getContext().sc().statusTracker();

    Option<SparkJobInfo> op = statusTracker.getJobInfo(jobId);
    if (op.isEmpty()) {
        return false;
    }
    SparkJobInfo jobInfo = op.get();
    int stageId = jobInfo.stageIds()[0];

    Option<SparkStageInfo> stageInfoOp = statusTracker.getStageInfo(stageId);
    if (stageInfoOp.isEmpty()) {
        return false;
    }
    SparkStageInfo stageInfo = stageInfoOp.get();

    return stageInfo.numActiveTasks() > 0 || stageInfo.numCompletedTasks() > 0;
}
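
Because this example goes through the Scala SparkStatusTracker (via sc().statusTracker()), results come back wrapped in scala.Option, hence the isEmpty()/get() calls. A sketch of the same check against the Java-friendly JavaSparkStatusTracker, which returns null instead of Option (jsc is a hypothetical JavaSparkContext):

// Sketch: JavaSparkStatusTracker returns null rather than scala.Option.
SparkJobInfo jobInfo = jsc.statusTracker().getJobInfo(jobId);
if (jobInfo == null || jobInfo.stageIds().length == 0) {
    return false;
}
SparkStageInfo stageInfo = jsc.statusTracker().getStageInfo(jobInfo.stageIds()[0]);
return stageInfo != null
        && (stageInfo.numActiveTasks() > 0 || stageInfo.numCompletedTasks() > 0);
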
 
Example #3
Source File: JavaStatusTrackerDemo.java    From SparkDemo with MIT License
public static void main(String[] args) throws Exception {
  SparkSession spark = SparkSession
    .builder()
    .appName(APP_NAME)
    .getOrCreate();

  final JavaSparkContext jsc = new JavaSparkContext(spark.sparkContext());

  // Example of implementing a progress reporter for a simple job.
  JavaRDD<Integer> rdd = jsc.parallelize(Arrays.asList(1, 2, 3, 4, 5), 5).map(
      new IdentityWithDelay<Integer>());
  JavaFutureAction<List<Integer>> jobFuture = rdd.collectAsync();
  while (!jobFuture.isDone()) {
    Thread.sleep(1000);  // 1 second
    List<Integer> jobIds = jobFuture.jobIds();
    if (jobIds.isEmpty()) {
      continue;
    }
    int currentJobId = jobIds.get(jobIds.size() - 1);
    SparkJobInfo jobInfo = jsc.statusTracker().getJobInfo(currentJobId);
    SparkStageInfo stageInfo = jsc.statusTracker().getStageInfo(jobInfo.stageIds()[0]);
    System.out.println(stageInfo.numTasks() + " tasks total: " + stageInfo.numActiveTasks() +
        " active, " + stageInfo.numCompletedTasks() + " complete");
  }

  System.out.println("Job results are: " + jobFuture.get());
  spark.stop();
}
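
The IdentityWithDelay function used in the map above is a helper defined elsewhere in the demo: an identity mapping (an org.apache.spark.api.java.function.Function) that sleeps on each element so the job runs long enough for the polling loop to observe progress. It looks roughly like this (a sketch, not the verbatim source):

// Roughly: an identity map function that sleeps on each element so the
// status-polling loop above has in-flight tasks to report on.
static final class IdentityWithDelay<T> implements Function<T, T> {
  @Override
  public T call(T x) throws Exception {
    Thread.sleep(2 * 1000);  // 2 seconds per element
    return x;
  }
}
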
 
Example #4
Source File: LocalSparkJobStatus.java    From sqoop-on-spark with Apache License 2.0
private SparkJobInfo getJobInfo() {
	return sparkContext.statusTracker().getJobInfo(jobId);
}
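
This getter backs getState() in Example #1. Note that statusTracker().getJobInfo(jobId) returns null for any job the tracker has not seen or has already evicted, which is why getState() treats a null SparkJobInfo as the signal to fall back to the JavaFutureAction.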