Java Code Examples for org.quartz.JobDataMap#getString()
The following examples show how to use org.quartz.JobDataMap#getString(). Each example is taken from an open-source project; the source file, project, and license are listed above the snippet.
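All of the snippets below read values that were placed into a JobDataMap when the job or trigger was defined. As a minimal sketch of both sides (the GreetingJob class, the "greeting" key, and the identity names are illustrative, not taken from any of the projects below), a string can be stored with JobBuilder.usingJobData(...) and read back with getString(...) inside the job:

import org.quartz.Job;
import org.quartz.JobBuilder;
import org.quartz.JobDetail;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;

public class GreetingJob implements Job {

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        // getMergedJobDataMap() overlays the trigger's JobDataMap on the JobDetail's
        String greeting = context.getMergedJobDataMap().getString("greeting");
        System.out.println("greeting = " + greeting);
    }

    // Scheduling side: the value read above is stored when the JobDetail is built
    public static JobDetail buildJobDetail() {
        return JobBuilder.newJob(GreetingJob.class)
                .withIdentity("greetingJob", "examples")
                .usingJobData("greeting", "hello from the JobDataMap")
                .build();
    }
}

Note that getString(...) returns null when the key is absent, which is why examples that treat a value as required (such as Example 18 below) null-check it explicitly.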
Example 1
Source File: FoxbpmJobExecutionContext.java From FoxBPM with Apache License 2.0 | 6 votes |
public FoxbpmJobExecutionContext(JobExecutionContext jobExecutionContext) {
    JobDataMap jobDataMap = jobExecutionContext.getJobDetail().getJobDataMap();
    scheduleJob = jobExecutionContext.getJobInstance();
    this.tokenId = jobDataMap.getString(TOKEN_ID);
    this.processInstanceId = jobDataMap.getString(PROCESS_INSTANCE_ID);
    this.nodeId = jobDataMap.getString(NODE_ID);
    this.processKey = jobDataMap.getString(PROCESS_DEFINITION_KEY);
    this.processId = jobDataMap.getString(PROCESS_DEFINITION_ID);
    this.processName = jobDataMap.getString(PROCESS_DEFINITION_NAME);
    this.bizKey = jobDataMap.getString(BUSINESS_KEY);
    this.jobType = jobDataMap.getString("jobType");
    this.connectorId = jobDataMap.getString(CONNECTOR_ID);
    this.connectorInstanceId = jobDataMap.getString(CONNECTOR_INSTANCE_ID);
    this.connectorInstanceName = jobDataMap.getString(CONNECTOR_INSTANCE_NAME);
    this.eventType = jobDataMap.getString(EVENT_TYPE);
    this.eventName = jobDataMap.getString(EVENT_NAME);
    this.taskId = jobDataMap.getString(TASK_ID);
}
Example 2
Source File: RetentionJob.java From chronix.server with Apache License 2.0 | 6 votes |
/**
 * Executes the job that calls the retention plugin.
 *
 * @param context the current job context
 * @throws JobExecutionException if the solr server could not be reached.
 */
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    LOGGER.info("Starting retention job");
    JobDataMap data = context.getMergedJobDataMap();

    String url = data.getString(RetentionConstants.RETENTION_URL);
    HttpGet httpget = new HttpGet(url);

    try {
        CloseableHttpResponse response = httpClient.execute(httpget);
        LOGGER.info("Response was {}", response);
    } catch (IOException e) {
        throw new JobExecutionException("Could not execute http get request " + httpget, e);
    }
}
Example 3
Source File: CronJob.java From spring-boot-quartz-demo with MIT License | 6 votes |
@Override
protected void executeInternal(JobExecutionContext jobExecutionContext) throws JobExecutionException {
    JobKey key = jobExecutionContext.getJobDetail().getKey();
    System.out.println("Cron Job started with key :" + key.getName() + ", Group :" + key.getGroup()
            + " , Thread Name :" + Thread.currentThread().getName() + " ,Time now :" + new Date());

    System.out.println("======================================");
    System.out.println("Accessing annotation example: " + jobService.getAllJobs());
    List<Map<String, Object>> list = jobService.getAllJobs();
    System.out.println("Job list :" + list);
    System.out.println("======================================");

    //*********** For retrieving stored key-value pairs ***********/
    JobDataMap dataMap = jobExecutionContext.getMergedJobDataMap();
    String myValue = dataMap.getString("myKey");
    System.out.println("Value:" + myValue);

    System.out.println("Thread: " + Thread.currentThread().getName() + " stopped.");
}
Example 4
Source File: ScriptJob.java From engine with GNU General Public License v3.0 | 6 votes |
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap dataMap = context.getJobDetail().getJobDataMap();
    String scriptUrl = dataMap.getString(SCRIPT_URL_DATA_KEY);
    SiteContext siteContext = (SiteContext) dataMap.get(SITE_CONTEXT_DATA_KEY);
    ServletContext servletContext = (ServletContext) dataMap.get(SERVLET_CONTEXT_DATA_KEY);
    ScriptFactory scriptFactory = siteContext.getScriptFactory();

    if (scriptFactory == null) {
        throw new JobExecutionException(
                "No script factory associate to site context '" + siteContext.getSiteName() + "'");
    }

    SiteContext.setCurrent(siteContext);
    try {
        Map<String, Object> variables = new HashMap<>();
        GroovyScriptUtils.addJobScriptVariables(variables, servletContext);

        scriptFactory.getScript(scriptUrl).execute(variables);
    } catch (Exception e) {
        throw new JobExecutionException("Error executing script job at " + scriptUrl, e);
    } finally {
        SiteContext.clear();
    }
}
Example 5
Source File: WeatherJob.java From openhab1-addons with Eclipse Public License 2.0 | 6 votes |
/**
 * {@inheritDoc}
 */
@Override
public void execute(JobExecutionContext jobContext) throws JobExecutionException {
    JobDataMap jobDataMap = jobContext.getJobDetail().getJobDataMap();
    String locationId = jobDataMap.getString("locationId");
    logger.debug("Starting Weather job for location '{}'", locationId);

    try {
        LocationConfig locationConfig = context.getConfig().getLocationConfig(locationId);
        WeatherProvider weatherProvider = WeatherProviderFactory
                .createWeatherProvider(locationConfig.getProviderName());
        context.setWeather(locationId, weatherProvider.getWeather(locationConfig));
        weatherPublisher.publish(locationId);
    } catch (Exception ex) {
        logger.error(ex.getMessage(), ex);
        throw new JobExecutionException(ex.getMessage(), ex);
    }
}
Example 6
Source File: JobStoreImpl.java From nexus-public with Eclipse Public License 1.0 | 6 votes |
private boolean isLocal(final TriggerEntity entity) {
    if (isClustered()) {
        String localId = nodeAccess.getId();
        JobDataMap triggerDetail = entity.getValue().getJobDataMap();
        if (triggerDetail.containsKey(LIMIT_NODE_KEY)) {
            // filter limited triggers to those limited to run on this node
            String limitedNodeId = triggerDetail.getString(LIMIT_NODE_KEY);
            return localId.equals(limitedNodeId);
        }
        // filter all other triggers to those "owned" by this node
        String owner = triggerDetail.getString(NODE_ID);
        return localId.equals(owner);
    }
    return true;
}
Example 7
Source File: SchedulerTriggerListener.java From dapeng-soa with Apache License 2.0 | 5 votes |
/**
 * (1)
 * The Trigger has fired and its associated job is about to run.
 * Called by the Scheduler when a Trigger has fired and its associated JobDetail is about to be executed.
 */
@Override
public void triggerFired(Trigger trigger, JobExecutionContext context) {
    TaskMonitorDataReportUtils.setSessionTid(null);

    JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
    String serviceName = jobDataMap.getString("serviceName");
    String versionName = jobDataMap.getString("versionName");
    String methodName = jobDataMap.getString("methodName");
    String message = String.format("SchedulerTriggerListener::triggerFired;Task[%s:%s:%s] 即将被触发",
            serviceName, versionName, methodName);
    //sendMessage(serviceName, versionName, methodName, message, false, jobDataMap, "normal");
}
Example 8
Source File: SimpleJob.java From tutorials with MIT License | 5 votes |
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap dataMap = context.getJobDetail().getJobDataMap();
    String jobSays = dataMap.getString("jobSays");
    float myFloatValue = dataMap.getFloat("myFloatValue");

    System.out.println("Job says: " + jobSays + ", and val is: " + myFloatValue);
}
Example 9
Source File: EJBInvokerJob.java From AsuraFramework with Apache License 2.0 | 5 votes |
private InitialContext getInitialContext(JobDataMap jobDataMap) throws NamingException {
    Hashtable params = new Hashtable(2);

    String initialContextFactory = jobDataMap.getString(INITIAL_CONTEXT_FACTORY);
    if (initialContextFactory != null) {
        params.put(Context.INITIAL_CONTEXT_FACTORY, initialContextFactory);
    }

    String providerUrl = jobDataMap.getString(PROVIDER_URL);
    if (providerUrl != null) {
        params.put(Context.PROVIDER_URL, providerUrl);
    }

    String principal = jobDataMap.getString(PRINCIPAL);
    if (principal != null) {
        params.put(Context.SECURITY_PRINCIPAL, principal);
    }

    String credentials = jobDataMap.getString(CREDENTIALS);
    if (credentials != null) {
        params.put(Context.SECURITY_CREDENTIALS, credentials);
    }

    return (params.size() == 0) ? new InitialContext() : new InitialContext(params);
}
Example 10
Source File: SchedulerJobListener.java From dapeng-soa with Apache License 2.0 | 5 votes |
/**
 * (2)
 * Normally this method is not executed; it runs only when a TriggerListener's vetoJobExecution
 * method returns true. Note that if method (2) runs, methods (1) and (3) will not run,
 * because the job execution has been vetoed.
 * Called by the Scheduler when a JobDetail was about to be executed (an associated Trigger has fired),
 * but a TriggerListener vetoed its execution.
 */
@Override
public void jobExecutionVetoed(JobExecutionContext context) {
    JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
    String serviceName = jobDataMap.getString("serviceName");
    String versionName = jobDataMap.getString("versionName");
    String methodName = jobDataMap.getString("methodName");
    String message = String.format("SchedulerJobListener::jobExecutionVetoed;Task[%s:%s:%s] 触发失败",
            serviceName, versionName, methodName);
    sendMessage(serviceName, versionName, methodName, executorService, message, true, jobDataMap, "failed");
}
Example 11
Source File: JMXPluginJob.java From SuitAgent with Apache License 2.0 | 5 votes |
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
    String pluginName = jobDataMap.getString("pluginName");
    try {
        JMXPlugin jmxPlugin = (JMXPlugin) jobDataMap.get("pluginObject");
        String jmxServerName = jobDataMap.getString("jmxServerName");
        List<JMXMetricsValueInfo> jmxMetricsValueInfos = JMXManager.getJmxMetricValue(jmxServerName, jmxPlugin);

        // Set the agentSignName
        for (JMXMetricsValueInfo jmxMetricsValueInfo : jmxMetricsValueInfos) {
            if (AgentConfiguration.INSTANCE.isDockerRuntime()) {
                // In a container runtime, use JavaExecCommandInfo.appName directly
                jmxMetricsValueInfo.getJmxConnectionInfo().setName(
                        jmxMetricsValueInfo.getJmxConnectionInfo().getConnectionServerName());
            } else {
                String agentSignName = jmxPlugin.agentSignName(jmxMetricsValueInfo,
                        jmxMetricsValueInfo.getJmxConnectionInfo().getPid());
                if ("{jmxServerName}".equals(agentSignName)) {
                    // Resolve the placeholder variable
                    jmxMetricsValueInfo.getJmxConnectionInfo().setName(jmxServerName);
                } else {
                    jmxMetricsValueInfo.getJmxConnectionInfo().setName(agentSignName);
                }
            }
        }

        MetricsCommon jmxMetricsValue = new JMXMetricsValue(jmxPlugin, jmxMetricsValueInfos);
        ReportMetrics.push(jmxMetricsValue.getReportObjects());
    } catch (Exception e) {
        log.error("插件 {} 运行异常", pluginName, e);
    }
}
Example 12
Source File: JobStoreImpl.java From nexus-public with Eclipse Public License 1.0 | 5 votes |
/**
 * A {@link TriggerEntity} is orphaned if its owner isn't in the cluster OR it's limited to a node
 * not in the cluster. If there is no cluster, it's never orphaned.
 */
private boolean isOrphaned(final TriggerEntity entity) {
    if (isClustered()) {
        Set<String> memberIds = nodeAccess.getMemberIds();
        JobDataMap triggerDetail = entity.getValue().getJobDataMap();
        String limitedNodeId = triggerDetail.getString(LIMIT_NODE_KEY);
        String owner = triggerDetail.getString(NODE_ID);
        return limitedNodeId != null ? !memberIds.contains(limitedNodeId) : !memberIds.contains(owner);
    }
    return false;
}
Example 13
Source File: DetectPluginJob.java From SuitAgent with Apache License 2.0 | 5 votes |
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    long timestamp = System.currentTimeMillis() / 1000;
    JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
    String pluginName = jobDataMap.getString("pluginName");
    try {
        DetectPlugin detectPlugin = (DetectPlugin) jobDataMap.get("pluginObject");
        MetricsCommon metricsValue = new DetectMetricsValue(detectPlugin, timestamp);
        // May involve connections to external networks, so run asynchronously
        ExecuteThreadUtil.execute(new JobThread(metricsValue, "detect job thread"));
    } catch (Exception e) {
        log.error("插件 {} 运行异常", pluginName, e);
    }
}
Example 14
Source File: SimpleJob.java From spring-boot-quartz-demo with MIT License | 5 votes |
@Override
protected void executeInternal(JobExecutionContext jobExecutionContext) throws JobExecutionException {
    JobKey key = jobExecutionContext.getJobDetail().getKey();
    System.out.println("Simple Job started with key :" + key.getName() + ", Group :" + key.getGroup()
            + " , Thread Name :" + Thread.currentThread().getName());

    System.out.println("======================================");
    System.out.println("Accessing annotation example: " + jobService.getAllJobs());
    List<Map<String, Object>> list = jobService.getAllJobs();
    System.out.println("Job list :" + list);
    System.out.println("======================================");

    //*********** For retrieving stored key-value pairs ***********/
    JobDataMap dataMap = jobExecutionContext.getMergedJobDataMap();
    String myValue = dataMap.getString("myKey");
    System.out.println("Value:" + myValue);

    //*********** For retrieving stored object, It will try to deserialize the bytes Object. ***********/
    /*
    SchedulerContext schedulerContext = null;
    try {
        schedulerContext = jobExecutionContext.getScheduler().getContext();
    } catch (SchedulerException e1) {
        e1.printStackTrace();
    }
    YourClass yourClassObject = (YourClass) schedulerContext.get("storedObjectKey");
    */

    while (toStopFlag) {
        try {
            System.out.println("Test Job Running... Thread Name :" + Thread.currentThread().getName());
            Thread.sleep(2000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
    System.out.println("Thread: " + Thread.currentThread().getName() + " stopped.");
}
Example 15
Source File: EmailJob.java From quartz-manager with MIT License | 5 votes |
@SuppressWarnings("unchecked") private void sendEmail(JobDataMap map) { String subject = map.getString("subject"); String messageBody = map.getString("messageBody"); List<String> to = (List<String>) map.get("to"); List<String> cc = (List<String>) map.get("cc"); List<String> bcc = (List<String>) map.get("bcc"); MimeMessage message = mailSender.createMimeMessage(); try { MimeMessageHelper helper = new MimeMessageHelper(message, false); for(String receipient : to) { helper.setFrom("[email protected]", "Julius from Dynamic Quartz"); helper.setTo(receipient); helper.setSubject(subject); helper.setText(messageBody); if(!isEmpty(cc)) helper.setCc(cc.stream().toArray(String[]::new)); if(!isEmpty(bcc)) helper.setBcc(bcc.stream().toArray(String[]::new)); mailSender.send(message); } } catch (MessagingException | UnsupportedEncodingException e) { log.error("An error occurred: {}", e.getLocalizedMessage()); } }
Example 16
Source File: TaskQuartzJobAdapter.java From micro-integrator with Apache License 2.0 | 5 votes |
@SuppressWarnings("unchecked") @Override public void execute(JobExecutionContext ctx) throws JobExecutionException { /* if task execution node is not fully started yet, ignore this trigger */ if (!TasksDSComponent.getTaskService().isServerInit()) { if (log.isDebugEnabled()) { log.debug("Ignoring task triggered before server startup: " + ctx.getJobDetail()); } return; } JobDataMap dataMap = ctx.getJobDetail().getJobDataMap(); String taskClassName = dataMap.getString(TaskConstants.TASK_CLASS_NAME); if (taskClassName == null) { throw new JobExecutionException("The task class is missing in the job data map"); } try { org.wso2.micro.integrator.ntask.core.Task task = (Task) Class.forName(taskClassName).newInstance(); Map<String, String> properties = (Map<String, String>) dataMap.get(TaskConstants.TASK_PROPERTIES); task.setProperties(properties); task.init(); task.execute(); } catch (Throwable e) { String msg = "Error in executing task: " + e.getMessage(); log.error(msg, e); throw new JobExecutionException(msg, e); } }
Example 17
Source File: DetectPluginJob.java From OpenFalcon-SuitAgent with Apache License 2.0 | 5 votes |
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
    String pluginName = jobDataMap.getString("pluginName");
    try {
        DetectPlugin detectPlugin = (DetectPlugin) jobDataMap.get("pluginObject");
        MetricsCommon metricsValue = new DetectMetricsValue(detectPlugin);
        // May involve connections to external networks, so run asynchronously
        ExecuteThreadUtil.execute(new JobThread(metricsValue, "detect job thread"));
    } catch (Exception e) {
        log.error("插件 {} 运行异常", pluginName, e);
    }
}
Example 18
Source File: FileScanJob.java From AsuraFramework with Apache License 2.0 | 4 votes |
/**
 * @see org.quartz.Job#execute(org.quartz.JobExecutionContext)
 */
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap mergedJobDataMap = context.getMergedJobDataMap();
    SchedulerContext schedCtxt = null;
    try {
        schedCtxt = context.getScheduler().getContext();
    } catch (SchedulerException e) {
        throw new JobExecutionException("Error obtaining scheduler context.", e, false);
    }

    String fileName = mergedJobDataMap.getString(FILE_NAME);
    String listenerName = mergedJobDataMap.getString(FILE_SCAN_LISTENER_NAME);

    if (fileName == null) {
        throw new JobExecutionException("Required parameter '" + FILE_NAME + "' not found in merged JobDataMap");
    }
    if (listenerName == null) {
        throw new JobExecutionException("Required parameter '" + FILE_SCAN_LISTENER_NAME + "' not found in merged JobDataMap");
    }

    FileScanListener listener = (FileScanListener) schedCtxt.get(listenerName);

    if (listener == null) {
        throw new JobExecutionException("FileScanListener named '" + listenerName + "' not found in SchedulerContext");
    }

    long lastDate = -1;
    if (mergedJobDataMap.containsKey(LAST_MODIFIED_TIME)) {
        lastDate = mergedJobDataMap.getLong(LAST_MODIFIED_TIME);
    }

    long newDate = getLastModifiedDate(fileName);

    if (newDate < 0) {
        log.warn("File '" + fileName + "' does not exist.");
        return;
    }

    if (lastDate > 0 && (newDate != lastDate)) {
        // notify call back...
        log.info("File '" + fileName + "' updated, notifying listener.");
        listener.fileUpdated(fileName);
    } else if (log.isDebugEnabled()) {
        log.debug("File '" + fileName + "' unchanged.");
    }

    // It is the JobDataMap on the JobDetail which is actually stateful
    context.getJobDetail().getJobDataMap().put(LAST_MODIFIED_TIME, newDate);
}
Example 19
Source File: ClusterStatJob.java From EserKnife with Apache License 2.0 | 4 votes |
/**
 * Periodically collects cluster statistics.
 */
@Override
public void action(JobExecutionContext context) {
    try {
        JobDataMap dataMap = context.getMergedJobDataMap();
        Date date = context.getTrigger().getPreviousFireTime();
        String clusterName = dataMap.getString(Constant.CLUSTER_NAME);
        LOGGER.info("开始统计" + clusterName + "的信息!");
        NodesStats nodesStats = new NodesStats.Builder().withJvm().withOs().withIndices()
                .withHttp().withTransport().withThreadPool().withFs().build();
        JestResult result = JestManager.getJestClient(clusterName).execute(nodesStats);
        JSONObject json = JSONObject.parseObject(result.getJsonString());
        if (json == null) {
            return;
        }
        asyncService.submitFuture(new CollectionCommonStatHandler(JobKey.buildFutureKey(clusterName, Constant.INDICES, date), clusterName, json, date));
        asyncService.submitFuture(new CollectionIndicesStatHandler(JobKey.buildFutureKey(clusterName, Constant.INDICES, date), clusterName, json, date));
        asyncService.submitFuture(new CollectionHttpStatHandler(JobKey.buildFutureKey(clusterName, Constant.HTTP, date), clusterName, json, date));
        asyncService.submitFuture(new CollectionJVMStatHandler(JobKey.buildFutureKey(clusterName, Constant.JVM_NAME, date), clusterName, json, date));
        asyncService.submitFuture(new CollectionTransportStatHandler(JobKey.buildFutureKey(clusterName, Constant.TRANSPORT, date), clusterName, json, date));
        asyncService.submitFuture(new CollectionThreadPoolStatHandler(JobKey.buildFutureKey(clusterName, Constant.THREAD_POOL, date), clusterName, json, date));
        asyncService.submitFuture(new CollectionOsStatHandler(JobKey.buildFutureKey(clusterName, Constant.OS, date), clusterName, json, date));
        asyncService.submitFuture(new CollectionFsStatHandler(JobKey.buildFutureKey(clusterName, Constant.FS, date), clusterName, json, date));

        // Alarm handling
        List<AlarmRule> all = alarmRuleService.getList();
        List<AlarmRule> alarmFilterRule = new ArrayList<AlarmRule>();
        if (CollectionUtils.isEmpty(all)) {
            return;
        } else {
            for (AlarmRule alarmRule : all) {
                if (clusterName.equals(alarmRule.getClusterName()) && alarmRule.getEnable() > 0) {
                    alarmFilterRule.add(alarmRule);
                }
            }
        }
        asyncService.submitFuture(new ClusterNodeAlarm(JobKey.buildFutureKey(clusterName, Constant.NODE_ALARM, date), clusterName, json, alarmFilterRule));
    } catch (Exception e) {
        LOGGER.error("定时获取集群统计异常", e.getStackTrace());
    }
}
Example 20
Source File: JmsHelper.java From AsuraFramework with Apache License 2.0 | 3 votes |
public static boolean isDestinationSecure(JobDataMap jobDataMap) {
    String user = jobDataMap.getString(JmsHelper.JMS_USER);
    String pw = jobDataMap.getString(JmsHelper.JMS_PASSWORD);
    return (user != null && pw != null);
}
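A detail worth noting across the examples above: some read context.getJobDetail().getJobDataMap(), which contains only the data attached to the JobDetail, while others read context.getMergedJobDataMap(), which merges the trigger's JobDataMap over the job's, with trigger entries taking precedence for duplicate keys. A minimal sketch of the trigger side (the class, identity names, and "greeting" key are illustrative):

import org.quartz.SimpleScheduleBuilder;
import org.quartz.Trigger;
import org.quartz.TriggerBuilder;

public class TriggerDataExample {

    // Values put on the trigger are only visible through getMergedJobDataMap();
    // getJobDetail().getJobDataMap() sees only what was attached to the JobDetail.
    public static Trigger buildTrigger() {
        return TriggerBuilder.newTrigger()
                .withIdentity("greetingTrigger", "examples")
                .usingJobData("greeting", "hello from the trigger") // overrides the job-level value in the merged map
                .withSchedule(SimpleScheduleBuilder.simpleSchedule()
                        .withIntervalInSeconds(10)
                        .repeatForever())
                .startNow()
                .build();
    }
}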