org.apache.commons.collections4.MapUtils Java Examples
The following examples show how to use
org.apache.commons.collections4.MapUtils.
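Before diving into the project examples, here is a minimal, self-contained sketch of the MapUtils helpers that recur throughout them: null-safe emptiness checks, typed getters with defaults, and invertMap. The class name MapUtilsDemo and the sample keys are illustrative only and do not come from any of the projects below.

import java.util.HashMap;
import java.util.Map;

import org.apache.commons.collections4.MapUtils;

public class MapUtilsDemo {
    public static void main(String[] args) {
        Map<String, Object> config = new HashMap<>();
        config.put("page", 1);
        config.put("limit", "20"); // a numeric value stored as a String
        config.put("timezone", "UTC");

        // Null-safe emptiness checks: both accept a null map without throwing
        System.out.println(MapUtils.isEmpty(null));      // true
        System.out.println(MapUtils.isNotEmpty(config)); // true

        // Typed getters return null (or the supplied default) instead of throwing,
        // and coerce compatible values, e.g. the String "20" to an int
        Integer page = MapUtils.getInteger(config, "page");                            // 1
        int limit = MapUtils.getIntValue(config, "limit", 10);                         // 20
        String zone = MapUtils.getString(config, "timezone", "America/Los_Angeles");  // "UTC"
        boolean failOnError = MapUtils.getBooleanValue(config, "failOnError", false); // false
        System.out.println(page + " " + limit + " " + zone + " " + failOnError);

        // invertMap swaps keys and values; duplicate values collapse into one key,
        // which is why Example #2 below compares sizes to detect collisions
        Map<String, String> storeToStream = new HashMap<>();
        storeToStream.put("store-a", "stream-1");
        storeToStream.put("store-b", "stream-2");
        Map<String, String> streamToStore = MapUtils.invertMap(storeToStream);
        System.out.println(streamToStore.get("stream-1")); // store-a
    }
}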
Example #1
Source File: TaskHeartBeatServiceImpl.java From chronus with Apache License 2.0
@Override
public void removeTaskFromHeartBeatQueue(Integer heartBeatRate, TaskRuntimeEntity taskRuntime, boolean needClearRuntimeInfo) {
    String heartBeatRateStr = heartBeatRate.toString();
    String key = getKey(taskRuntime);
    TASK_HEARTBEAT_INTERVAL_GROUP_TABLE.remove(heartBeatRateStr, key);
    // If no task needs to send heartbeats at this interval, clear the related queues
    Map<String, TaskRuntimeEntity> rowMap = TASK_HEARTBEAT_INTERVAL_GROUP_TABLE.row(heartBeatRateStr);
    if (MapUtils.isEmpty(rowMap)) {
        ScheduledExecutorService heartbeatIntervalScheduledService = HEARTBEAT_INTERVAL_SCHEDULED_MAP.remove(heartBeatRateStr);
        if (heartbeatIntervalScheduledService != null) {
            heartbeatIntervalScheduledService.shutdown();
        }
        log.info("Removed the heartbeat data queue for the {}/s interval!", heartBeatRateStr);
        CONCURRENT_HEARTBEAT_QUEUE_MAP.remove(heartBeatRateStr);
    }
    if (needClearRuntimeInfo) {
        taskRuntimeService.delete(taskRuntime);
    }
}
Example #2
Source File: ContainerStorageManager.java From samza with Apache License 2.0
/**
 * For each standby task, we remove its changeLogSSPs from changelogSSP map and add it to the task's taskSideInputSSPs.
 * The task's sideInputManager will consume and restore these as well.
 *
 * @param containerModel the container's model
 * @param changelogSystemStreams the passed in set of changelogSystemStreams
 * @return A map of storeName to changelog SystemStream across all tasks, assuming no two stores have the same changelogSSP
 */
private Map<String, SystemStream> getChangelogSystemStreams(ContainerModel containerModel,
    Map<String, SystemStream> changelogSystemStreams) {

    if (MapUtils.invertMap(changelogSystemStreams).size() != changelogSystemStreams.size()) {
        throw new SamzaException("Two stores cannot have the same changelog system-stream");
    }

    Map<SystemStreamPartition, String> changelogSSPToStore = new HashMap<>();
    changelogSystemStreams.forEach((storeName, systemStream) ->
        containerModel.getTasks().forEach((taskName, taskModel) -> {
            changelogSSPToStore.put(new SystemStreamPartition(systemStream, taskModel.getChangelogPartition()), storeName);
        })
    );

    getTasks(containerModel, TaskMode.Standby).forEach((taskName, taskModel) -> {
        this.taskSideInputStoreSSPs.putIfAbsent(taskName, new HashMap<>());
        changelogSystemStreams.forEach((storeName, systemStream) -> {
            SystemStreamPartition ssp = new SystemStreamPartition(systemStream, taskModel.getChangelogPartition());
            changelogSSPToStore.remove(ssp);
            this.taskSideInputStoreSSPs.get(taskName).put(storeName, Collections.singleton(ssp));
        });
    });

    // changelogSystemStreams correspond only to active tasks (since those of standby-tasks moved to sideInputs above)
    return MapUtils.invertMap(changelogSSPToStore).entrySet().stream()
        .collect(Collectors.toMap(Map.Entry::getKey, x -> x.getValue().getSystemStream()));
}
Example #3
Source File: UpstreamJobBuffer.java From DDMQ with Apache License 2.0
public synchronized void recoverTimeoutMessage() {
    if (MapUtils.isEmpty(workingJobs)) {
        return;
    }
    LOGGER.trace("recoverTimeoutMessage,group:{},topic:{},qid:{},workingJobs.size={}",
            groupId, topic, qid, workingJobs.size());
    long curTime = TimeUtils.getCurTime();
    Iterator<Map.Entry<Long, UpstreamJob>> itr = workingJobs.entrySet().iterator();
    while (itr.hasNext()) {
        UpstreamJob job = itr.next().getValue();
        if (curTime - job.getPullTimestamp() >= upstreamTopic.getTimeout()) {
            if (job.canDoErrorRetry()) {
                tryPutInNonEmptyQueue();
                job.setState("PullSvr.Timeout#" + job.getErrorRetryCnt());
            } else {
                itr.remove();
                dropJob(job); // failure
            }
        } else {
            break;
        }
    }
}
Example #4
Source File: SysPermissionServiceImpl.java From open-capacity-platform with Apache License 2.0
@Override
public PageResult<SysPermission> findPermissions(Map<String, Object> params) {
    // Set up pagination, i.e. the current page number and the number of records per page
    // (note: this must be set before the mapper interface method is executed)
    if (MapUtils.getInteger(params, "page") != null && MapUtils.getInteger(params, "limit") != null) {
        PageHelper.startPage(MapUtils.getInteger(params, "page"), MapUtils.getInteger(params, "limit"), true);
    }
    List<SysPermission> list = sysPermissionDao.findList(params);
    PageInfo<SysPermission> pageInfo = new PageInfo(list);
    return PageResult.<SysPermission>builder().data(pageInfo.getList()).code(0).count(pageInfo.getTotal()).build();
//    int total = sysPermissionDao.count(params);
//    List<SysPermission> list = Collections.emptyList();
//
//    if (total > 0) {
//        PageUtil.pageParamConver(params, false);
//        list = sysPermissionDao.findList(params);
//    }
//    return PageResult.<SysPermission>builder().data(list).code(0).count((long) total).build();
}
Example #5
Source File: AnomaliesResource.java From incubator-pinot with Apache License 2.0
public static Multimap<String, String> generateFilterSetWithDimensionMap(DimensionMap dimensionMap,
    Multimap<String, String> filterSet) {

    Multimap<String, String> newFilterSet = HashMultimap.create();

    // The dimension map gives more specific dimension information than the filter set (i.e., the dimension map
    // should be a subset of the filter set), so it needs to be processed first.
    if (MapUtils.isNotEmpty(dimensionMap)) {
        for (Map.Entry<String, String> dimensionMapEntry : dimensionMap.entrySet()) {
            newFilterSet.put(dimensionMapEntry.getKey(), dimensionMapEntry.getValue());
        }
    }
    if (filterSet != null && filterSet.size() != 0) {
        for (String key : filterSet.keySet()) {
            if (!newFilterSet.containsKey(key)) {
                newFilterSet.putAll(key, filterSet.get(key));
            }
        }
    }

    return newFilterSet;
}
Example #6
Source File: BaselineAlgorithm.java From incubator-pinot with Apache License 2.0
public BaselineAlgorithm(DataProvider provider, DetectionConfigDTO config, long startTime, long endTime) {
    super(provider, config, startTime, endTime);

    Preconditions.checkArgument(config.getProperties().containsKey(PROP_METRIC_URN));

    String metricUrn = MapUtils.getString(config.getProperties(), PROP_METRIC_URN);
    MetricEntity me = MetricEntity.fromURN(metricUrn);
    this.slice = MetricSlice.from(me.getId(), this.startTime, this.endTime, me.getFilters());

    int weeks = MapUtils.getIntValue(config.getProperties(), PROP_WEEKS, PROP_WEEKS_DEFAULT);
    BaselineAggregateType aggregation = BaselineAggregateType.valueOf(
        MapUtils.getString(config.getProperties(), PROP_AGGREGATION, PROP_AGGREGATION_DEFAULT));
    DateTimeZone timezone = DateTimeZone.forID(
        MapUtils.getString(this.config.getProperties(), PROP_TIMEZONE, PROP_TIMEZONE_DEFAULT));
    this.baseline = BaselineAggregate.fromWeekOverWeek(aggregation, weeks, 1, timezone);

    this.change = MapUtils.getDoubleValue(config.getProperties(), PROP_CHANGE, PROP_CHANGE_DEFAULT);
    this.difference = MapUtils.getDoubleValue(config.getProperties(), PROP_DIFFERENCE, PROP_DIFFERENCE_DEFAULT);
}
Example #7
Source File: CurrencyConversionService.java From prebid-server-java with Apache License 2.0
/**
 * Finds an intermediate conversion rate.
 * If the pairs USD : AUD - 1.2 and EUR : AUD - 1.5 are present and a EUR to USD conversion is needed,
 * this returns the (1/1.5) * 1.2 conversion rate.
 */
private static BigDecimal findIntermediateConversionRate(Map<String, BigDecimal> adServerCurrencyRates,
                                                         Map<String, BigDecimal> bidCurrencyRates) {
    BigDecimal conversionRate = null;
    if (MapUtils.isNotEmpty(adServerCurrencyRates) && MapUtils.isNotEmpty(bidCurrencyRates)) {
        final List<String> sharedCurrencies = new ArrayList<>(adServerCurrencyRates.keySet());
        sharedCurrencies.retainAll(bidCurrencyRates.keySet());

        if (!sharedCurrencies.isEmpty()) {
            // pick any found shared currency
            final String sharedCurrency = sharedCurrencies.get(0);
            final BigDecimal adServerCurrencyRateIntermediate = adServerCurrencyRates.get(sharedCurrency);
            final BigDecimal bidCurrencyRateIntermediate = bidCurrencyRates.get(sharedCurrency);
            conversionRate = adServerCurrencyRateIntermediate.divide(bidCurrencyRateIntermediate,
                    // choose the largest precision among the intermediate rates
                    bidCurrencyRateIntermediate.compareTo(adServerCurrencyRateIntermediate) > 0
                            ? bidCurrencyRateIntermediate.precision()
                            : adServerCurrencyRateIntermediate.precision(),
                    RoundingMode.HALF_EVEN);
        }
    }
    return conversionRate;
}
Example #8
Source File: DetectionConfigPropertiesBuilder.java From incubator-pinot with Apache License 2.0
Map<String, Object> compositePropertyBuilderHelper(List<Map<String, Object>> nestedPropertiesList,
    Map<String, Object> compositeAlertConfigMap) {
    Map<String, Object> properties;
    String subEntityName = MapUtils.getString(compositeAlertConfigMap, PROP_NAME);

    // Wrap the entity level grouper, only 1 grouper is supported now
    List<Map<String, Object>> grouperProps = ConfigUtils.getList(compositeAlertConfigMap.get(PROP_GROUPER));
    Map<String, Object> mergerProperties = ConfigUtils.getMap(compositeAlertConfigMap.get(PROP_MERGER));
    if (!grouperProps.isEmpty()) {
        properties = buildWrapperProperties(
            EntityAnomalyMergeWrapper.class.getName(),
            Collections.singletonList(buildGroupWrapperProperties(subEntityName, grouperProps.get(0), nestedPropertiesList)),
            mergerProperties);
        nestedPropertiesList = Collections.singletonList(properties);
    }

    return buildWrapperProperties(
        ChildKeepingMergeWrapper.class.getName(),
        nestedPropertiesList,
        mergerProperties);
}
Example #9
Source File: SystemSettingsActor.java From sunbird-lms-service with MIT License
private void getAllSystemSettings() {
    ProjectLogger.log("SystemSettingsActor: getAllSystemSettings called", LoggerEnum.DEBUG.name());
    Map<String, String> systemSettings = DataCacheHandler.getConfigSettings();
    Response response = new Response();
    List<SystemSetting> allSystemSettings = null;
    if (MapUtils.isNotEmpty(systemSettings)) {
        allSystemSettings = new ArrayList<>();
        for (Map.Entry setting : systemSettings.entrySet()) {
            allSystemSettings.add(
                new SystemSetting(
                    (String) setting.getKey(), (String) setting.getKey(), (String) setting.getValue()));
        }
    } else {
        allSystemSettings = systemSettingDaoImpl.readAll();
    }
    response.put(JsonKey.RESPONSE, allSystemSettings);
    sender().tell(response, self());
}
Example #10
Source File: LegacyAnomalyFunctionAlgorithm.java From incubator-pinot with Apache License 2.0
/**
 * Instantiates a new Legacy anomaly function algorithm.
 *
 * @param provider the provider
 * @param config the config
 * @param startTime the start time
 * @param endTime the end time
 * @throws Exception the exception
 */
public LegacyAnomalyFunctionAlgorithm(DataProvider provider, DetectionConfigDTO config, long startTime, long endTime)
    throws Exception {
    super(provider, config, startTime, endTime);
    // TODO: Round start and end time stamps
    Preconditions.checkArgument(config.getProperties().containsKey(PROP_ANOMALY_FUNCTION_CLASS));
    String anomalyFunctionClassName = MapUtils.getString(config.getProperties(), PROP_ANOMALY_FUNCTION_CLASS);

    String specs = OBJECT_MAPPER.writeValueAsString(ConfigUtils.getMap(config.getProperties().get(PROP_SPEC)));
    this.anomalyFunction = (BaseAnomalyFunction) Class.forName(anomalyFunctionClassName).newInstance();
    this.anomalyFunction.init(OBJECT_MAPPER.readValue(specs, AnomalyFunctionDTO.class));

    this.dataFilter = DataFilterFactory.fromSpec(this.anomalyFunction.getSpec().getDataFilter());
    this.failOnError = MapUtils.getBooleanValue(config.getProperties(), PROP_FAIL_ON_ERROR, false);

    if (config.getProperties().containsKey(PROP_METRIC_URN)) {
        this.metricEntity = MetricEntity.fromURN(MapUtils.getString(config.getProperties(), PROP_METRIC_URN));
    } else {
        this.metricEntity = makeEntity(this.anomalyFunction.getSpec());
    }
}
Example #11
Source File: JiraContentFormatter.java From incubator-pinot with Apache License 2.0
/**
 * Apply the parameter map to the given Jira template, and format it as a JiraEntity
 */
private JiraEntity buildJiraEntity(String jiraTemplate, Map<String, Object> templateValues,
    Multimap<String, String> dimensionFilters) {
    String jiraProject = MapUtils.getString(alertClientConfig, PROP_PROJECT, this.jiraAdminConfig.getJiraDefaultProjectKey());
    Long jiraIssueTypeId = MapUtils.getLong(alertClientConfig, PROP_ISSUE_TYPE, this.jiraAdminConfig.getJiraIssueTypeId());

    JiraEntity jiraEntity = new JiraEntity(jiraProject, jiraIssueTypeId, buildSummary(templateValues, dimensionFilters));
    jiraEntity.setAssignee(MapUtils.getString(alertClientConfig, PROP_ASSIGNEE, "")); // Default - Unassigned
    jiraEntity.setMergeGap(MapUtils.getLong(alertClientConfig, PROP_MERGE_GAP, -1L)); // Default - Always merge
    jiraEntity.setLabels(buildLabels(dimensionFilters));
    jiraEntity.setDescription(buildDescription(jiraTemplate, templateValues));
    jiraEntity.setComponents(ConfigUtils.getList(alertClientConfig.get(PROP_COMPONENTS)));
    jiraEntity.setSnapshot(buildSnapshot());
    Map<String, Object> customFieldsMap = ConfigUtils.getMap(alertClientConfig.get(PROP_CUSTOM));
    jiraEntity.setCustomFieldsMap(customFieldsMap);

    return jiraEntity;
}
Example #12
Source File: AnomalyTimeBasedSummarizer.java From incubator-pinot with Apache License 2.0
/**
 * Given property keys from an anomaly function, compares whether two anomalies have the same property for the
 * mergeable keys when doing anomaly detection.
 * Returns true if the key set is empty, if both properties of the two anomalies are empty, or if all of the values
 * for the mergeable keys are equal on both anomalies; otherwise returns false.
 * @param anomaly1 The first anomaly result
 * @param anomaly2 The second anomaly result
 * @param mergeableKeys keys passed by AnomalyMergeConfig, which is defined by the anomaly detection function
 * @return true if the two anomalies are equal on the mergeable keys, otherwise false
 */
private static boolean isEqualOnMergeableKeys(MergedAnomalyResultDTO anomaly1, MergedAnomalyResultDTO anomaly2,
    List<String> mergeableKeys) {
    Map<String, String> prop1 = anomaly1.getProperties();
    Map<String, String> prop2 = anomaly2.getProperties();

    // degenerate case
    if (mergeableKeys.size() == 0 || (MapUtils.isEmpty(prop1) && MapUtils.isEmpty(prop2))) {
        return true;
    }

    // If both anomalies have the mergeable keys and the contents are equal, they are mergeable;
    // otherwise the two anomalies were detected by different function configurations and are not mergeable
    for (String key : mergeableKeys) {
        // If neither prop1 nor prop2 contains the key, the mergeable keys are not properly defined or the anomalies
        // were not generated by the anomaly function
        if (!prop1.containsKey(key) && !prop2.containsKey(key)) {
            LOG.warn("Mergeable key: {} does not exist in properties! The mergeable keys are not properly defined "
                + "or the anomalies are not generated by the anomaly function", key);
        }
        // If prop1 and prop2 have different values for the key, return false
        if (!ObjectUtils.equals(prop1.get(key), prop2.get(key))) {
            return false;
        }
    }
    return true;
}
Example #13
Source File: TimeWindowChart.java From JuniperBot with GNU General Public License v3.0
@Override
@SuppressWarnings("unchecked")
public synchronized void fromMap(Map<String, Object> data) {
    if (MapUtils.isEmpty(data)) {
        return;
    }
    Object window = data.get("window");
    Object measurements = data.get("measurements");
    if (window instanceof Number && measurements instanceof Map) {
        this.window = ((Number) window).longValue();
        this.measurements.clear();
        ((Map) measurements).forEach((k, v) ->
            this.measurements.put(Long.parseLong(k.toString()), Long.parseLong(v.toString())));
        this.lastTick.set(System.currentTimeMillis());
        trim();
    }
}
Example #14
Source File: KafkaFetcher.java From DDMQ with Apache License 2.0
@Override
public boolean ack(AckResult result) {
    if (result.getOffsetsSize() == 0) {
        return true;
    }
    result.getOffsets().forEach((topic, qidMap) -> {
        if (MapUtils.getObject(config.getTopicMap(), topic) == null) {
            LOGGER.warn("invalid topic({}) in {},result={}", topic, this, result);
            return;
        }
        Set<String> currentQids = consumer.getCurrentTopicQids(topic);
        qidMap.forEach((qid, offset) -> {
            if (!currentQids.contains(qid)) {
                LOGGER.warn("invalid qid({}) in {},result={}", qid, this, result);
                return;
            }
            MetricUtils.maxOffsetCount(result.getGroupId(), topic, qid, "ack", offset);
            consumer.setCommitOffset(topic, QidUtils.getKafkaQid(config.getBrokerCluster(), qid), offset);
            LOGGER.debug("commit offset groupId:{}, topic:{}, qid:{}, offset:{}, consumer:{}",
                    result.getGroupId(), topic, qid, offset, this);
        });
    });
    return true;
}
Example #15
Source File: XUserServiceBase.java From ranger with Apache License 2.0
protected List<VXUser> mapEntityToViewBeans(Map<VXUser, XXUser> vxUserXXUserMap) {
    List<VXUser> vxUsers = new ArrayList<>();
    if (MapUtils.isNotEmpty(vxUserXXUserMap)) {
        for (Map.Entry<VXUser, XXUser> vxUserXXUserEntry : vxUserXXUserMap.entrySet()) {
            VXUser vObj = vxUserXXUserEntry.getKey();
            XXUser mObj = vxUserXXUserEntry.getValue();
            vObj.setName(mObj.getName());
            vObj.setIsVisible(mObj.getIsVisible());
            vObj.setDescription(mObj.getDescription());
            vObj.setCredStoreId(mObj.getCredStoreId());
            vObj.setOtherAttributes(mObj.getOtherAttributes());
            vxUsers.add(vObj);
        }
    }
    return vxUsers;
}
Example #16
Source File: SearchServiceInternalImpl.java From studio with GNU General Public License v3.0
/**
 * Maps the item type for the given source based on the configuration
 * @param source the source to map
 * @return the item type
 */
protected String getItemType(Map<String, Object> source) {
    if (MapUtils.isNotEmpty(types)) {
        for (HierarchicalConfiguration<ImmutableNode> typeConfig : types.values()) {
            String fieldName = typeConfig.getString(CONFIG_KEY_TYPE_FIELD);
            if (source.containsKey(fieldName)) {
                String fieldValue = source.get(fieldName).toString();
                if (StringUtils.isNotEmpty(fieldValue)
                        && fieldValue.matches(typeConfig.getString(CONFIG_KEY_TYPE_MATCHES))) {
                    return typeConfig.getString(CONFIG_KEY_TYPE_NAME);
                }
            }
        }
    }
    return defaultType;
}
Example #17
Source File: ServiceGetterTest.java From multiapps-controller with Apache License 2.0
@ParameterizedTest
@MethodSource
public void testGetServiceInstanceEntity(Map<String, Object> serviceInstanceGetterResponse,
                                         Map<String, Object> userProvidedInstanceGetterResponse) {
    prepareServiceGetters(serviceInstanceGetterResponse, userProvidedInstanceGetterResponse);
    Map<String, Object> serviceInstanceEntity = serviceGetter.getServiceInstanceEntity(client, SERVICE_NAME, SPACE_ID);
    if (MapUtils.isEmpty(serviceInstanceGetterResponse)) {
        assertEquals(userProvidedInstanceGetterResponse, serviceInstanceEntity);
        verify(userProvidedInstanceGetter).getServiceInstanceEntity(client, SERVICE_NAME, SPACE_ID);
        return;
    }
    assertEquals(serviceInstanceGetterResponse, serviceInstanceEntity);
    verify(serviceInstanceGetter).getServiceInstanceEntity(client, SERVICE_NAME, SPACE_ID);
}
Example #18
Source File: RouterProcessor.java From JIMU with Apache License 2.0
@Override
public synchronized void init(ProcessingEnvironment processingEnv) {
    super.init(processingEnv);
    routerNodes = new ArrayList<>();
    mFiler = processingEnv.getFiler();
    types = processingEnv.getTypeUtils();
    elements = processingEnv.getElementUtils();
    typeUtils = new TypeUtils(types, elements);
    type_String = elements.getTypeElement("java.lang.String").asType();
    logger = new Logger(processingEnv.getMessager());

    Map<String, String> options = processingEnv.getOptions();
    if (MapUtils.isNotEmpty(options)) {
        host = options.get(KEY_HOST_NAME);
        logger.info(">>> host is " + host + " <<<");
    }
    if (host == null || host.equals("")) {
        host = "default";
    }
    logger.info(">>> RouteProcessor init. <<<");
}
Example #19
Source File: DetermineServiceCreateUpdateServiceActionsStep.java From multiapps-controller with Apache License 2.0
private boolean shouldUpdateCredentials(CloudServiceInstanceExtended service, CloudServiceInstance existingService,
                                        CloudControllerClient client) {
    try {
        Map<String, Object> serviceParameters = client.getServiceInstanceParameters(existingService.getMetadata()
                                                                                                   .getGuid());
        getStepLogger().debug("Existing service parameters: " + SecureSerialization.toJson(serviceParameters));
        return !Objects.equals(service.getCredentials(), serviceParameters);
    } catch (CloudOperationException e) {
        if (HttpStatus.NOT_IMPLEMENTED == e.getStatusCode() || HttpStatus.BAD_REQUEST == e.getStatusCode()) {
            getStepLogger().warnWithoutProgressMessage(Messages.CANNOT_RETRIEVE_SERVICE_INSTANCE_PARAMETERS,
                                                       service.getName());
            // TODO: Optimization (Hack) that should be deprecated at some point. So here is a todo for that.
            return !MapUtils.isEmpty(service.getCredentials());
        }
        throw e;
    }
}
Example #20
Source File: CommitLagLimiter.java From DDMQ with Apache License 2.0
public void acquire(ConsumeOffsetTracker tracker, String topic, ConsumeContext context) throws InterruptedException {
    long maxCommitLag = MapUtils.getLong(maxCommitLagMap, topic, -1L);
    if (maxCommitLag < 0) {
        return;
    }
    long lag = tracker.getCommitLag(topic, context);
    if (lag < maxCommitLag) {
        return;
    }
    commitLagLock.lock();
    try {
        while ((lag = tracker.getCommitLag(topic, context)) >= maxCommitLag) {
            LOGGER.warn("commit lag is over maxLag, block consuming...group={},topic={},qid={},lag={}",
                    context.getGroupId(), topic, context.getQid(), lag);
            getCondition(topic, context).await();
        }
    } finally {
        commitLagLock.unlock();
    }
}
Example #21
Source File: CarreraConfiguration.java From DDMQ with Apache License 2.0
@Override
public boolean validate() throws ConfigException {
    if (CollectionUtils.isEmpty(retryDelays)) {
        throw new ConfigException("[CarreraConfiguration] retryDelays empty");
    } else if (thriftServer == null || !thriftServer.validate()) {
        throw new ConfigException("[CarreraConfiguration] thriftServer error");
    } else if (useKafka && (kafkaProducers <= 0 || MapUtils.isEmpty(kafkaConfigurationMap)
            || !kafkaConfigurationMap.values().stream().allMatch(KafkaConfiguration::validate))) {
        throw new ConfigException("[CarreraConfiguration] kafka config error");
    } else if (useRocketmq && (rocketmqProducers <= 0 || MapUtils.isEmpty(rocketmqConfigurationMap)
            || !rocketmqConfigurationMap.values().stream().allMatch(RocketmqConfiguration::validate))) {
        throw new ConfigException("[CarreraConfiguration] rocketmq config error");
    } else if (useAutoBatch && (autoBatch == null || !autoBatch.validate())) {
        throw new ConfigException("[CarreraConfiguration] autoBatch error");
    } else if (maxTps <= 0) {
        throw new ConfigException("[CarreraConfiguration] maxTps <= 0");
    } else if (tpsWarningRatio <= 0) {
        throw new ConfigException("[CarreraConfiguration] tpsWarningRatio <= 0");
    } else if (defaultTopicInfoConf == null) {
        throw new ConfigException("[CarreraConfiguration] defaultTopicInfoConf is null");
    }
    return true;
}
Example #22
Source File: SchemaPropertiesKeysValidator.java From servicecomb-toolkit with Apache License 2.0
@Override
protected List<OasViolation> validateCurrentSchemaObject(OasValidationContext context, Schema oasObject,
    OasObjectPropertyLocation location) {
    Map<String, Schema> properties = oasObject.getProperties();
    if (MapUtils.isEmpty(properties)) {
        return emptyList();
    }
    return OasObjectValidatorUtils.doValidateMapPropertyKeys(
        location,
        "properties",
        properties,
        keyPredicate,
        errorFunction
    );
}
Example #23
Source File: PropertyColumnUtil.java From spring-boot-plus with Apache License 2.0
/**
 * Gets the property-to-column-name map from the local cache.
 *
 * @param clazz
 * @return
 */
public static Map<String, String> getPropertyColumnMap(Class<?> clazz) {
    Map<String, String> propertyColumnMap = cacheMap.get(clazz);
    if (MapUtils.isEmpty(propertyColumnMap)) {
        // Fetch from TableInfo and cache it in the in-memory map
        Map<String, String> fieldMap = getTableFieldMap(clazz);
        if (MapUtils.isEmpty(fieldMap)) {
            return null;
        } else {
            cacheMap.put(clazz, fieldMap);
            return fieldMap;
        }
    } else {
        return propertyColumnMap;
    }
}
Example #24
Source File: MockThirdEyeDataSource.java From incubator-pinot with Apache License 2.0
static MockDataset fromMap(String name, Map<String, Object> map) {
    return new MockDataset(
        name,
        DateTimeZone.forID(MapUtils.getString(map, "timezone", "America/Los_Angeles")),
        ConfigUtils.<String>getList(map.get("dimensions")),
        ConfigUtils.<String, Map<String, Object>>getMap(map.get("metrics")),
        ConfigUtils.parsePeriod(MapUtils.getString(map, "granularity", "1hour")));
}
Example #25
Source File: TestController.java From momo-cloud-permission with Apache License 2.0
@RequestMapping("/sendWebsocket") public JSONResult sendWebsocket() { Map<String, Channel> channelHashMap = UserChannelCurrentMap.getAllChannel(); if (MapUtils.isNotEmpty(channelHashMap)) { channelHashMap.forEach((s, channel) -> channel.writeAndFlush(new TextWebSocketFrame(JSONObject.toJSONString(JSONResult.ok(s + "我是服务器端发送消息/n"))))); return JSONResult.ok(channelHashMap); } return JSONResult.errorException("没有通道"); }
Example #26
Source File: AuditAction.java From JuniperBot with GNU General Public License v3.0
@Transient
@SuppressWarnings("unchecked")
public <T> T getAttribute(String key, Class<T> type) {
    if (MapUtils.isEmpty(attributes)) {
        return null;
    }
    Object value = attributes.get(key);
    return value != null && type.isAssignableFrom(value.getClass()) ? (T) value : null;
}
Example #27
Source File: MockThirdEyeDataSource.java From incubator-pinot with Apache License 2.0
/**
 * Returns a DataFrame populated with mock data for a given config and time range.
 *
 * @param config metric generator config
 * @param start start time
 * @param end end time
 * @param interval time granularity
 * @return DataFrame with mock data
 */
private static DataFrame makeData(Map<String, Object> config, DateTime start, DateTime end, Period interval) {
    List<Long> timestamps = new ArrayList<>();
    List<Double> values = new ArrayList<>();

    double mean = MapUtils.getDoubleValue(config, "mean", 0);
    double std = MapUtils.getDoubleValue(config, "std", 1);
    double daily = MapUtils.getDoubleValue(config, "daily", mean);
    double weekly = MapUtils.getDoubleValue(config, "weekly", daily);
    NormalDistribution dist = new NormalDistribution(mean, std);

    DateTime origin = start.withFields(DataFrameUtils.makeOrigin(PeriodType.days()));
    while (origin.isBefore(end)) {
        if (origin.isBefore(start)) {
            origin = origin.plus(interval);
            continue;
        }

        timestamps.add(origin.getMillis());

        double compDaily = weekly * (COMPONENT_ALPHA_WEEKLY
            + Math.sin(origin.getDayOfWeek() / 7.0 * 2 * Math.PI + 1) / 2 * (1 - COMPONENT_ALPHA_WEEKLY));
        double compHourly = daily * (COMPONENT_ALPHA_DAILY
            + Math.sin(origin.getHourOfDay() / 24.0 * 2 * Math.PI + 1) / 2 * (1 - COMPONENT_ALPHA_DAILY));
        double compEpsilon = dist.sample();

        values.add((double) Math.max(Math.round(compDaily + compHourly + compEpsilon), 0));
        origin = origin.plus(interval);
    }

    return new DataFrame()
        .addSeries(COL_TIME, ArrayUtils.toPrimitive(timestamps.toArray(new Long[0])))
        .addSeries(COL_VALUE, ArrayUtils.toPrimitive(values.toArray(new Double[0])))
        .setIndex(COL_TIME);
}
Example #28
Source File: RootCauseMetricResource.java From incubator-pinot with Apache License 2.0
/**
 * Returns a map of maps (keyed by dimension name, keyed by dimension value) derived from the
 * breakdown results dataframe.
 *
 * @param dataBreakdown (transformed) breakdown query results
 * @param dataAggregate (transformed) aggregate query results
 * @return map of maps of value (keyed by dimension name, keyed by dimension value)
 */
private static Map<String, Map<String, Double>> makeBreakdownMap(DataFrame dataBreakdown, DataFrame dataAggregate) {
    Map<String, Map<String, Double>> output = new TreeMap<>();

    dataBreakdown = dataBreakdown.dropNull();
    dataAggregate = dataAggregate.dropNull();

    Map<String, Double> dimensionTotals = new HashMap<>();

    for (int i = 0; i < dataBreakdown.size(); i++) {
        final String dimName = dataBreakdown.getString(COL_DIMENSION_NAME, i);
        final String dimValue = dataBreakdown.getString(COL_DIMENSION_VALUE, i);
        final double value = dataBreakdown.getDouble(COL_VALUE, i);

        // cell
        if (!output.containsKey(dimName)) {
            output.put(dimName, new HashMap<String, Double>());
        }
        output.get(dimName).put(dimValue, value);

        // total
        dimensionTotals.put(dimName, MapUtils.getDoubleValue(dimensionTotals, dimName, 0) + value);
    }

    // add rollup column
    if (!dataAggregate.isEmpty()) {
        double total = dataAggregate.getDouble(COL_VALUE, 0);
        for (Map.Entry<String, Double> entry : dimensionTotals.entrySet()) {
            if (entry.getValue() < total) {
                output.get(entry.getKey()).put(ROLLUP_NAME, total - entry.getValue());
            }
        }
    }

    return output;
}
Example #29
Source File: DetectionMetricAttributeHolder.java From incubator-pinot with Apache License 2.0
private String loadMetricCache(Map<String, Object> metricAlertConfigMap) {
    String metricName = MapUtils.getString(metricAlertConfigMap, PROP_METRIC);
    String datasetName = MapUtils.getString(metricAlertConfigMap, PROP_DATASET);
    String cron = MapUtils.getString(metricAlertConfigMap, PROP_CRON);
    String metricAliasKey = ThirdEyeUtils.constructMetricAlias(datasetName, metricName);
    if (metricAttributesMap.containsKey(metricAliasKey)) {
        return metricAliasKey;
    }

    DatasetConfigDTO datasetConfig = fetchDatasetConfigDTO(this.dataProvider, datasetName);
    datasetConfigs.add(datasetConfig);

    MetricConfigDTO metricConfig = this.dataProvider.fetchMetric(metricName, datasetConfig.getDataset());

    cron = cron == null ? buildCron(datasetConfig.bucketTimeGranularity()) : cron;

    metricAttributesMap.put(metricAliasKey, new DetectionMetricProperties(cron, metricConfig, datasetConfig));

    return metricAliasKey;
}
Example #30
Source File: JdbcResponseExtractor.java From elasticsearch-sql with MIT License
private void parseTermsAggregation(Aggregation aggregation, Map<String, Object> aggMap, String parent) {
    Terms buckets = (Terms) aggregation;
    Map<String, Object> tmpSubAgg = new LinkedHashMap<>(0);
    Map<String, Object> subAggMap = new LinkedHashMap<>(0);
    int total = 0;
    for (Terms.Bucket bucket : buckets.getBuckets()) {
        total += bucket.getDocCount();
        if (StringUtils.isNotBlank(bucket.getKeyAsString())) {
            if (bucket.getAggregations() != null && bucket.getAggregations().asList().size() > 0) {
                parseAggregations(bucket.getAggregations(), subAggMap, bucket.getKeyAsString());
                if (parent != null) {
                    aggMap.put(parent, subAggMap);
                } else {
                    aggMap.put(buckets.getName(), subAggMap);
                }
            } else {
                tmpSubAgg.put(bucket.getKeyAsString(), bucket.getDocCount());
            }
        }
    }
    if (MapUtils.isNotEmpty(tmpSubAgg)) {
        tmpSubAgg.put("total", total);
        if (parent != null) {
            aggMap.put(parent, tmpSubAgg);
        } else {
            aggMap.put(buckets.getName(), tmpSubAgg);
        }
    } else {
        if (!subAggMap.containsKey("total")) {
            subAggMap.put("total", total);
        }
    }
}