Java Code Examples for java.util.LinkedHashMap#forEach()
The following examples show how to use java.util.LinkedHashMap#forEach().
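Before the project examples, here is a minimal self-contained sketch of the method itself (the class and variable names are invented for illustration): LinkedHashMap#forEach(BiConsumer) hands every key/value pair to the given callback and, because the map is a LinkedHashMap, visits the entries in a predictable order (insertion order, or access order if the map was constructed with accessOrder = true).

import java.util.LinkedHashMap;

public class ForEachDemo {
    public static void main(String[] args) {
        LinkedHashMap<String, Integer> scores = new LinkedHashMap<>();
        scores.put("alice", 10);
        scores.put("bob", 7);
        scores.put("carol", 12);

        // The BiConsumer receives each key and value in turn; a LinkedHashMap
        // visits its entries in insertion order: alice, bob, carol.
        scores.forEach((name, score) -> System.out.println(name + " -> " + score));
    }
}

Note that structurally modifying the map (adding or removing entries) from inside the callback will normally fail fast with a ConcurrentModificationException, which is one reason code such as Example 1 below copies entries into a separate map rather than mutating the map it is iterating.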
Example 1
Source File: ClusteringPlugin.java From elasticsearch-carrot2 with Apache License 2.0

/**
 * This places Lingo3G in front of the algorithm list if it is available.
 */
private LinkedHashMap<String, ClusteringAlgorithmProvider> reorderAlgorithms(
        LinkedHashMap<String, ClusteringAlgorithmProvider> providers) {
    String[] desiredOrder = { "Lingo3G", "Lingo", "STC", "Bisecting K-Means" };
    LinkedHashMap<String, ClusteringAlgorithmProvider> copy = new LinkedHashMap<>();
    for (String name : desiredOrder) {
        if (providers.containsKey(name)) {
            copy.put(name, providers.get(name));
        }
    }
    providers.forEach((name, provider) -> {
        if (!copy.containsKey(name)) {
            copy.put(name, provider);
        }
    });
    return copy;
}
Example 2
Source File: YamlProvider.java From EconomyAPI with GNU General Public License v3.0

@SuppressWarnings({ "unchecked", "serial" })
public void init(File path) {
    file = new Config(new File(path, "Money.yml"), Config.YAML, new LinkedHashMap<String, Object>() {
        {
            put("version", 2);
            put("money", new LinkedHashMap<String, Double>());
        }
    });
    LinkedHashMap<Object, Object> temp = (LinkedHashMap<Object, Object>) file.get("money");
    data = new LinkedHashMap<>();
    temp.forEach((key, money) -> {
        String username = key.toString();
        if (money instanceof Integer) {
            data.put(username, ((Integer) money).doubleValue());
        } else if (money instanceof Double) {
            data.put(username, (Double) money);
        } else if (money instanceof String) {
            data.put(username, Double.parseDouble(money.toString()));
        }
    });
}
Example 3
Source File: PojoSerializer.java From flink with Apache License 2.0

/**
 * This legacy snapshot delegates compatibility checks to the {@link PojoSerializerSnapshot}.
 */
@Override
public TypeSerializerSchemaCompatibility<T> resolveSchemaCompatibility(TypeSerializer<T> newSerializer) {
    LinkedHashMap<String, TypeSerializerSnapshot<?>> legacyFieldSerializerSnapshots =
            preprocessLegacySerializerSnapshotTuples(fieldToSerializerConfigSnapshot);

    int numFields = legacyFieldSerializerSnapshots.size();
    ArrayList<Field> fields = new ArrayList<>(numFields);
    ArrayList<TypeSerializerSnapshot<?>> fieldSerializerSnapshots = new ArrayList<>(numFields);
    legacyFieldSerializerSnapshots.forEach((fieldName, fieldSerializerSnapshot) -> {
        fields.add(PojoFieldUtils.getField(fieldName, getTypeClass()));
        fieldSerializerSnapshots.add(fieldSerializerSnapshot);
    });

    PojoSerializerSnapshot<T> newSnapshot = new PojoSerializerSnapshot<>(
            getTypeClass(),
            fields.toArray(new Field[numFields]),
            fieldSerializerSnapshots.toArray(new TypeSerializerSnapshot[numFields]),
            preprocessLegacySerializerSnapshotTuples(registeredSubclassesToSerializerConfigSnapshots),
            preprocessLegacySerializerSnapshotTuples(nonRegisteredSubclassesToSerializerConfigSnapshots));

    return newSnapshot.resolveSchemaCompatibility(newSerializer);
}
Example 4
Source File: ConfigBuilder.java From dew with Apache License 2.0

private static LinkedHashMap mergeItems(LinkedHashMap source, LinkedHashMap target) {
    target.forEach((k, v) -> {
        if (source.containsKey(k) && v instanceof LinkedHashMap) {
            // If the key exists in both the source and target maps
            // and the value has nested items, merge recursively
            target.put(k, mergeItems((LinkedHashMap) source.get(k), (LinkedHashMap) v));
        }
        // Otherwise do not merge, i.e. keep the target's original value
    });
    source.forEach((k, v) -> {
        if (!target.containsKey(k)) {
            // Add items that exist in the source map but not in the target map
            target.put(k, v);
        }
    });
    return target;
}
Example 5
Source File: ConfigCenterService.java From DBus with Apache License 2.0

public ResultEntity updateGlobalConf(LinkedHashMap<String, String> map) {
    ResultEntity resultEntity = new ResultEntity();
    try {
        resultEntity = initService.checkParams(resultEntity, map);
        if (resultEntity.getStatus() != 0) {
            return resultEntity;
        }
        // The handling below preserves any extra special configuration entries that were added previously
        Properties properties = zkService.getProperties(Constants.GLOBAL_PROPERTIES_ROOT);
        properties.putAll(map);
        StringBuilder sb = new StringBuilder();
        map.forEach((k, v) -> sb.append(k).append("=").append(v).append("\n"));
        zkService.setData(Constants.GLOBAL_PROPERTIES_ROOT, sb.toString().getBytes("utf-8"));
    } catch (Exception e) {
        logger.error(e.getMessage(), e);
        resultEntity.setStatus(MessageCode.EXCEPTION);
        resultEntity.setMessage(e.getMessage());
    }
    return resultEntity;
}
Example 6
Source File: PojoSerializerSnapshot.java From flink with Apache License 2.0

/**
 * Transforms the subclass serializer registry structure, {@code LinkedHashMap<Class<?>, TypeSerializer<?>>}
 * to 2 separate structures: a map containing with registered classes as key and their corresponding ids (order
 * in the original map) as value, as well as a separate array of the corresponding subclass serializers.
 */
@SuppressWarnings("unchecked")
private static Tuple2<LinkedHashMap<Class<?>, Integer>, TypeSerializer<Object>[]> decomposeSubclassSerializerRegistry(
        LinkedHashMap<Class<?>, TypeSerializer<?>> subclassSerializerRegistry) {

    final LinkedHashMap<Class<?>, Integer> subclassIds = new LinkedHashMap<>(subclassSerializerRegistry.size());
    final TypeSerializer[] subclassSerializers = new TypeSerializer[subclassSerializerRegistry.size()];

    subclassSerializerRegistry.forEach((registeredSubclassClass, serializer) -> {
        int id = subclassIds.size();
        subclassIds.put(registeredSubclassClass, id);
        subclassSerializers[id] = serializer;
    });

    return Tuple2.of(subclassIds, subclassSerializers);
}
Example 7
Source File: RemoteCommand.java From terracotta-platform with Apache License 2.0

protected final Map<InetSocketAddress, LogicalServerState> findRuntimePeersStatus(InetSocketAddress expectedOnlineNode) {
    logger.trace("findRuntimePeersStatus({})", expectedOnlineNode);

    Cluster cluster = getRuntimeCluster(expectedOnlineNode);
    logger.info("Connecting to: {} (this can take time if some nodes are not reachable)", toString(cluster.getNodeAddresses()));
    Collection<InetSocketAddress> addresses = cluster.getNodeAddresses();
    try (DiagnosticServices diagnosticServices = multiDiagnosticServiceProvider.fetchDiagnosticServices(addresses)) {
        LinkedHashMap<InetSocketAddress, LogicalServerState> status = addresses.stream()
                .collect(toMap(
                        identity(),
                        addr -> diagnosticServices.getDiagnosticService(addr).map(DiagnosticService::getLogicalServerState).orElse(UNREACHABLE),
                        (o1, o2) -> {
                            throw new UnsupportedOperationException();
                        },
                        LinkedHashMap::new));
        status.forEach((address, state) -> {
            if (state.isUnreacheable()) {
                logger.info(" - {} is not reachable", address);
            }
        });
        return status;
    }
}
Example 8
Source File: PojoSerializerSnapshot.java From Flink-CEPplus with Apache License 2.0

/**
 * Transforms the subclass serializer registry structure, {@code LinkedHashMap<Class<?>, TypeSerializer<?>>}
 * to 2 separate structures: a map containing with registered classes as key and their corresponding ids (order
 * in the original map) as value, as well as a separate array of the corresponding subclass serializers.
 */
@SuppressWarnings("unchecked")
private static Tuple2<LinkedHashMap<Class<?>, Integer>, TypeSerializer<Object>[]> decomposeSubclassSerializerRegistry(
        LinkedHashMap<Class<?>, TypeSerializer<?>> subclassSerializerRegistry) {

    final LinkedHashMap<Class<?>, Integer> subclassIds = new LinkedHashMap<>(subclassSerializerRegistry.size());
    final TypeSerializer[] subclassSerializers = new TypeSerializer[subclassSerializerRegistry.size()];

    subclassSerializerRegistry.forEach((registeredSubclassClass, serializer) -> {
        int id = subclassIds.size();
        subclassIds.put(registeredSubclassClass, id);
        subclassSerializers[id] = serializer;
    });

    return Tuple2.of(subclassIds, subclassSerializers);
}
Example 9
Source File: ComponentEventBus.java From flow with Apache License 2.0

/**
 * Creates a list of data objects which can be passed to the constructor
 * returned by {@link #getEventConstructor(Class)} as parameters 3+.
 *
 * @param domEvent
 *            the DOM event containing the data
 * @param eventType
 *            the component event type
 * @return a list of event data objects in the same order as defined in the
 *         component event constructor
 */
private List<Object> createEventDataObjects(DomEvent domEvent, Class<? extends ComponentEvent<?>> eventType) {
    List<Object> eventDataObjects = new ArrayList<>();

    LinkedHashMap<String, Class<?>> expressions = ComponentEventBusUtil
            .getEventDataExpressions(eventType);
    expressions.forEach((expression, type) -> {
        JsonValue jsonValue = domEvent.getEventData().get(expression);
        if (jsonValue == null) {
            jsonValue = Json.createNull();
        }
        Object value = JsonCodec.decodeAs(jsonValue, type);
        eventDataObjects.add(value);
    });
    return eventDataObjects;
}
Example 10
Source File: RandomDistributionTests.java From super-cloudops with Apache License 2.0

public static void gaussianRandomStreamTest1(String[] args) throws Exception {
    System.out.println("=========gaussianRandomStreamTest1===========");
    DoubleStream gaussianStream = Stream.generate(current()::nextGaussian).mapToDouble(e -> e);
    LinkedHashMap<Range, Integer> gaussianRangeCountMap = gaussianStream
            .filter(e -> (e >= -1.0 && e < 1.0))
            .limit(1000000)
            .boxed()
            .map(Ranges::of)
            .collect(Ranges::emptyRangeCountMap, (m, e) -> m.put(e, m.get(e) + 1), Ranges::mergeRangeCountMaps);
    gaussianRangeCountMap.forEach((k, v) -> System.out.println(k.from() + "\t" + v));
}
Example 11
Source File: PojoSerializerSnapshotData.java From Flink-CEPplus with Apache License 2.0

/**
 * Creates a {@link PojoSerializerSnapshotData} from configuration of a {@link PojoSerializer}.
 *
 * <p>This factory method is meant to be used in regular write paths, i.e. when taking a snapshot
 * of the {@link PojoSerializer}. All registered subclass classes, and non-registered
 * subclass classes are all present. Some POJO fields may be absent, if the originating
 * {@link PojoSerializer} was a restored one with already missing fields, and was never replaced
 * by a new {@link PojoSerializer} (i.e. because the serialized old data was never accessed).
 */
static <T> PojoSerializerSnapshotData<T> createFrom(
        Class<T> pojoClass,
        Field[] fields,
        TypeSerializer<?>[] fieldSerializers,
        LinkedHashMap<Class<?>, TypeSerializer<?>> registeredSubclassSerializers,
        Map<Class<?>, TypeSerializer<?>> nonRegisteredSubclassSerializers) {

    final LinkedOptionalMap<Field, TypeSerializerSnapshot<?>> fieldSerializerSnapshots = new LinkedOptionalMap<>(fields.length);

    for (int i = 0; i < fields.length; i++) {
        Field field = fields[i];
        String fieldName = (field == null) ? getDummyNameForMissingField(i) : field.getName();
        fieldSerializerSnapshots.put(fieldName, field, TypeSerializerUtils.snapshotBackwardsCompatible(fieldSerializers[i]));
    }

    LinkedHashMap<Class<?>, TypeSerializerSnapshot<?>> registeredSubclassSerializerSnapshots = new LinkedHashMap<>(registeredSubclassSerializers.size());
    registeredSubclassSerializers.forEach((k, v) -> registeredSubclassSerializerSnapshots.put(k, TypeSerializerUtils.snapshotBackwardsCompatible(v)));

    Map<Class<?>, TypeSerializerSnapshot<?>> nonRegisteredSubclassSerializerSnapshots = new HashMap<>(nonRegisteredSubclassSerializers.size());
    nonRegisteredSubclassSerializers.forEach((k, v) -> nonRegisteredSubclassSerializerSnapshots.put(k, TypeSerializerUtils.snapshotBackwardsCompatible(v)));

    return new PojoSerializerSnapshotData<>(
            pojoClass,
            fieldSerializerSnapshots,
            optionalMapOf(registeredSubclassSerializerSnapshots, Class::getName),
            optionalMapOf(nonRegisteredSubclassSerializerSnapshots, Class::getName));
}
Example 12
Source File: Main.java From quantumdb with Apache License 2.0

private static void printHelp(CliWriter writer, LinkedHashMap<String, Command> commands) {
    writer.write("Available commands:")
            .indent(1);

    commands.forEach((command, delegate) -> {
        Identifier identifier = delegate.getIdentifier();
        writer.write(identifier.getCommand() + ": " + identifier.getDescription());
    });

    writer.indent(-1);
}
Example 13
Source File: TraceabilityDashboardServiceImpl.java From Insights with Apache License 2.0

private JsonObject getPipeLineResponse(LinkedHashMap<String, List<JsonObject>> map, JsonObject dataModel)
        throws InsightsCustomException {
    JsonArray pipeLineArray = new JsonArray();
    JsonObject pipeLineObject = new JsonObject();
    LinkedHashMap<String, String> sortedHandoverTimeMap = new LinkedHashMap<>();
    Set<Entry<String, List<JsonObject>>> keyset = map.entrySet();
    for (Map.Entry<String, List<JsonObject>> keyvaluePair : keyset) {
        List<JsonObject> limitedList = keyvaluePair.getValue().stream().limit(4).collect(Collectors.toList());
        limitedList.forEach(obj -> pipeLineArray.add(obj));
        // Handover time object extraction and sorting
        try {
            List<String> childNodes = getDownTool(keyvaluePair.getKey(), dataModel);
            for (String eachNode : childNodes) {
                String construct = keyvaluePair.getKey() + " To " + eachNode;
                sortedHandoverTimeMap.put(construct, handOverTimeMap.get(construct));
            }
        } catch (InsightsCustomException e) {
            LOG.debug(e.getMessage());
        }
    }
    /* Prepare Summary */
    JsonObject summaryObj = prepareSummary(map, dataModel);
    JsonArray summaryArray = new JsonArray();
    summaryArray.add(summaryObj);
    /* Timelag Response */
    JsonObject handOverTime = new JsonObject();
    sortedHandoverTimeMap.forEach((k, v) -> handOverTime.addProperty(k, v));
    JsonArray handOverArray = new JsonArray();
    handOverArray.add(handOverTime);
    /* Pipeline Response */
    pipeLineObject.add("pipeline", pipeLineArray);
    pipeLineObject.add("summary", summaryArray);
    pipeLineObject.add("timelag", handOverArray);
    return pipeLineObject;
}
Example 14
Source File: Serializer.java From vavr-jackson with Apache License 2.0

private static String expectedMultimapJson(Multimap<?, ?> multimap, int opts) {
    final LinkedHashMap<Object, List<Object>> map = new LinkedHashMap<>();
    multimap.forEach(e -> {
        List<Object> list = map.computeIfAbsent(e._1, k -> new ArrayList<>());
        list.add(e._2);
    });
    StringBuilder sb = new StringBuilder("{");
    map.forEach((k, l) -> sb.append(expectedJson(k.toString(), opts)).append(":").append(expectedJson(io.vavr.collection.Stream.ofAll(l))));
    sb.append("}");
    return sb.toString();
}
Example 15
Source File: ProtectedTermFilterFactory.java From lucene-solr with Apache License 2.0

private void populateInnerFilters(LinkedHashMap<String, Map<String, String>> wrappedFilterArgs) {
    List<TokenFilterFactory> innerFilters = new ArrayList<>();
    wrappedFilterArgs.forEach((filterName, filterArgs) -> {
        int idSuffixPos = filterName.indexOf(FILTER_NAME_ID_SEPARATOR); // Format: SPIname[-id]
        if (idSuffixPos != -1) {
            // Strip '-id' suffix, if any, prior to SPI lookup
            filterName = filterName.substring(0, idSuffixPos);
        }
        innerFilters.add(TokenFilterFactory.forName(filterName, filterArgs));
    });
    setInnerFilters(innerFilters);
}
Example 16
Source File: PojoSerializerSnapshot.java From flink with Apache License 2.0

/**
 * Transforms a {@link LinkedHashMap} with {@link TypeSerializerSnapshot}s as
 * the value to {@link TypeSerializer} as the value by restoring the snapshot.
 */
private static <K> LinkedHashMap<K, TypeSerializer<?>> restoreSerializers(LinkedHashMap<K, TypeSerializerSnapshot<?>> snapshotsMap) {
    final LinkedHashMap<K, TypeSerializer<?>> restoredSerializersMap = new LinkedHashMap<>(snapshotsMap.size());
    snapshotsMap.forEach((key, snapshot) -> restoredSerializersMap.put(key, snapshot.restoreSerializer()));
    return restoredSerializersMap;
}
Example 17
Source File: TableContextUtil.java From datacollector with Apache License 2.0

/**
 * Determines if there are invalid values specified in the initial offset value
 * for columns.
 */
//@VisibleForTesting
void checkForInvalidInitialOffsetValues(
    PushSource.Context context,
    List<Stage.ConfigIssue> issues,
    String qualifiedTableName,
    LinkedHashMap<String, Integer> offsetColumnToType,
    Map<String, String> offsetColumnToStartOffset
) throws StageException {
    List<String> invalidInitialOffsetFieldAndValue = new ArrayList<>();
    offsetColumnToType.forEach((offsetColumn, offsetSqlType) -> {
        String initialOffsetValue = offsetColumnToStartOffset.get(offsetColumn);
        try {
            if (jdbcUtil.isSqlTypeOneOf(offsetSqlType, Types.DATE, Types.TIME, Types.TIMESTAMP)) {
                if (jdbcUtil.isSqlTypeOneOf(offsetSqlType, Types.TIMESTAMP)) {
                    if (!isTimestampWithNanosFormat(initialOffsetValue)) {
                        Long.valueOf(initialOffsetValue);
                    }
                } else {
                    Long.valueOf(initialOffsetValue);
                }
            } else {
                // Use the native field conversion strategy to convert the string to the specified type and get the value
                Field.create(OffsetQueryUtil.SQL_TYPE_TO_FIELD_TYPE.get(offsetSqlType), initialOffsetValue).getValue();
            }
        } catch (IllegalArgumentException e) {
            LOG.error(
                Utils.format(
                    "Invalid Initial Offset Value {} for column {} in table {}",
                    initialOffsetValue,
                    offsetColumn,
                    qualifiedTableName
                ),
                e
            );
            invalidInitialOffsetFieldAndValue.add(offsetColumn + " - " + initialOffsetValue);
        }
    });
    if (!invalidInitialOffsetFieldAndValue.isEmpty()) {
        throw new StageException(
            JdbcErrors.JDBC_72,
            qualifiedTableName,
            COMMA_JOINER.join(invalidInitialOffsetFieldAndValue)
        );
    }
}
Example 18
Source File: MainPanel.java From java-swing-tips with MIT License

private MainPanel() {
    super(new GridLayout(1, 3));
    DefaultListModel<String> model = new DefaultListModel<>();
    model.addElement("ABCDEFGHIJKLMNOPQRSTUVWXYZ");
    model.addElement("aaaa");
    model.addElement("aaaabbb");
    model.addElement("aaaabbbcc");
    model.addElement("1234567890abcdefghijklmnopqrstuvwxyz");
    model.addElement("bbb1");
    model.addElement("bbb12");
    model.addElement("1234567890-+*/=ABCDEFGHIJKLMNOPQRSTUVWXYZ");
    model.addElement("bbb123");

    JList<String> list1 = new TooltipList<String>(model) {
        @Override public void updateUI() {
            super.updateUI();
            setCellRenderer(new TooltipListCellRenderer<>());
        }
    };
    JList<String> list2 = new CellRendererTooltipList<String>(model) {
        @Override public void updateUI() {
            super.updateUI();
            setCellRenderer(new TooltipListCellRenderer<>());
        }
    };
    JList<String> list3 = new JList<String>(model) {
        @Override public void updateUI() {
            super.updateUI();
            setCellRenderer(new TooltipListCellRenderer<>());
        }
    };

    LinkedHashMap<String, Component> map = new LinkedHashMap<>();
    map.put("CellBounds", list1);
    map.put("ListCellRenderer", list2);
    map.put("Default location", list3);
    map.forEach((title, c) -> add(makeTitledPanel(title, c)));
    setPreferredSize(new Dimension(320, 240));
}