Java Code Examples for com.google.common.base.Joiner#on()
The following examples show how to use com.google.common.base.Joiner#on().
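Before the project examples, here is a minimal self-contained sketch of the Joiner calls the examples below rely on (join, skipNulls, useForNull, withKeyValueSeparator, appendTo). It assumes Guava is on the classpath; the class name JoinerSketch and the sample values are illustrative only and do not come from any of the projects listed here.

import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableMap;
import java.util.Arrays;
import java.util.List;

// Illustrative class; not part of any project quoted below.
public class JoinerSketch {
  public static void main(String[] args) {
    // Basic joining of an Iterable or varargs.
    List<String> parts = Arrays.asList("a", "b", "c");
    System.out.println(Joiner.on(", ").join(parts));        // a, b, c
    System.out.println(Joiner.on('-').join(1, 2, 3));       // 1-2-3

    // Joiner throws NullPointerException on null elements unless configured otherwise.
    System.out.println(Joiner.on(",").skipNulls().join("a", null, "b"));    // a,b
    System.out.println(Joiner.on(",").useForNull("n/a").join("a", null));   // a,n/a

    // MapJoiner: join map entries with a key-value separator.
    System.out.println(Joiner.on("&").withKeyValueSeparator("=")
        .join(ImmutableMap.of("x", "1", "y", "2")));         // x=1&y=2

    // appendTo writes into an existing StringBuilder instead of allocating a new String.
    StringBuilder sb = new StringBuilder("[");
    Joiner.on(";").appendTo(sb, parts).append("]");
    System.out.println(sb);                                  // [a;b;c]
  }
}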
Example 1
Source File: CsvUtil.java From jpmml-evaluator with GNU Affero General Public License v3.0 | 6 votes |
static public void writeTable(Table table, OutputStream os) throws IOException {
	try(BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(os, "UTF-8"))){
		Joiner joiner = Joiner.on(table.getSeparator());

		for(int i = 0; i < table.size(); i++){
			List<String> row = table.get(i);

			if(i > 0){
				writer.write('\n');
			}

			writer.write(joiner.join(row));
		}
	}
}
Example 2
Source File: StringEL.java From datacollector with Apache License 2.0 | 6 votes |
@ElFunction(
    prefix = "map",
    name = "join",
    description = "Returns each element of a LIST field joined on the specified character sequence."
)
public static String joinMap(
    @ElParam("map") Map<String, Field> map,
    @ElParam("separator") String separator,
    @ElParam("keyValueSeparator") String kvSeparator
) {
  if (map == null) {
    return "";
  }

  Map<String, String> mapOfStrings = map.entrySet().stream()
      .collect(Collectors.toMap(
          Map.Entry::getKey,
          e -> e.getValue().getValue() == null ? "null" : e.getValue().getValueAsString()
      ));

  Joiner joiner = Joiner.on(separator);
  return joiner.withKeyValueSeparator(kvSeparator).join(mapOfStrings);
}
Example 3
Source File: ReplaceManifestPlaceholdersStep.java From buck with Apache License 2.0 | 6 votes |
@VisibleForTesting
static String replacePlaceholders(String content, ImmutableMap<String, String> placeholders) {
  Iterable<String> escaped = Iterables.transform(placeholders.keySet(), Pattern::quote);
  Joiner joiner = Joiner.on("|");
  String patternString =
      Pattern.quote("${") + "(" + joiner.join(escaped) + ")" + Pattern.quote("}");
  Pattern pattern = Pattern.compile(patternString);
  Matcher matcher = pattern.matcher(content);
  StringBuffer sb = new StringBuffer();
  while (matcher.find()) {
    matcher.appendReplacement(sb, placeholders.get(matcher.group(1)));
  }
  matcher.appendTail(sb);
  return sb.toString();
}
Example 4
Source File: ObjectProperties.java From qpid-broker-j with Apache License 2.0 | 6 votes |
@Override
public String toString() {
  StringBuilder sb = new StringBuilder();
  Joiner joiner = Joiner.on(",");
  joiner.withKeyValueSeparator("=").appendTo(sb, _properties);
  if (_attributeNames != null && !_attributeNames.isEmpty()) {
    if (!_properties.isEmpty()) {
      sb.append(",");
    }
    sb.append("ATTRIBUTES=[");
    joiner.appendTo(sb, _attributeNames);
    sb.append("]");
  }
  return sb.toString();
}
Example 5
Source File: ManifestProcessorTask.java From javaide with GNU General Public License v3.0 | 6 votes |
/**
 * Serialize a map key+value pairs into a comma separated list. Map elements are sorted to
 * ensure stability between instances.
 *
 * @param mapToSerialize the map to serialize.
 */
protected String serializeMap(Map<String, String> mapToSerialize) {
  final Joiner keyValueJoiner = Joiner.on(":");
  // transform the map on a list of key:value items, sort it and concatenate it.
  return Joiner.on(",")
      .join(
          Lists.newArrayList(
              Iterables.transform(mapToSerialize.entrySet(),
                  new Function<Map.Entry<String, String>, String>() {
                    @Override
                    public String apply(final Map.Entry<String, String> input) {
                      return keyValueJoiner.join(input.getKey(), input.getValue());
                    }
                  }))
              .iterator());
}
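Example 5 builds the key:value pairs by hand with an anonymous Function. For comparison, here is a hedged sketch of the same comma-separated serialization using Guava's built-in MapJoiner; the helper class, method name, and the use of ImmutableSortedMap for stable ordering are illustrative assumptions, not taken from the project above.

import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableSortedMap;
import java.util.Map;

// Illustrative helper, not part of the project quoted above.
public class MapSerialization {
  // Produces the same "key:value,key:value" layout as Example 5; copying into an
  // ImmutableSortedMap first gives a sorted, stable ordering of the entries.
  static String serializeMap(Map<String, String> mapToSerialize) {
    return Joiner.on(",")
        .withKeyValueSeparator(":")
        .join(ImmutableSortedMap.copyOf(mapToSerialize));
  }
}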
Example 6
Source File: InsertFromSubQueryAnalyzer.java From Elasticsearch with Apache License 2.0 | 6 votes |
/**
 * validate that result columns from subquery match explicit insert columns
 * or complete table schema
 */
private static void validateColumnsAndAddCastsIfNecessary(List<Reference> targetColumns,
                                                          QuerySpec querySpec) {
  if (targetColumns.size() != querySpec.outputs().size()) {
    Joiner commaJoiner = Joiner.on(", ");
    throw new IllegalArgumentException(String.format(
        "Number of target columns (%s) of insert statement doesn't match number of source columns (%s)",
        commaJoiner.join(Iterables.transform(targetColumns, Reference.TO_COLUMN_NAME)),
        commaJoiner.join(Iterables.transform(querySpec.outputs(), SymbolPrinter.FUNCTION))));
  }

  int failedCastPosition = querySpec.castOutputs(
      Iterators.transform(targetColumns.iterator(), Symbols.TYPES_FUNCTION));
  if (failedCastPosition >= 0) {
    Symbol failedSource = querySpec.outputs().get(failedCastPosition);
    Reference failedTarget = targetColumns.get(failedCastPosition);
    throw new IllegalArgumentException(String.format(Locale.ENGLISH,
        "Type of subquery column %s (%s) does not match is not convertable to the type of table column %s (%s)",
        failedSource,
        failedSource.valueType(),
        failedTarget.info().ident().columnIdent().fqn(),
        failedTarget.valueType()
    ));
  }
}
Example 7
Source File: AbstractSolrMorphlineTest.java From kite with Apache License 2.0 | 5 votes |
protected static void myInitCore(String baseDirName) throws Exception {
  Joiner joiner = Joiner.on(File.separator);
  initCore(
      "solrconfig.xml", "schema.xml",
      joiner.join(RESOURCES_DIR, baseDirName)
  );
}
Example 8
Source File: ResolvedTypes.java From xtext-extras with Eclipse Public License 2.0 | 5 votes |
protected void appendListMapContent(/* @Nullable */ Map<?, ? extends Collection<?>> map,
    String prefix, StringBuilder result, String indentation) {
  if (map != null) {
    MultimapJoiner joiner = new MultimapJoiner(
        Joiner.on("\n " + indentation),
        "\n " + indentation,
        " ->\n" + indentation + " ");
    result.append("\n").append(indentation).append(prefix).append(":\n")
        .append(indentation).append(" ");
    joiner.appendTo(result, map);
  }
}
Example 9
Source File: PathType.java From jimfs with Apache License 2.0 | 5 votes |
protected PathType(boolean allowsMultipleRoots, char separator, char... otherSeparators) {
  this.separator = String.valueOf(separator);
  this.allowsMultipleRoots = allowsMultipleRoots;
  this.otherSeparators = String.valueOf(otherSeparators);
  this.joiner = Joiner.on(separator);
  this.splitter = createSplitter(separator, otherSeparators);
}
Example 10
Source File: OrcFlowFileWriter.java From nifi with Apache License 2.0 | 5 votes |
private String getColumnNamesFromInspector(ObjectInspector inspector) {
  List<String> fieldNames = Lists.newArrayList();
  Joiner joiner = Joiner.on(",");
  if (inspector instanceof StructObjectInspector) {
    StructObjectInspector soi = (StructObjectInspector) inspector;
    List<? extends StructField> fields = soi.getAllStructFieldRefs();
    fieldNames.addAll(fields.stream()
        .map((Function<StructField, String>) StructField::getFieldName)
        .collect(Collectors.toList()));
  }
  return joiner.join(fieldNames);
}
Example 11
Source File: GraphTest.java From c5-replicator with Apache License 2.0 | 5 votes |
@Test
public void testDoTarjan() throws Exception {
  Map<NodeType, Node<NodeType>> nodes = new HashMap<>();

  // create a node for each NodeType:
  for (NodeType t : NodeType.values()) {
    nodes.put(t, new Node<>(t));
  }

  // attach nodes like so:
  // A -> B
  // B -> C
  // C -> D
  // C -> E
  // E -> F
  // D -> F
  connectFromTo(nodes, NodeType.A, NodeType.B);
  connectFromTo(nodes, NodeType.B, NodeType.C);
  connectFromTo(nodes, NodeType.C, NodeType.D);
  connectFromTo(nodes, NodeType.C, NodeType.E);
  connectFromTo(nodes, NodeType.E, NodeType.F);
  connectFromTo(nodes, NodeType.D, NodeType.F);

  List<ImmutableList<Node<NodeType>>> result = Graph.doTarjan(nodes.values());
  System.out.println("no cycles");
  Joiner joiner = Joiner.on("\n");
  System.out.println(joiner.join(result));
  assertEquals(nodes.size(), result.size());

  // validate topo sort:
  validateTopoSort(ImmutableList.of(NodeType.A, NodeType.B, NodeType.C,
      NodeType.E, NodeType.D, NodeType.F), result);

  // create a cycle:
  connectFromTo(nodes, NodeType.F, NodeType.C);
  result = Graph.doTarjan(nodes.values());
  System.out.println("cycles");
  System.out.println(joiner.join(result));
  assertFalse(nodes.size() == result.size());
}
Example 12
Source File: ReferencedEnvelope.java From arctic-sea with Apache License 2.0 | 5 votes |
/**
 * Creates the minimum and maximum values of this envelope in the default
 * EPSG.
 *
 * @return the {@code MinMax} describing the envelope
 */
public MinMax<String> getMinMaxFromEnvelope() {
  if (isSetEnvelope()) {
    Joiner joiner = Joiner.on(' ');
    return new MinMax<String>()
        .setMaximum(joiner.join(envelope.getMaxX(), envelope.getMaxY()))
        .setMinimum(joiner.join(envelope.getMinX(), envelope.getMinY()));
  }
  return new MinMax<>();
}
Example 13
Source File: Options.java From japicmp with Apache License 2.0 | 5 votes |
public String joinOldVersions() {
  Joiner joiner = Joiner.on(";");
  String join = joiner.join(toVersionList(oldArchives));
  if (join.trim().length() == 0) {
    return N_A;
  }
  return join;
}
Example 14
Source File: BasicDao.java From ob1k with Apache License 2.0 | 5 votes |
private String createQueryForIds(final String baseQuery, final String tableName,
    final String idColumnName, final List<?> ids) {
  final StringBuilder query = new StringBuilder(baseQuery);
  query.append(' ');
  query.append(withBackticks(tableName));
  query.append(" where ");
  query.append(withBackticks(idColumnName));
  query.append(" in (");
  final Joiner joiner = Joiner.on(',');
  joiner.appendTo(query, transform(ids, BasicDao::withQuote)); // todo: why it's not just List<String> ?
  query.append(");");
  return query.toString();
}
Example 15
Source File: AbstractQueriesTests.java From mat-calcite-plugin with Apache License 2.0 | 5 votes |
protected List<String> executeToCSV(String sql) throws SQLException {
  List<String> res = new ArrayList<String>();
  System.out.println("sql = " + sql);
  try (Connection con = CalciteDataSource.getConnection(getSnapshot())) {
    PreparedStatement ps = con.prepareStatement(sql);
    ResultSet rs = ps.executeQuery();
    ResultSetMetaData md = rs.getMetaData();
    Joiner joiner = Joiner.on('|');
    List<String> row = new ArrayList<String>();
    final int columnCount = md.getColumnCount();
    for (int i = 1; i <= columnCount; i++) {
      row.add(md.getColumnName(i));
    }
    res.add(joiner.join(row));
    while (rs.next()) {
      row.clear();
      for (int i = 1; i <= columnCount; i++) {
        row.add(String.valueOf(rs.getObject(i)));
      }
      res.add(joiner.join(row));
    }
  } catch (SQLException e) {
    e.printStackTrace(); // tycho-surefire-plugin forces trimStackTrace=true
    throw e;
  }
  return res;
}
Example 16
Source File: SoaMsgEncoder.java From dapeng-soa with Apache License 2.0 | 5 votes |
private void updateSoaHeader(SoaHeader soaHeader, TransactionContext transactionContext) {
  Long requestTimestamp = (Long) transactionContext.getAttribute("dapeng_request_timestamp");
  Long cost = System.currentTimeMillis() - requestTimestamp;
  soaHeader.setCalleeTime2(cost.intValue());
  soaHeader.setCalleeIp(Optional.of(IPUtils.transferIp(SoaSystemEnvProperties.HOST_IP)));
  soaHeader.setCalleePort(Optional.of(SoaSystemEnvProperties.SOA_CONTAINER_PORT));
  Joiner joiner = Joiner.on(":");
  soaHeader.setCalleeMid(joiner.join(soaHeader.getServiceName(),
      soaHeader.getMethodName(), soaHeader.getVersionName()));
  soaHeader.setCalleeTid(transactionContext.calleeTid());
}
Example 17
Source File: Options.java From japicmp with Apache License 2.0 | 5 votes |
public String joinNewVersions() {
  Joiner joiner = Joiner.on(";");
  String join = joiner.join(toVersionList(newArchives));
  if (join.trim().length() == 0) {
    return N_A;
  }
  return join;
}
Example 18
Source File: CatOLog.java From c5-replicator with Apache License 2.0 | 4 votes |
private static void formatPeerIdList(Formatter formatter, List<Long> peerIdList) {
  Joiner joiner = Joiner.on(", ");
  formatter.format(joiner.join(peerIdList));
}
Example 19
Source File: ModelDBHibernateUtil.java From modeldb with Apache License 2.0 | 4 votes |
public static Query getWorkspaceEntityQuery(
    Session session,
    String shortName,
    String command,
    String fieldName,
    String name,
    String workspaceColumnName,
    String workspaceId,
    WorkspaceType workspaceType,
    boolean shouldSetName,
    List<String> ordering) {
  StringBuilder stringQueryBuilder = new StringBuilder(command);
  stringQueryBuilder
      .append(" AND ")
      .append(shortName)
      .append(".")
      .append(ModelDBConstants.DELETED)
      .append(" = false ");
  if (workspaceId != null && !workspaceId.isEmpty()) {
    if (shouldSetName) {
      stringQueryBuilder.append(" AND ");
    }
    stringQueryBuilder
        .append(shortName)
        .append(".")
        .append(workspaceColumnName)
        .append(" =: ")
        .append(workspaceColumnName)
        .append(" AND ")
        .append(shortName)
        .append(".")
        .append(ModelDBConstants.WORKSPACE_TYPE)
        .append(" =: ")
        .append(ModelDBConstants.WORKSPACE_TYPE);
  }

  if (ordering != null && !ordering.isEmpty()) {
    stringQueryBuilder.append(" order by ");
    Joiner joiner = Joiner.on(",");
    stringQueryBuilder.append(joiner.join(ordering));
  }

  Query query = session.createQuery(stringQueryBuilder.toString());
  if (shouldSetName) {
    query.setParameter(fieldName, name);
  }
  if (workspaceId != null && !workspaceId.isEmpty()) {
    query.setParameter(workspaceColumnName, workspaceId);
    query.setParameter(ModelDBConstants.WORKSPACE_TYPE, workspaceType.getNumber());
  }
  return query;
}
Example 20
Source File: TestFieldHasherProcessorUpgrader.java From datacollector with Apache License 2.0 | 4 votes |
@Test
public void testUpgradeV1toV2() throws StageException {
  //Old Config
  final String FIELD_HASHER_CONFIG = "fieldHasherConfigs";

  //New Config
  final Joiner JOINER = Joiner.on(".");

  //v1 to v2 constants
  //old fields
  final String FIELD_HASHER_CONFIGS = "fieldHasherConfigs";
  final String FIELDS_TO_HASH = "fieldsToHash";

  //New fields
  final String HASHER_CONFIG = "hasherConfig";
  final String RECORD_HASHER_CONFIG = "recordHasherConfig";
  final String TARGET_FIELD_HASHER_CONFIGS = "targetFieldHasherConfigs";
  final String INPLACE_FIELD_HASHER_CONFIGS = "inPlaceFieldHasherConfigs";
  final String SOURCE_FIELDS_TO_HASH = "sourceFieldsToHash";
  final String HASH_ENTIRE_RECORD = "hashEntireRecord";
  final String HASH_TYPE = "hashType";
  final String TARGET_FIELD = "targetField";
  final String HEADER_ATTRIBUTE = "headerAttribute";
  final String INCLUDE_RECORD_HEADER = "includeRecordHeaderForHashing";

  List<Config> configs = new ArrayList<>();

  LinkedHashMap<String, Object> fieldHasherConfig1 = new LinkedHashMap<String, Object>();
  fieldHasherConfig1.put("fieldsToHash", ImmutableList.of("/a", "/b"));
  fieldHasherConfig1.put("hashType", HashType.MD5);

  LinkedHashMap<String, Object> fieldHasherConfig2 = new LinkedHashMap<String, Object>();
  fieldHasherConfig2.put("fieldsToHash", ImmutableList.of("/c", "/d"));
  fieldHasherConfig2.put("hashType", HashType.SHA1);

  List<LinkedHashMap<String, Object>> fieldHasherConfigs = new ArrayList<LinkedHashMap<String, Object>>();
  configs.add(new Config(FIELD_HASHER_CONFIG, fieldHasherConfigs));
  fieldHasherConfigs.add(fieldHasherConfig1);
  fieldHasherConfigs.add(fieldHasherConfig2);

  FieldHasherProcessorUpgrader upgrader = new FieldHasherProcessorUpgrader();
  upgrader.upgrade("a", "b", "c", 1, 2, configs);

  Set<String> configsToBePresentAfterUpgrade = new HashSet<String>();
  configsToBePresentAfterUpgrade.add(JOINER.join(HASHER_CONFIG, RECORD_HASHER_CONFIG, HASH_ENTIRE_RECORD));
  configsToBePresentAfterUpgrade.add(JOINER.join(HASHER_CONFIG, RECORD_HASHER_CONFIG, INCLUDE_RECORD_HEADER));
  configsToBePresentAfterUpgrade.add(JOINER.join(HASHER_CONFIG, RECORD_HASHER_CONFIG, HASH_TYPE));
  configsToBePresentAfterUpgrade.add(JOINER.join(HASHER_CONFIG, RECORD_HASHER_CONFIG, TARGET_FIELD));
  configsToBePresentAfterUpgrade.add(JOINER.join(HASHER_CONFIG, RECORD_HASHER_CONFIG, HEADER_ATTRIBUTE));
  configsToBePresentAfterUpgrade.add(JOINER.join(HASHER_CONFIG, INPLACE_FIELD_HASHER_CONFIGS));
  configsToBePresentAfterUpgrade.add(JOINER.join(HASHER_CONFIG, TARGET_FIELD_HASHER_CONFIGS));

  // previously this was 7. now it's 9 due to adding 2 more configs
  // for SDC-6540 which added the "useSeparator" options.
  // the upgrader runs both V1 to V2 then the V2 to V3 upgrades.
  Assert.assertEquals("There should be 9 configs after upgrade", configs.size(), 9);

  for (Config config : configs) {
    if (config.getName() == JOINER.join(HASHER_CONFIG, RECORD_HASHER_CONFIG, HASH_ENTIRE_RECORD)) {
      Assert.assertFalse("Record Hashing should be disabled after upgrade",
          ((Boolean) config.getValue()).booleanValue());
    } else if (config.getName() == JOINER.join(HASHER_CONFIG, INPLACE_FIELD_HASHER_CONFIGS)) {
      List upgradedInPlaceHasherConfigs = (List) config.getValue();
      Assert.assertEquals(
          "After upgrade the number of field hash configs should be same",
          upgradedInPlaceHasherConfigs.size(),
          2);
      for (Object upgradedFieldHasherConfigObject : upgradedInPlaceHasherConfigs) {
        LinkedHashMap<String, Object> upgradedFieldHasherConfig =
            (LinkedHashMap<String, Object>) upgradedFieldHasherConfigObject;
        Assert.assertTrue("InPlace Field Hasher Config should contain source Fields To Hash And Hash Type",
            upgradedFieldHasherConfig.containsKey("sourceFieldsToHash")
                && upgradedFieldHasherConfig.containsKey("hashType"));
      }
    } else if (config.getName() == JOINER.join(HASHER_CONFIG, TARGET_FIELD_HASHER_CONFIGS)) {
      List upgradedTargetHasherConfigs = (List) config.getValue();
      Assert.assertEquals(
          "After upgrade the number of target field hash configs should be 0",
          upgradedTargetHasherConfigs.size(),
          0
      );
    }
    configsToBePresentAfterUpgrade.remove(config.getName());
  }

  Assert.assertTrue(
      "After upgrade the following fields are not present" + getNotPresentConfigs(configsToBePresentAfterUpgrade),
      configsToBePresentAfterUpgrade.isEmpty()
  );
}