com.fasterxml.jackson.dataformat.csv.CsvMapper Java Examples
The following examples show how to use com.fasterxml.jackson.dataformat.csv.CsvMapper. Each example notes its original project, source file, and license.
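Before the examples, here is a minimal, self-contained sketch of the typical CsvMapper round trip. The Person POJO and the sample data are invented for illustration and do not appear in the projects below.

import java.util.List;

import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import com.fasterxml.jackson.databind.MappingIterator;
import com.fasterxml.jackson.dataformat.csv.CsvMapper;
import com.fasterxml.jackson.dataformat.csv.CsvSchema;

public class CsvMapperSketch {

    // Hypothetical POJO; @JsonPropertyOrder pins the CSV column order.
    @JsonPropertyOrder({ "name", "age" })
    public static class Person {
        public String name;
        public int age;
    }

    public static void main(String[] args) throws Exception {
        CsvMapper mapper = new CsvMapper();
        // Derive the schema from the POJO and expect a header line in the input.
        CsvSchema schema = mapper.schemaFor(Person.class).withHeader();

        // Read CSV text into POJOs...
        String csv = "name,age\nAda,36\nAlan,41\n";
        MappingIterator<Person> it =
                mapper.readerFor(Person.class).with(schema).readValues(csv);
        List<Person> people = it.readAll();

        // ...and write them back out, header included.
        System.out.print(mapper.writer(schema).writeValueAsString(people));
    }
}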
Example #1
Source File: CsvUtils.java From spring-boot-rest-api-helpers with MIT License
public static <T> List<T> read(Class<T> clazz, InputStream stream, boolean withHeaders, char separator) throws IOException {
    CsvMapper mapper = new CsvMapper();
    mapper.enable(CsvParser.Feature.TRIM_SPACES);
    mapper.enable(CsvParser.Feature.ALLOW_TRAILING_COMMA);
    mapper.enable(CsvParser.Feature.INSERT_NULLS_FOR_MISSING_COLUMNS);
    mapper.enable(CsvParser.Feature.SKIP_EMPTY_LINES);
    mapper.disable(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY);

    CsvSchema schema = mapper.schemaFor(clazz).withColumnReordering(true);
    if (separator == '\t') {
        schema = schema.withColumnSeparator('\t');
    } else {
        schema = schema.withColumnSeparator(',');
    }
    if (withHeaders) {
        schema = schema.withHeader();
    } else {
        schema = schema.withoutHeader();
    }

    ObjectReader reader = mapper.readerFor(clazz).with(schema);
    return reader.<T>readValues(stream).readAll();
}
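A hypothetical call of this helper, assuming a Person POJO like the one in the opening sketch; the sample bytes are made up. This is a fragment, not a complete class:

InputStream stream = new ByteArrayInputStream(
        "name,age\nAda,36\n".getBytes(StandardCharsets.UTF_8));
List<Person> people = CsvUtils.read(Person.class, stream, true, ',');
System.out.println(people.size()); // 1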
Example #2
Source File: InventoryReportLineWriter.java From s3-inventory-usage-examples with Apache License 2.0
/**
 * Write a new inventory report to S3 and return a locator which includes this inventory report's information.
 * @return Locator which includes the information of this new report
 * @throws IOException thrown when the GZIPOutputStream is not created successfully or csvMapper.write() fails
 */
public InventoryManifest.Locator writeCsvFile(List<InventoryReportLine> inventoryReportLine) throws IOException {
    CsvMapper csvMapper = new CsvMapper();
    csvMapper.enable(JsonGenerator.Feature.IGNORE_UNKNOWN);
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    GZIPOutputStream gzipOutputStream = new GZIPOutputStream(byteArrayOutputStream);
    csvMapper.writer(schema).writeValues(gzipOutputStream).writeAll(inventoryReportLine).close();
    byte[] zipByteArray = byteArrayOutputStream.toByteArray();
    InputStream zipInputStream = new ByteArrayInputStream(zipByteArray);
    ObjectMetadata metaData = new ObjectMetadata();
    metaData.setContentLength(zipByteArray.length);
    PutObjectRequest request =
            new PutObjectRequest(bucketName, outputInventoryReportKey, zipInputStream, metaData);
    s3Client.putObject(request);
    return this.buildLocator(zipByteArray);
}
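The CSV-plus-GZIP core of this method, extracted as a standalone fragment for clarity. The schema assignment is an assumption: in the original, schema is an instance field whose construction is not shown. inventoryReportLine is the method argument above.

CsvMapper csvMapper = new CsvMapper();
CsvSchema schema = csvMapper.schemaFor(InventoryReportLine.class); // assumed; the original field may differ
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
GZIPOutputStream gzip = new GZIPOutputStream(bytes);
// SequenceWriter.close() flushes and, by default, also closes the gzip stream,
// which finishes the gzip trailer.
csvMapper.writer(schema).writeValues(gzip).writeAll(inventoryReportLine).close();
byte[] zipped = bytes.toByteArray();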
Example #3
Source File: UtilitiesTestBase.java From hudi with Apache License 2.0
/**
 * Converts the json records into CSV format and writes to a file.
 *
 * @param hasHeader whether the CSV file should have a header line.
 * @param sep the column separator to use.
 * @param lines the records in JSON format.
 * @param fs {@link FileSystem} instance.
 * @param targetPath File path.
 * @throws IOException
 */
public static void saveCsvToDFS(
        boolean hasHeader, char sep, String[] lines,
        FileSystem fs, String targetPath) throws IOException {
    Builder csvSchemaBuilder = CsvSchema.builder();

    ArrayNode arrayNode = mapper.createArrayNode();
    Arrays.stream(lines).forEachOrdered(line -> {
        try {
            arrayNode.add(mapper.readValue(line, ObjectNode.class));
        } catch (IOException e) {
            throw new HoodieIOException(
                    "Error converting json records into CSV format: " + e.getMessage());
        }
    });
    arrayNode.get(0).fieldNames().forEachRemaining(csvSchemaBuilder::addColumn);
    ObjectWriter csvObjWriter = new CsvMapper()
            .writerFor(JsonNode.class)
            .with(csvSchemaBuilder.setUseHeader(hasHeader).setColumnSeparator(sep).build());
    PrintStream os = new PrintStream(fs.create(new Path(targetPath), true));
    csvObjWriter.writeValue(os, arrayNode);
    os.flush();
    os.close();
}
Example #4
Source File: TruckEventsCsvConverter.java From registry with Apache License 2.0
private MappingIterator<TruckEvent> readTruckEventsFromCsv(InputStream csvStream) throws IOException {
    // driverId,truckId,eventTime,eventType,longitude,latitude,eventKey,correlationId,driverName,routeId,routeName,eventDate
    CsvSchema bootstrap = CsvSchema.builder()
            .addColumn("driverId", CsvSchema.ColumnType.NUMBER)
            .addColumn("truckId", CsvSchema.ColumnType.NUMBER)
            .addColumn("eventTime", CsvSchema.ColumnType.STRING)
            .addColumn("eventType", CsvSchema.ColumnType.STRING)
            .addColumn("longitude", CsvSchema.ColumnType.NUMBER)
            .addColumn("latitude", CsvSchema.ColumnType.NUMBER)
            .addColumn("eventKey", CsvSchema.ColumnType.STRING)
            .addColumn("correlationId", CsvSchema.ColumnType.NUMBER)
            .addColumn("driverName", CsvSchema.ColumnType.STRING)
            .addColumn("routeId", CsvSchema.ColumnType.NUMBER)
            .addColumn("routeName", CsvSchema.ColumnType.STRING)
            .addColumn("eventDate", CsvSchema.ColumnType.STRING)
            // .addColumn("miles", CsvSchema.ColumnType.NUMBER)
            .build().withHeader();

    CsvMapper csvMapper = new CsvMapper();
    return csvMapper.readerFor(TruckEvent.class).with(bootstrap).readValues(csvStream);
}
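If TruckEvent carried a @JsonPropertyOrder annotation matching this column order, the hand-built schema could likely be replaced with one derived from the POJO; a sketch under that assumption (csvStream as above):

CsvMapper csvMapper = new CsvMapper();
// schemaFor infers column names (and NUMBER/STRING types) from the POJO.
CsvSchema derived = csvMapper.schemaFor(TruckEvent.class).withHeader();
MappingIterator<TruckEvent> it =
        csvMapper.readerFor(TruckEvent.class).with(derived).readValues(csvStream);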
Example #5
Source File: AvroProducerDemo.java From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
/**
 * Private static method to read data from the given dataFile.
 *
 * @param dataFile data file name in resource folder
 * @return List of StockData instances
 * @throws IOException, NullPointerException
 */
private static List<StockData> getStocks(String dataFile) throws IOException {
    File file = new File(dataFile);
    CsvSchema schema = CsvSchema.builder()
            .addColumn("symbol", CsvSchema.ColumnType.STRING)
            .addColumn("series", CsvSchema.ColumnType.STRING)
            .addColumn("open", CsvSchema.ColumnType.NUMBER)
            .addColumn("high", CsvSchema.ColumnType.NUMBER)
            .addColumn("low", CsvSchema.ColumnType.NUMBER)
            .addColumn("close", CsvSchema.ColumnType.NUMBER)
            .addColumn("last", CsvSchema.ColumnType.NUMBER)
            .addColumn("previousClose", CsvSchema.ColumnType.NUMBER)
            .addColumn("totalTradedQty", CsvSchema.ColumnType.NUMBER)
            .addColumn("totalTradedVal", CsvSchema.ColumnType.NUMBER)
            .addColumn("tradeDate", CsvSchema.ColumnType.STRING)
            .addColumn("totalTrades", CsvSchema.ColumnType.NUMBER)
            .addColumn("isinCode", CsvSchema.ColumnType.STRING)
            .build();

    MappingIterator<StockData> stockDataIterator =
            new CsvMapper().readerFor(StockData.class).with(schema).readValues(file);
    return stockDataIterator.readAll();
}
Example #6
Source File: JacksonCsv.java From zerocode with Apache License 2.0
public static void main(String[] args) {
    List<User> users = new ArrayList<>();
    User userHeader = new User("First", "Last", "Age");
    User user1 = new User("First name1", "Last Name1", null);
    User user2 = new User("First name2", "Last Name2", "22");
    users.add(userHeader);
    users.add(user1);
    users.add(user2);

    CsvSchema schema = CsvSchema.builder()
            .addColumn("firstName")
            .addColumn("lastName")
            .addColumn("age", CsvSchema.ColumnType.NUMBER)
            //.addColumn("comments")
            //.setUseHeader(true)
            //.setStrictHeaders(false)
            .build();
    // CsvSchema bootstrapSchema = CsvSchema.emptySchema().withHeader();
    // ObjectMapper mapper = new CsvMapper();

    CsvMapper mapper = new CsvMapper();
    mapper.enable(CsvParser.Feature.WRAP_AS_ARRAY); // read-side parser feature; has no effect on this write-only example

    ObjectWriter writer = mapper.writer(schema.withLineSeparator("\n"));
    try {
        writer.writeValue(new File("target/ModifiedUsers.csv"), users);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
Example #7
Source File: TimeRangeStreamingQuoteActionHandler.java From ZStreamingQuote with MIT License
/**
 * formatQuoteListToCSV - convert quote list to CSV.
 *
 * @param quoteList quote list
 * @return CSV formatted Quote list
 */
private String formatQuoteListToCSV(List<StreamingQuote> quoteList) {
    String csvData = null;
    CsvMapper mapper = new CsvMapper();
    CsvSchema schema = null;
    if (quoteList.get(0).getClass() == StreamingQuoteModeLtp.class) {
        schema = mapper.schemaFor(StreamingQuoteModeLtp.class).withHeader().withColumnSeparator(',');
    } else if (quoteList.get(0).getClass() == StreamingQuoteModeQuote.class) {
        schema = mapper.schemaFor(StreamingQuoteModeQuote.class).withHeader().withColumnSeparator(',');
    } else if (quoteList.get(0).getClass() == StreamingQuoteModeFull.class) {
        schema = mapper.schemaFor(StreamingQuoteModeFull.class).withHeader().withColumnSeparator(',');
    } else {
        System.out.println("TimeRangeStreamingQuoteActionHandler.formatQuoteListToCSV(): ERROR: Wrong POJO class to map");
    }

    try {
        csvData = mapper.writer(schema).writeValueAsString(quoteList);
    } catch (JsonProcessingException e) {
        System.out.println("TimeRangeStreamingQuoteActionHandler.formatQuoteListToCSV(): ERROR: JsonProcessingException on quote list !!!");
        e.printStackTrace();
    }
    return csvData;
}
Example #8
Source File: TimeRangeOHLCActionHandler.java From ZStreamingQuote with MIT License
/**
 * formatQuoteToCSV - convert quote to CSV.
 * @param quote
 * @return CSV formatted Quote
 */
private String formatQuoteToCSV(OHLCquote quote) {
    String csvData = null;
    CsvMapper mapper = new CsvMapper();
    CsvSchema schema = mapper.schemaFor(OHLCquote.class).withHeader().withColumnSeparator(',');
    try {
        csvData = mapper.writer(schema).writeValueAsString(quote);
    } catch (JsonProcessingException e) {
        System.out.println("TimeRangeOHLCActionHandler.formatQuoteToCSV(): ERROR: JsonProcessingException on quote");
        e.printStackTrace();
    }
    return csvData;
}
Example #9
Source File: SerializationFeatureTest.java From vavr-jackson with Apache License 2.0
@Test
public void vavr_List_DateTime_serialization_should_use_SerializationFeature() throws IOException {
    final DateTime dateTime = new DateTime(2016, 6, 6, 8, 0, DateTimeZone.forID("CET"));
    final io.vavr.collection.List<DateTime> dateTimeList = List.of(dateTime);
    final java.util.List<DateTime> dateTimeJavaList = new ArrayList<>();
    dateTimeJavaList.add(dateTime);

    final CsvMapper mapper = getMapper();
    final ObjectWriter writer = mapper.writer()
            .without(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);
    final String serializedDateTime = writer.writeValueAsString(dateTime);
    final String serializedDateTimeJavaList = writer.writeValueAsString(dateTimeJavaList);
    final String serializedDateTimeList = writer.writeValueAsString(dateTimeList);
    Assertions.assertEquals(serializedDateTime, serializedDateTimeJavaList);
    Assertions.assertEquals(serializedDateTimeJavaList, serializedDateTimeList);

    List<DateTime> restored = mapper.readValue(serializedDateTimeList, new TypeReference<List<DateTime>>() {});
    Assertions.assertEquals(restored.head().getMillis(), dateTime.getMillis());
}
Example #10
Source File: JacksonCSVSplitter.java From java-client-api with Apache License 2.0
private CsvMapper configureCsvMapper() {
    if (csvMapper == null) {
        csvMapper = new CsvMapper()
                .configure(CsvParser.Feature.ALLOW_TRAILING_COMMA, true)
                .configure(CsvParser.Feature.FAIL_ON_MISSING_COLUMNS, false)
                .configure(CsvParser.Feature.INSERT_NULLS_FOR_MISSING_COLUMNS, false)
                .configure(CsvParser.Feature.SKIP_EMPTY_LINES, true)
                .configure(CsvParser.Feature.TRIM_SPACES, true)
                .configure(CsvParser.Feature.WRAP_AS_ARRAY, false)
                // the original configured IGNORE_TRAILING_UNMAPPABLE twice (first false,
                // then true); only the last setting takes effect, so it is kept once here
                .configure(CsvParser.Feature.IGNORE_TRAILING_UNMAPPABLE, true);
    }
    return csvMapper;
}
Example #11
Source File: ZeroCodeReportGeneratorImpl.java From zerocode with Apache License 2.0
public void generateCsvReport(List<ZeroCodeCsvReport> zeroCodeCsvReportRows) {
    /*
     * Write to a CSV file
     */
    CsvSchema schema = CsvSchema.builder()
            .setUseHeader(true)
            .addColumn("scenarioName")
            .addColumn("scenarioLoop", CsvSchema.ColumnType.NUMBER)
            .addColumn("stepName")
            .addColumn("stepLoop", CsvSchema.ColumnType.NUMBER)
            .addColumn("correlationId")
            .addColumn("requestTimeStamp")
            .addColumn("responseDelayMilliSec", CsvSchema.ColumnType.NUMBER)
            .addColumn("responseTimeStamp")
            .addColumn("result")
            .addColumn("method")
            .build();

    CsvMapper csvMapper = new CsvMapper();
    csvMapper.enable(CsvParser.Feature.WRAP_AS_ARRAY); // read-side parser feature; has no effect on this write-only method

    ObjectWriter writer = csvMapper.writer(schema.withLineSeparator("\n"));
    try {
        writer.writeValue(
                new File(TARGET_FULL_REPORT_DIR + TARGET_FULL_REPORT_CSV_FILE_NAME
                        //"_" +
                        //LocalDateTime.now().toString().replace(":", "-") +
                        //".csv"
                ),
                zeroCodeCsvReportRows);
    } catch (IOException e) {
        e.printStackTrace();
        throw new RuntimeException("Exception while writing full CSV report. Details: " + e);
    }
}
Example #12
Source File: JsonCsvConverter.java From tutorials with MIT License
public static void JsonToFormattedCsv(File jsonFile, File csvFile) throws IOException {
    CsvMapper csvMapper = new CsvMapper();
    CsvSchema csvSchema = csvMapper
            .schemaFor(OrderLineForCsv.class)
            .withHeader();
    csvMapper.addMixIn(OrderLine.class, OrderLineForCsv.class);

    OrderLine[] orderLines = new ObjectMapper()
            .readValue(jsonFile, OrderLine[].class);
    csvMapper.writerFor(OrderLine[].class)
            .with(csvSchema)
            .writeValue(csvFile, orderLines);
}
Example #13
Source File: JacksonCSVSplitter.java From java-client-api with Apache License 2.0
private ObjectReader configureObjReader() {
    this.count = 0;
    CsvSchema firstLineSchema =
            getCsvSchema() != null ? getCsvSchema() : CsvSchema.emptySchema().withHeader();
    CsvMapper csvMapper = getCsvMapper() != null ? getCsvMapper() : configureCsvMapper();
    ObjectReader objectReader = csvMapper.readerFor(JsonNode.class);
    return objectReader.with(firstLineSchema);
}
Example #14
Source File: CSVStreamConnector.java From syncope with Apache License 2.0
public MappingIterator<Map<String, String>> reader() throws IOException {
    synchronized (this) {
        if (reader == null) {
            reader = new CsvMapper().
                    enable(CsvParser.Feature.SKIP_EMPTY_LINES).
                    readerFor(Map.class).with(schemaBuilder.build()).readValues(in);
        }
    }
    return reader;
}
Example #15
Source File: CSVStreamConnector.java From syncope with Apache License 2.0
public SequenceWriter writer() throws IOException {
    synchronized (this) {
        if (writer == null) {
            writer = new CsvMapper().writerFor(Map.class).with(schemaBuilder.build()).writeValues(out);
        }
    }
    return writer;
}
Example #16
Source File: TCSV.java From Llunatic with GNU General Public License v3.0
public void test() throws IOException {
    CsvMapper mapper = new CsvMapper();
    mapper.enable(CsvParser.Feature.WRAP_AS_ARRAY);
    CsvSchema schema = CsvSchema.emptySchema();
    // schema = schema.withHeader();
    // schema = schema.withQuoteChar('\'');
    // File csvFile = new File("/Temp/llunatic/doctors/10k/doctor.csv");
    File csvFile = new File("/Users/donatello/Temp/chaseBench-workspace/LUBM/data/01k/src-emailAddress.csv");

    long start = new Date().getTime();
    MappingIterator<String[]> it = mapper.readerFor(String[].class).with(schema).readValues(csvFile);
    String[] row = it.next();
    System.out.println(Arrays.asList(row));
    long end = new Date().getTime();
    System.out.println("**** " + (end - start) + " ms");
    // while (it.hasNext()) {
    //     String[] row = it.next();
    //     System.out.println(Arrays.asList(row));
    // }
}
Example #17
Source File: JsonCsvConverter.java From tutorials with MIT License
public static void JsonToCsv(File jsonFile, File csvFile) throws IOException {
    JsonNode jsonTree = new ObjectMapper().readTree(jsonFile);

    Builder csvSchemaBuilder = CsvSchema.builder();
    JsonNode firstObject = jsonTree.elements().next();
    firstObject.fieldNames().forEachRemaining(csvSchemaBuilder::addColumn);
    CsvSchema csvSchema = csvSchemaBuilder
            .build()
            .withHeader();

    CsvMapper csvMapper = new CsvMapper();
    csvMapper.writerFor(JsonNode.class)
            .with(csvSchema)
            .writeValue(csvFile, jsonTree);
}
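A hypothetical invocation; input.json is assumed to hold a JSON array such as [{"id":1,"item":"pen"},{"id":2,"item":"ink"}]:

JsonCsvConverter.JsonToCsv(new File("input.json"), new File("output.csv"));
// output.csv should then contain:
// id,item
// 1,pen
// 2,ink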
Example #18
Source File: JsonCsvConverter.java From tutorials with MIT License
public static void csvToJson(File csvFile, File jsonFile) throws IOException {
    CsvSchema orderLineSchema = CsvSchema.emptySchema().withHeader();
    CsvMapper csvMapper = new CsvMapper();
    MappingIterator<OrderLine> orderLines = csvMapper.readerFor(OrderLine.class)
            .with(orderLineSchema)
            .readValues(csvFile);

    new ObjectMapper()
            .configure(SerializationFeature.INDENT_OUTPUT, true)
            .writeValue(jsonFile, orderLines.readAll());
}
Example #19
Source File: FileTargetProcessorSubmitIssues.java From FortifyBugTrackerUtility with MIT License
@Override
protected boolean processMaps(Context context, String groupName, List<Object> currentGroup,
        List<LinkedHashMap<String, Object>> listOfMaps) {
    CsvSchema.Builder schemaBuilder = CsvSchema.builder();
    for (String col : getFields().keySet()) {
        schemaBuilder.addColumn(col);
    }
    CsvSchema schema = schemaBuilder.build().withHeader();
    try {
        new CsvMapper().writer(schema).writeValue(new File(groupName), listOfMaps);
    } catch (Exception e) {
        throw new RuntimeException("Error writing data to file " + groupName, e);
    }
    LOG.info(String.format("[File] Submitted %d vulnerabilities to %s", currentGroup.size(), groupName));
    return true;
}
Example #20
Source File: ApplicationReporter.java From cf-butler with Apache License 2.0
protected AppUsageReport readAppUsageReport(String filename)
        throws JsonParseException, JsonMappingException, IOException {
    String content = readFile(filename);
    if (filename.endsWith(".json")) {
        return mapper.readValue(content, AppUsageReport.class);
    } else if (filename.endsWith(".csv")) {
        CsvMapper csvMapper = new CsvMapper();
        csvMapper.enable(CsvParser.Feature.WRAP_AS_ARRAY);
        File csvFile = new File(filename);
        MappingIterator<String[]> it = csvMapper.readerFor(String[].class).readValues(csvFile);
        AppUsageReportBuilder builder = AppUsageReport.builder();
        List<AppUsageMonthly> reports = new ArrayList<>();
        int rowNum = 0;
        while (it.hasNext()) {
            String[] row = it.next();
            if (rowNum > 0) {
                AppUsageMonthlyBuilder amb = AppUsageMonthly.builder();
                for (int i = 0; i < row.length; i++) {
                    if (i == 0) {
                        String[] period = row[i].split("-");
                        if (period.length == 2) {
                            amb.month(Integer.valueOf(period[1]));
                        }
                        amb.year(Integer.valueOf(period[0]));
                    }
                    if (i == 1) {
                        amb.averageAppInstances(Double.valueOf(row[i]));
                    }
                    if (i == 2) {
                        amb.maximumAppInstances(Integer.valueOf(row[i]));
                    }
                    if (i == 3) {
                        amb.appInstanceHours(Double.valueOf(row[i]));
                    }
                }
                reports.add(amb.build());
            }
            rowNum++;
        }
        builder.monthlyReports(reports);
        return builder.build();
    } else {
        return AppUsageReport.builder().build();
    }
}
Example #21
Source File: SimpleCSV.java From synthea with Apache License 2.0
/**
 * Parse the data from the given CSV file into a List of Maps, where the key is the
 * column name. Uses a LinkedHashMap specifically to ensure the order of columns is preserved in
 * the resulting maps.
 *
 * @param csvData Raw CSV data
 * @return parsed data
 * @throws IOException if any exception occurs while parsing the data
 */
public static List<LinkedHashMap<String, String>> parse(String csvData) throws IOException {
    // Read schema from the first line; start with bootstrap instance
    // to enable reading of schema from the first line
    // NOTE: reads schema and uses it for binding
    CsvMapper mapper = new CsvMapper();
    // use first row as header; otherwise defaults are fine
    CsvSchema schema = CsvSchema.emptySchema().withHeader();
    MappingIterator<LinkedHashMap<String, String>> it = mapper.readerFor(LinkedHashMap.class)
            .with(schema).readValues(csvData);
    return it.readAll();
}
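A quick illustration of this parser; the sample data is invented:

List<LinkedHashMap<String, String>> rows =
        SimpleCSV.parse("city,population\nBerlin,3700000\nOslo,700000\n");
System.out.println(rows.size());             // prints 2
System.out.println(rows.get(0).get("city")); // prints Berlin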
Example #22
Source File: OffsetSerDe.java From mirus with BSD 3-Clause "New" or "Revised" License
private static OffsetSerDe csvOffsetSerDe() {
    CsvMapper csvMapper =
            new CsvMapper().configure(CsvGenerator.Feature.STRICT_CHECK_FOR_QUOTING, true);
    CsvSchema schema = csvMapper.schemaFor(OffsetInfo.class).withLineSeparator("");
    return new OffsetSerDe(
            csvMapper.writer(schema), csvMapper.reader(schema).forType(OffsetInfo.class));
}
Example #23
Source File: CsvReader.java From graphql-java-demo with MIT License
public static <T> List<T> loadObjectList(Class<T> type, String fileName) throws IOException {
    CsvSchema bootstrapSchema = CsvSchema.emptySchema().withHeader();
    CsvMapper mapper = new CsvMapper();
    InputStream is = new ClassPathResource(fileName).getInputStream();
    MappingIterator<T> readValues = mapper.readerFor(type).with(bootstrapSchema).readValues(is);
    return readValues.readAll();
}
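Hypothetical usage, assuming persons.csv is on the classpath and Person is a matching POJO (like the one in the opening sketch):

List<Person> persons = CsvReader.loadObjectList(Person.class, "persons.csv");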
Example #24
Source File: NHANESSample.java From synthea with Apache License 2.0
/**
 * Load the NHANES samples from resources.
 * @return A list of samples.
 */
public static List<NHANESSample> loadSamples() {
    CsvMapper mapper = new CsvMapper();
    List<NHANESSample> samples = new LinkedList<NHANESSample>();
    CsvSchema schema = CsvSchema.emptySchema().withHeader();
    String filename = "nhanes_two_year_olds_bmi.csv";
    try {
        String rawCSV = Utilities.readResource(filename);
        MappingIterator<NHANESSample> it =
                mapper.readerFor(NHANESSample.class).with(schema).readValues(rawCSV);
        while (it.hasNextValue()) {
            samples.add(it.nextValue());
        }
    } catch (Exception e) {
        System.err.println("ERROR: unable to load CSV: " + filename);
        e.printStackTrace();
        throw new RuntimeException(e);
    }
    return samples;
}
Example #25
Source File: CsvView.java From spring-boot-doma2-sample with Apache License 2.0
/**
 * Creates the CSV mapper.
 *
 * @return the configured CsvMapper
 */
static CsvMapper createCsvMapper() {
    CsvMapper mapper = new CsvMapper();
    mapper.configure(ALWAYS_QUOTE_STRINGS, true);
    mapper.findAndRegisterModules();
    return mapper;
}
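To see what ALWAYS_QUOTE_STRINGS changes, a small sketch with a throwaway one-column schema (the schema and sample value are assumptions); without the feature, only values containing separators, quotes, or line breaks get quoted:

CsvMapper mapper = CsvView.createCsvMapper();
CsvSchema schema = CsvSchema.builder().addColumn("note").build();
String out = mapper.writer(schema)
        .writeValueAsString(Collections.singletonMap("note", "plain"));
// out should be "\"plain\"\n" rather than "plain\n"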
Example #26
Source File: InventoryReportLineMapper.java From s3-inventory-usage-examples with Apache License 2.0
/**
 * Map each line of the inventory report into a POJO.
 * @return List<InventoryReportLine> which is a list of POJOs
 * @throws IOException when mapping with schema fails
 */
public List<InventoryReportLine> mapInventoryReportLine(List<String> inventoryReportLine) throws IOException {
    CsvMapper mapper = new CsvMapper();
    List<InventoryReportLine> inventoryReportLines = new ArrayList<>();
    for (String eachLine : inventoryReportLine) {
        MappingIterator<InventoryReportLine> iterator =
                mapper.readerFor(InventoryReportLine.class).with(schema).readValues(eachLine);
        List<InventoryReportLine> rowValue = iterator.readAll();
        inventoryReportLines.add(rowValue.get(0));
    }
    return inventoryReportLines;
}
Example #27
Source File: ServiceInstanceReporter.java From cf-butler with Apache License 2.0
protected List<NormalizedServicePlanMonthlyUsage> readServiceUsageReport(String filename)
        throws JsonParseException, JsonMappingException, IOException {
    String content = readFile(filename);
    if (filename.endsWith(".json")) {
        return NormalizedServicePlanMonthlyUsage.listOf(mapper.readValue(content, ServiceUsageReport.class));
    } else if (filename.endsWith(".csv")) {
        CsvMapper csvMapper = new CsvMapper();
        csvMapper.enable(CsvParser.Feature.WRAP_AS_ARRAY);
        File csvFile = new File(filename);
        MappingIterator<String[]> it = csvMapper.readerFor(String[].class).readValues(csvFile);
        List<NormalizedServicePlanMonthlyUsage> reports = new ArrayList<>();
        int rowNum = 0;
        while (it.hasNext()) {
            String[] row = it.next();
            if (rowNum > 0) {
                NormalizedServicePlanMonthlyUsageBuilder amb = NormalizedServicePlanMonthlyUsage.builder();
                for (int i = 0; i < row.length; i++) {
                    if (i == 0) {
                        String[] period = row[i].split("-");
                        if (period.length == 2) {
                            amb.month(Integer.valueOf(period[1]));
                        }
                        amb.year(Integer.valueOf(period[0]));
                    }
                    if (i == 1) {
                        amb.serviceName(row[i]);
                    }
                    if (i == 2) {
                        amb.serviceGuid(row[i]);
                    }
                    if (i == 3) {
                        amb.servicePlanName(row[i]);
                    }
                    if (i == 4) {
                        amb.servicePlanGuid(row[i]);
                    }
                    if (i == 5) {
                        amb.averageInstances(Double.valueOf(row[i]));
                    }
                    if (i == 6) {
                        amb.maximumInstances(Integer.valueOf(row[i]));
                    }
                    if (i == 7) {
                        amb.durationInHours(Double.valueOf(row[i]));
                    }
                }
                NormalizedServicePlanMonthlyUsage usage = amb.build();
                if (StringUtils.isNotBlank(usage.getServicePlanGuid())) {
                    reports.add(usage);
                }
            }
            rowNum++;
        }
        return reports;
    } else {
        return NormalizedServicePlanMonthlyUsage.listOf(ServiceUsageReport.builder().build());
    }
}
Example #28
Source File: StreamPushJobDelegateTest.java From syncope with Apache License 2.0
@Test
public void push() throws IOException {
    PipedInputStream in = new PipedInputStream();
    PipedOutputStream os = new PipedOutputStream(in);

    PushTaskTO pushTask = new PushTaskTO();
    pushTask.setMatchingRule(MatchingRule.UPDATE);
    pushTask.setUnmatchingRule(UnmatchingRule.PROVISION);

    List<ProvisioningReport> results = AuthContextUtils.callAsAdmin(SyncopeConstants.MASTER_DOMAIN, () -> {
        try (CSVStreamConnector connector = new CSVStreamConnector(
                null,
                ";",
                new CsvSchema.Builder().setUseHeader(true),
                null,
                os)) {

            return streamPushExecutor.push(
                    anyTypeDAO.findUser(),
                    userDAO.findAll(1, 100),
                    List.of("username", "firstname", "surname", "email", "status", "loginDate"),
                    connector,
                    List.of(),
                    pushTask,
                    "user");
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    });
    assertEquals(userDAO.count(), results.size());

    MappingIterator<Map<String, String>> reader =
            new CsvMapper().readerFor(Map.class).with(CsvSchema.emptySchema().withHeader()).readValues(in);

    for (int i = 0; i < results.size() && reader.hasNext(); i++) {
        Map<String, String> row = reader.next();
        assertEquals(results.get(i).getName(), row.get("username"));
        assertEquals(userDAO.findByUsername(row.get("username")).getStatus(), row.get("status"));

        switch (row.get("username")) {
            case "rossini":
                assertEquals(StringUtils.EMPTY, row.get("email"));
                assertTrue(row.get("loginDate").contains(";"));
                break;

            case "verdi":
                assertEquals("[email protected]", row.get("email"));
                assertEquals(StringUtils.EMPTY, row.get("loginDate"));
                break;

            case "bellini":
                assertEquals(StringUtils.EMPTY, row.get("email"));
                assertFalse(row.get("loginDate").contains(";"));
                break;

            default:
                break;
        }
    }
}
Example #29
Source File: ReconciliationLogicTest.java From syncope with Apache License 2.0
@Test
public void pushToCSV() throws IOException {
    Pair<Integer, List<UserTO>> search = AuthContextUtils.callAsAdmin(SyncopeConstants.MASTER_DOMAIN,
            () -> userLogic.search(null, 1, 100, List.of(), SyncopeConstants.ROOT_REALM, false));
    assertNotNull(search);

    CSVPushSpec spec = new CSVPushSpec.Builder(AnyTypeKind.USER.name()).ignorePaging(true).
            field("username").
            field("status").
            plainAttr("firstname").
            plainAttr("surname").
            plainAttr("email").
            plainAttr("loginDate").
            build();

    PipedInputStream in = new PipedInputStream();
    PipedOutputStream os = new PipedOutputStream(in);

    List<ProvisioningReport> results = AuthContextUtils.callAsAdmin(SyncopeConstants.MASTER_DOMAIN,
            () -> reconciliationLogic.push(null, 1, 1, List.of(), SyncopeConstants.ROOT_REALM, spec, os));
    assertEquals(search.getLeft(), results.size());

    MappingIterator<Map<String, String>> reader =
            new CsvMapper().readerFor(Map.class).with(CsvSchema.emptySchema().withHeader()).readValues(in);

    for (int i = 0; i < results.size() && reader.hasNext(); i++) {
        Map<String, String> row = reader.next();
        assertEquals(results.get(i).getName(), row.get("username"));
        assertEquals(search.getRight().stream().filter(user -> row.get("username").equals(user.getUsername())).
                findFirst().get().getStatus(), row.get("status"));

        switch (row.get("username")) {
            case "rossini":
                assertEquals(spec.getNullValue(), row.get("email"));
                assertTrue(row.get("loginDate").contains(spec.getArrayElementSeparator()));
                break;

            case "verdi":
                assertEquals("[email protected]", row.get("email"));
                assertEquals(spec.getNullValue(), row.get("loginDate"));
                break;

            case "bellini":
                assertEquals(spec.getNullValue(), row.get("email"));
                assertFalse(row.get("loginDate").contains(spec.getArrayElementSeparator()));
                break;

            default:
                break;
        }
    }
}
Example #30
Source File: ReconciliationITCase.java From syncope with Apache License 2.0
@Test
public void exportCSV() throws IOException {
    ReconciliationService service = adminClient.getService(ReconciliationService.class);
    Client client = WebClient.client(service);
    client.accept(RESTHeaders.TEXT_CSV);

    AnyQuery anyQuery = new AnyQuery.Builder().realm(SyncopeConstants.ROOT_REALM).
            fiql(SyncopeClient.getUserSearchConditionBuilder().is("username").equalTo("*ini").query()).
            page(1).
            size(1000).
            orderBy("username ASC").
            build();

    CSVPushSpec spec = new CSVPushSpec.Builder(AnyTypeKind.USER.name()).ignorePaging(true).
            field("username").
            field("status").
            plainAttr("firstname").
            plainAttr("surname").
            plainAttr("email").
            plainAttr("loginDate").
            build();

    Response response = service.push(anyQuery, spec);
    assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());
    assertEquals(
            "attachment; filename=" + SyncopeConstants.MASTER_DOMAIN + ".csv",
            response.getHeaderString(HttpHeaders.CONTENT_DISPOSITION));

    PagedResult<UserTO> users = userService.search(anyQuery);
    assertNotNull(users);

    MappingIterator<Map<String, String>> reader = new CsvMapper().readerFor(Map.class).
            with(CsvSchema.emptySchema().withHeader()).readValues((InputStream) response.getEntity());

    int rows = 0;
    for (; reader.hasNext(); rows++) {
        Map<String, String> row = reader.next();
        assertEquals(users.getResult().get(rows).getUsername(), row.get("username"));
        assertEquals(users.getResult().get(rows).getStatus(), row.get("status"));

        switch (row.get("username")) {
            case "rossini":
                assertEquals(spec.getNullValue(), row.get("email"));
                assertTrue(row.get("loginDate").contains(spec.getArrayElementSeparator()));
                break;

            case "verdi":
                assertEquals("[email protected]", row.get("email"));
                assertEquals(spec.getNullValue(), row.get("loginDate"));
                break;

            case "bellini":
                assertEquals(spec.getNullValue(), row.get("email"));
                assertFalse(row.get("loginDate").contains(spec.getArrayElementSeparator()));
                break;

            default:
                break;
        }
    }
    assertEquals(rows, users.getTotalCount());
}