com.fasterxml.jackson.dataformat.csv.CsvSchema Java Examples

The following examples show how to use com.fasterxml.jackson.dataformat.csv.CsvSchema. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: CsvUtils.java    From spring-boot-rest-api-helpers with MIT License
/**
 * Reads CSV content from the given stream and binds each row to an instance of {@code clazz}.
 *
 * @param clazz       target type each CSV row is mapped to
 * @param stream      CSV input stream (not closed by this method)
 * @param withHeaders whether the first line of the input is a header row
 * @param separator   column separator character to use
 * @return all rows bound to {@code clazz}
 * @throws IOException if reading or binding fails
 */
public static <T> List<T> read(Class<T> clazz, InputStream stream, boolean withHeaders, char separator) throws IOException {
    CsvMapper mapper = new CsvMapper();

    mapper.enable(CsvParser.Feature.TRIM_SPACES);
    mapper.enable(CsvParser.Feature.ALLOW_TRAILING_COMMA);
    mapper.enable(CsvParser.Feature.INSERT_NULLS_FOR_MISSING_COLUMNS);
    mapper.enable(CsvParser.Feature.SKIP_EMPTY_LINES);
    mapper.disable(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY);

    // BUG FIX: the separator argument used to be ignored unless it was '\t' —
    // any other value silently fell back to ','. Honor it exactly as passed
    // (backward compatible for the previously-working ',' and '\t' callers).
    CsvSchema schema = mapper.schemaFor(clazz)
            .withColumnReordering(true)
            .withColumnSeparator(separator);
    schema = withHeaders ? schema.withHeader() : schema.withoutHeader();

    ObjectReader reader = mapper.readerFor(clazz).with(schema);
    return reader.<T>readValues(stream).readAll();
}
 
Example #2
Source File: CsvInputStreamMapper.java    From pocket-etl with Apache License 2.0
/**
 * Converts an inputStream into an iterator of objects using Jackson CSV mapper.
 * @param inputStream inputStream in CSV format.
 * @return An iterator based on the inputStream.
 */
@Override
public Iterator<T> apply(InputStream inputStream) {
    try {
        // Derive the schema from the target class; override the separator
        // only when one was explicitly configured on this mapper instance.
        CsvSchema schema = mapper.schemaFor(objectClass);
        if (columnSeparator != null) {
            schema = schema.withColumnSeparator(columnSeparator);
        }

        // Fail fast on rows that are missing columns, then hand back a lazy
        // MappingIterator over the stream.
        return mapper.readerFor(objectClass)
                .withFeatures(CsvParser.Feature.FAIL_ON_MISSING_COLUMNS)
                .with(schema)
                .readValues(inputStream);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
 
Example #3
Source File: TimeRangeOHLCActionHandler.java    From ZStreamingQuote with MIT License
/**
 * formatQuoteToCSV - convert quote to CSV
 * @param quote the OHLC quote to serialize
 * @return CSV formatted Quote (header line plus one data row), or null on serialization failure
 */
private String formatQuoteToCSV(OHLCquote quote){
	CsvMapper mapper = new CsvMapper();
	// Header row on, comma-separated columns derived from the POJO.
	CsvSchema schema = mapper.schemaFor(OHLCquote.class).withHeader().withColumnSeparator(',');

	try {
		return mapper.writer(schema).writeValueAsString(quote);
	} catch (JsonProcessingException e) {
		System.out.println("TimeRangeOHLCActionHandler.formatQuoteToCSV(): ERROR: JsonProcessingException on quote");
		e.printStackTrace();
		return null;
	}
}
 
Example #4
Source File: TimeRangeStreamingQuoteActionHandler.java    From ZStreamingQuote with MIT License
/**
 * formatQuoteListToCSV - convert quote list to CSV
 *
 * @param quoteList list of streaming quotes; all elements are expected to share one mode class
 * @return CSV formatted Quote list, or null when the list is empty, the mode is
 *         unsupported, or serialization fails
 */
private String formatQuoteListToCSV(List<StreamingQuote> quoteList) {
	// FIX: an empty (or null) list previously threw IndexOutOfBoundsException on get(0).
	if (quoteList == null || quoteList.isEmpty()) {
		System.out.println("TimeRangeStreamingQuoteActionHandler.formatQuoteListToCSV(): ERROR: empty quote list");
		return null;
	}

	CsvMapper mapper = new CsvMapper();

	// Choose the schema from the runtime type of the first quote; every
	// supported mode uses a header row and ',' as separator.
	Class<?> quoteClass = quoteList.get(0).getClass();
	CsvSchema schema;
	if (quoteClass == StreamingQuoteModeLtp.class
			|| quoteClass == StreamingQuoteModeQuote.class
			|| quoteClass == StreamingQuoteModeFull.class) {
		schema = mapper.schemaFor(quoteClass).withHeader().withColumnSeparator(',');
	} else {
		// FIX: previously fell through with a null schema into mapper.writer(null).
		System.out.println("TimeRangeStreamingQuoteActionHandler.formatQuoteListToCSV(): ERROR: Wrong POJO class to map");
		return null;
	}

	String csvData = null;
	try {
		csvData = mapper.writer(schema).writeValueAsString(quoteList);
	} catch (JsonProcessingException e) {
		System.out
				.println("TimeRangeStreamingQuoteActionHandler.formatQuoteListToCSV(): ERROR: JsonProcessingException on quote list !!!");
		e.printStackTrace();
	}

	return csvData;
}
 
Example #5
Source File: SystemController.java    From dhis2-core with BSD 3-Clause "New" or "Revised" License
/**
 * Streams {@code limit} freshly generated UIDs to the client as a one-column CSV
 * (header "uid"), capped at 10000 rows.
 */
@RequestMapping( value = { "/uid", "/id" }, method = RequestMethod.GET, produces = { "application/csv" } )
public void getUidCsv( @RequestParam( required = false, defaultValue = "1" ) Integer limit, HttpServletResponse response )
    throws IOException, InvalidTypeException
{
    // Cap the row count to protect the server from huge requests.
    limit = Math.min( limit, 10000 );

    // FIX: the generator was only flushed, never closed — close it (and the
    // response stream it wraps) deterministically, even on exception paths.
    try ( CsvGenerator csvGenerator = CSV_FACTORY.createGenerator( response.getOutputStream() ) )
    {
        CsvSchema.Builder schemaBuilder = CsvSchema.builder()
            .addColumn( "uid" )
            .setUseHeader( true );

        csvGenerator.setSchema( schemaBuilder.build() );

        // One single-field row per generated UID.
        for ( int i = 0; i < limit; i++ )
        {
            csvGenerator.writeStartObject();
            csvGenerator.writeStringField( "uid", CodeGenerator.generateUid() );
            csvGenerator.writeEndObject();
        }

        csvGenerator.flush();
    }
}
 
Example #6
Source File: JacksonCsv.java    From zerocode with Apache License 2.0
/**
 * Demo: writes three User rows to target/ModifiedUsers.csv. The first row is a
 * hand-made "header" User — the schema itself does not emit a header line.
 */
public static void main(String[] args) {

    // First element carries the column captions and is written as an ordinary row.
    List<User> users = new ArrayList<>();
    users.add(new User("First", "Last", "Age"));
    users.add(new User("First name1", "Last Name1", null));
    users.add(new User("First name2", "Last Name2", "22"));

    // Three columns, no automatic header.
    CsvSchema schema = CsvSchema.builder()
            .addColumn("firstName")
            .addColumn("lastName")
            .addColumn("age", CsvSchema.ColumnType.NUMBER)
            .build();

    CsvMapper mapper = new CsvMapper();
    mapper.enable(CsvParser.Feature.WRAP_AS_ARRAY);

    ObjectWriter writer = mapper.writer(schema.withLineSeparator("\n"));
    try {
        writer.writeValue(new File("target/ModifiedUsers.csv"), users);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
 
Example #7
Source File: UtilitiesTestBase.java    From hudi with Apache License 2.0
/**
 * Converts the json records into CSV format and writes to a file.
 *
 * @param hasHeader  whether the CSV file should have a header line.
 * @param sep  the column separator to use.
 * @param lines  the records in JSON format.
 * @param fs  {@link FileSystem} instance.
 * @param targetPath  File path.
 * @throws IOException if the target file cannot be created or written.
 */
public static void saveCsvToDFS(
    boolean hasHeader, char sep,
    String[] lines, FileSystem fs, String targetPath) throws IOException {
  Builder csvSchemaBuilder = CsvSchema.builder();

  // Parse every JSON line into the array; fail fast on malformed input.
  ArrayNode arrayNode = mapper.createArrayNode();
  Arrays.stream(lines).forEachOrdered(
      line -> {
        try {
          arrayNode.add(mapper.readValue(line, ObjectNode.class));
        } catch (IOException e) {
          throw new HoodieIOException(
              "Error converting json records into CSV format: " + e.getMessage());
        }
      });
  // CSV columns are derived from the first record's field names.
  arrayNode.get(0).fieldNames().forEachRemaining(csvSchemaBuilder::addColumn);
  ObjectWriter csvObjWriter = new CsvMapper()
      .writerFor(JsonNode.class)
      .with(csvSchemaBuilder.setUseHeader(hasHeader).setColumnSeparator(sep).build());
  // FIX: close the stream with try-with-resources so it is released even when
  // writeValue throws (previously it leaked on the exception path).
  try (PrintStream os = new PrintStream(fs.create(new Path(targetPath), true))) {
    csvObjWriter.writeValue(os, arrayNode);
    os.flush();
  }
}
 
Example #8
Source File: TruckEventsCsvConverter.java    From registry with Apache License 2.0
/**
 * Builds a typed CSV reader for truck events and returns a lazy iterator over the rows.
 * The first line of the stream is expected to be a header and is skipped.
 */
private MappingIterator<TruckEvent> readTruckEventsFromCsv(InputStream csvStream) throws IOException {
    // Column layout (in file order):
    // driverId,truckId,eventTime,eventType,longitude,latitude,eventKey,correlationId,driverName,routeId,routeName,eventDate
    CsvSchema.Builder builder = CsvSchema.builder();
    builder.addColumn("driverId", CsvSchema.ColumnType.NUMBER);
    builder.addColumn("truckId", CsvSchema.ColumnType.NUMBER);
    builder.addColumn("eventTime", CsvSchema.ColumnType.STRING);
    builder.addColumn("eventType", CsvSchema.ColumnType.STRING);
    builder.addColumn("longitude", CsvSchema.ColumnType.NUMBER);
    builder.addColumn("latitude", CsvSchema.ColumnType.NUMBER);
    builder.addColumn("eventKey", CsvSchema.ColumnType.STRING);
    builder.addColumn("correlationId", CsvSchema.ColumnType.NUMBER);
    builder.addColumn("driverName", CsvSchema.ColumnType.STRING);
    builder.addColumn("routeId", CsvSchema.ColumnType.NUMBER);
    builder.addColumn("routeName", CsvSchema.ColumnType.STRING);
    builder.addColumn("eventDate", CsvSchema.ColumnType.STRING);
    CsvSchema bootstrap = builder.build().withHeader();

    CsvMapper csvMapper = new CsvMapper();
    return csvMapper.readerFor(TruckEvent.class).with(bootstrap).readValues(csvStream);
}
 
Example #9
Source File: CsvSchemaFactoryTest.java    From s3-inventory-usage-examples with Apache License 2.0
@Test
public void buildCsvSchemaBuilderSuccess() {
    // Manifest whose fileSchema lists three comma-separated column names.
    InventoryManifest manifest = new InventoryManifest();
    manifest.setFileSchema("Bucket, Key, Versionid");

    CsvSchema actual = CsvSchemaFactory.buildSchema(manifest);

    CsvSchema expected = CsvSchema.builder()
            .addColumn("Bucket")
            .addColumn("Key")
            .addColumn("Versionid")
            .build();

    // CsvSchema does not define equals(), so compare the schemas
    // property-by-property instead of by object identity.
    assertThat(actual, samePropertyValuesAs(expected));
}
 
Example #10
Source File: AvroProducerDemo.java    From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
/**
 * private static method to read data from given dataFile
 *
 * @param dataFile data file name in resource folder
 * @return List of StockData Instance
 * @throws IOException, NullPointerException
 */
private static List<StockData> getStocks(String dataFile) throws IOException {
    File file = new File(dataFile);

    // Declare every CSV column explicitly, in file order, with its type.
    CsvSchema.Builder builder = CsvSchema.builder();
    builder.addColumn("symbol", CsvSchema.ColumnType.STRING);
    builder.addColumn("series", CsvSchema.ColumnType.STRING);
    builder.addColumn("open", CsvSchema.ColumnType.NUMBER);
    builder.addColumn("high", CsvSchema.ColumnType.NUMBER);
    builder.addColumn("low", CsvSchema.ColumnType.NUMBER);
    builder.addColumn("close", CsvSchema.ColumnType.NUMBER);
    builder.addColumn("last", CsvSchema.ColumnType.NUMBER);
    builder.addColumn("previousClose", CsvSchema.ColumnType.NUMBER);
    builder.addColumn("totalTradedQty", CsvSchema.ColumnType.NUMBER);
    builder.addColumn("totalTradedVal", CsvSchema.ColumnType.NUMBER);
    builder.addColumn("tradeDate", CsvSchema.ColumnType.STRING);
    builder.addColumn("totalTrades", CsvSchema.ColumnType.NUMBER);
    builder.addColumn("isinCode", CsvSchema.ColumnType.STRING);
    CsvSchema schema = builder.build();

    // Bind each row to StockData and materialize the whole file.
    MappingIterator<StockData> rows = new CsvMapper().readerFor(StockData.class).with(schema).readValues(file);
    return rows.readAll();
}
 
Example #11
Source File: NHANESSample.java    From synthea with Apache License 2.0
/**
 * Load the NHANES samples from resources.
 * @return A list of samples.
 */
public static List<NHANESSample> loadSamples() {
  String filename = "nhanes_two_year_olds_bmi.csv";
  CsvMapper mapper = new CsvMapper();
  // First CSV row is the header that names the bound columns.
  CsvSchema schema = CsvSchema.emptySchema().withHeader();
  try {
    String rawCSV = Utilities.readResource(filename);
    MappingIterator<NHANESSample> it =
        mapper.readerFor(NHANESSample.class).with(schema).readValues(rawCSV);
    List<NHANESSample> samples = new LinkedList<NHANESSample>();
    while (it.hasNextValue()) {
      samples.add(it.nextValue());
    }
    return samples;
  } catch (Exception e) {
    // Report which resource failed, then surface the failure to the caller.
    System.err.println("ERROR: unable to load CSV: " + filename);
    e.printStackTrace();
    throw new RuntimeException(e);
  }
}
 
Example #12
Source File: JsonCsvConverter.java    From tutorials with MIT License
/**
 * Serializes the order lines in {@code jsonFile} to {@code csvFile} using the
 * OrderLineForCsv mix-in layout, with a header row.
 */
public static void JsonToFormattedCsv(File jsonFile, File csvFile) throws IOException {
    // The CSV schema is derived from the mix-in class, which defines the column layout.
    CsvMapper csvMapper = new CsvMapper();
    CsvSchema csvSchema = csvMapper.schemaFor(OrderLineForCsv.class).withHeader();
    // The mix-in also redirects OrderLine's serialization when writing.
    csvMapper.addMixIn(OrderLine.class, OrderLineForCsv.class);

    // Read the JSON input with a plain ObjectMapper, then write it out as CSV.
    OrderLine[] orderLines = new ObjectMapper().readValue(jsonFile, OrderLine[].class);
    csvMapper.writerFor(OrderLine[].class).with(csvSchema).writeValue(csvFile, orderLines);
}
 
Example #13
Source File: JsonCsvConverter.java    From tutorials with MIT License
/** Reads header-driven CSV from {@code csvFile} and writes it to {@code jsonFile} as indented JSON. */
public static void csvToJson(File csvFile, File jsonFile) throws IOException {
    // The first CSV line supplies the property names for OrderLine binding.
    CsvSchema headerSchema = CsvSchema.emptySchema().withHeader();
    MappingIterator<OrderLine> rows = new CsvMapper().readerFor(OrderLine.class)
        .with(headerSchema)
        .readValues(csvFile);

    ObjectMapper jsonMapper = new ObjectMapper();
    jsonMapper.configure(SerializationFeature.INDENT_OUTPUT, true);
    jsonMapper.writeValue(jsonFile, rows.readAll());
}
 
Example #14
Source File: JsonCsvConverter.java    From tutorials with MIT License
/**
 * Converts a JSON array file to CSV, deriving the column set from the first
 * element's field names and emitting a header row.
 *
 * @throws IOException if the input cannot be read, is empty, or the output cannot be written
 */
public static void JsonToCsv(File jsonFile, File csvFile) throws IOException {
    JsonNode jsonTree = new ObjectMapper().readTree(jsonFile);

    // FIX: an empty JSON array previously threw an unhelpful NoSuchElementException
    // from elements().next(); fail with a clear, declared exception instead.
    if (!jsonTree.elements().hasNext()) {
        throw new IOException("Cannot derive CSV columns: no elements in " + jsonFile);
    }

    // Columns come from the first object's field names, in order.
    Builder csvSchemaBuilder = CsvSchema.builder();
    JsonNode firstObject = jsonTree.elements().next();
    firstObject.fieldNames().forEachRemaining(csvSchemaBuilder::addColumn);
    CsvSchema csvSchema = csvSchemaBuilder
        .build()
        .withHeader();

    CsvMapper csvMapper = new CsvMapper();
    csvMapper.writerFor(JsonNode.class)
        .with(csvSchema)
        .writeValue(csvFile, jsonTree);
}
 
Example #15
Source File: TCSV.java    From Llunatic with GNU General Public License v3.0
/**
 * Ad-hoc benchmark: parse a large CSV file, print its first row, and report
 * how long reader setup plus the first read took.
 */
public void test() throws IOException {
    CsvMapper mapper = new CsvMapper();
    mapper.enable(CsvParser.Feature.WRAP_AS_ARRAY);
    // No header handling, no custom quoting — raw rows as String arrays.
    CsvSchema schema = CsvSchema.emptySchema();
    File csvFile = new File("/Users/donatello/Temp/chaseBench-workspace/LUBM/data/01k/src-emailAddress.csv");

    long start = new Date().getTime();
    MappingIterator<String[]> it = mapper.readerFor(String[].class).with(schema).readValues(csvFile);
    // Only the first row is consumed; the iterator stays lazy for the rest.
    String[] firstRow = it.next();
    System.out.println(Arrays.asList(firstRow));
    long end = new Date().getTime();
    System.out.println("**** " + (end - start) + " ms");
}
 
Example #16
Source File: CSVStreamConnector.java    From syncope with Apache License 2.0
/**
 * Returns the parsed CSV column names, excluding any the pull spec marks as ignored.
 */
public List<String> getColumns(final CSVPullSpec spec) throws IOException {
    List<String> fromSpec = new ArrayList<>();
    // CsvSchema is Iterable over its columns; keep each name the spec does not ignore.
    CsvSchema parserSchema = (CsvSchema) reader().getParserSchema();
    for (CsvSchema.Column column : parserSchema) {
        if (!spec.getIgnoreColumns().contains(column.getName())) {
            fromSpec.add(column.getName());
        }
    }
    return fromSpec;
}
 
Example #17
Source File: CSVStreamConnector.java    From syncope with Apache License 2.0
/**
 * Creates a connector that reads CSV from {@code in} and/or writes CSV to {@code out}.
 *
 * @param keyColumn column acting as the record key
 * @param arrayElementsSeparator separator used for multi-valued cells
 * @param schemaBuilder pre-configured CSV schema builder
 * @param in CSV input stream (may be null for write-only use)
 * @param out CSV output stream (may be null for read-only use)
 * @param columns explicit column names, snapshotted into an immutable list
 */
public CSVStreamConnector(
        final String keyColumn,
        final String arrayElementsSeparator,
        final CsvSchema.Builder schemaBuilder,
        final InputStream in,
        final OutputStream out,
        final String... columns) {

    this.columns = List.of(columns);
    this.schemaBuilder = schemaBuilder;
    this.keyColumn = keyColumn;
    this.arrayElementsSeparator = arrayElementsSeparator;
    this.in = in;
    this.out = out;
}
 
Example #18
Source File: ReconciliationLogic.java    From syncope with Apache License 2.0
/**
 * Translates a CSV spec into a Jackson CSV schema builder. The header row is
 * always enabled; the escape character is applied only when the spec defines one.
 */
private CsvSchema.Builder csvSchema(final AbstractCSVSpec spec) {
    CsvSchema.Builder schemaBuilder = new CsvSchema.Builder();
    schemaBuilder.setUseHeader(true);
    schemaBuilder.setColumnSeparator(spec.getColumnSeparator());
    schemaBuilder.setArrayElementSeparator(spec.getArrayElementSeparator());
    schemaBuilder.setQuoteChar(spec.getQuoteChar());
    schemaBuilder.setLineSeparator(spec.getLineSeparator());
    schemaBuilder.setNullValue(spec.getNullValue());
    schemaBuilder.setAllowComments(spec.isAllowComments());
    if (spec.getEscapeChar() != null) {
        schemaBuilder.setEscapeChar(spec.getEscapeChar());
    }
    return schemaBuilder;
}
 
Example #19
Source File: JacksonCSVSplitter.java    From java-client-api with Apache License 2.0
/**
 * Builds the JsonNode reader used for splitting, falling back to a
 * header-driven schema and a default mapper when none were configured.
 */
private ObjectReader configureObjReader() {
    // Reset the split counter before a new read begins.
    this.count=0;

    CsvSchema firstLineSchema = getCsvSchema();
    if (firstLineSchema == null) {
        firstLineSchema = CsvSchema.emptySchema().withHeader();
    }

    CsvMapper csvMapper = getCsvMapper();
    if (csvMapper == null) {
        csvMapper = configureCsvMapper();
    }

    return csvMapper.readerFor(JsonNode.class).with(firstLineSchema);
}
 
Example #20
Source File: CsvStringSerializer.java    From pocket-etl with Apache License 2.0
/**
 * Initializes the two CSV writers: {@code writer} for ordinary rows (never a
 * header) and {@code firstRowWriter} for the first row, which optionally
 * carries the header line.
 */
private void createObjectWriter() {
    // Lenient binding; Joda dates serialized as ISO strings rather than timestamps.
    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
    mapper.registerModule(new JodaModule());
    mapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);

    // Row schema: configured separator, no quoting, no header.
    CsvSchema rowSchema = mapper.schemaFor(classToSerialize).withColumnSeparator(getColumnSeparator()).withoutQuoteChar();
    writer = mapper.writer(rowSchema);

    CsvSchema firstRowSchema = getWriteHeaderRow() ? rowSchema.withHeader() : rowSchema;
    firstRowWriter = mapper.writer(firstRowSchema);
}
 
Example #21
Source File: OffsetSerDe.java    From mirus with BSD 3-Clause "New" or "Revised" License
/**
 * Builds the CSV-based serializer/deserializer pair for OffsetInfo records:
 * aggressive quoting on write, no line separator (one record per value).
 */
private static OffsetSerDe csvOffsetSerDe() {
  CsvMapper csvMapper = new CsvMapper();
  csvMapper.configure(CsvGenerator.Feature.STRICT_CHECK_FOR_QUOTING, true);
  CsvSchema schema = csvMapper.schemaFor(OffsetInfo.class).withLineSeparator("");
  return new OffsetSerDe(
      csvMapper.writer(schema), csvMapper.reader(schema).forType(OffsetInfo.class));
}
 
Example #22
Source File: CsvReader.java    From graphql-java-demo with MIT License
/**
 * Loads a classpath CSV resource (first line = header) and binds each row to {@code type}.
 *
 * @param type     target type each CSV row is mapped to
 * @param fileName classpath resource name
 * @return all rows bound to {@code type}
 * @throws IOException if the resource cannot be opened, read, or bound
 */
public static <T> List<T> loadObjectList(Class<T> type, String fileName) throws IOException {
    CsvSchema bootstrapSchema = CsvSchema.emptySchema().withHeader();
    CsvMapper mapper = new CsvMapper();
    // FIX: the classpath stream was never closed; try-with-resources releases it
    // on both the success and the failure path.
    try (InputStream is = new ClassPathResource(fileName).getInputStream()) {
        MappingIterator<T> readValues = mapper.readerFor(type).with(bootstrapSchema).readValues(is);
        return readValues.readAll();
    }
}
 
Example #23
Source File: CsvView.java    From spring-boot-doma2-sample with Apache License 2.0
/**
 * Renders {@code data} as a downloadable CSV attachment on the HTTP response,
 * optionally restricting the output to a caller-supplied column subset.
 */
@Override
protected final void renderMergedOutputModel(Map<String, Object> model, HttpServletRequest request,
        HttpServletResponse response) throws Exception {

    // Encode the filename as UTF-8 (RFC 5987 filename*) so that non-ASCII
    // (e.g. Japanese) filenames are not garbled by the browser.
    val encodedFilename = EncodeUtils.encodeUtf8(filename);
    val contentDisposition = String.format("attachment; filename*=UTF-8''%s", encodedFilename);

    response.setHeader(CONTENT_TYPE, getContentType());
    response.setHeader(CONTENT_DISPOSITION, contentDisposition);

    // Build the CSV header (schema) from the target class.
    CsvSchema schema = csvMapper.schemaFor(clazz).withHeader();

    if (isNotEmpty(columns)) {
        // When explicit columns were specified, rebuild the schema with only those columns.
        val builder = schema.rebuild().clearColumns();
        for (String column : columns) {
            builder.addColumn(column);
        }
        schema = builder.build();
    }

    // Write the rows out; NOTE(review): output charset is Windows-31J (Shift_JIS
    // variant) — presumably for Japanese Excel compatibility; confirm before changing.
    val outputStream = createTemporaryOutputStream();
    try (Writer writer = new OutputStreamWriter(outputStream, "Windows-31J")) {
        csvMapper.writer(schema).writeValue(writer, data);
    }
}
 
Example #24
Source File: SimpleCSV.java    From synthea with Apache License 2.0
/**
 * Parse the data from the given CSV file into a List of Maps, where the key is the
 * column name. Uses a LinkedHashMap specifically to ensure the order of columns is preserved in
 * the resulting maps.
 *
 * @param csvData
 *          Raw CSV data
 * @return parsed data
 * @throws IOException
 *           if any exception occurs while parsing the data
 */
public static List<LinkedHashMap<String, String>> parse(String csvData) throws IOException {
  // The first line of the data supplies the column names, which become
  // the (ordered) keys of every row map.
  CsvSchema headerSchema = CsvSchema.emptySchema().withHeader();
  CsvMapper mapper = new CsvMapper();

  MappingIterator<LinkedHashMap<String, String>> rows =
      mapper.readerFor(LinkedHashMap.class).with(headerSchema).readValues(csvData);

  return rows.readAll();
}
 
Example #25
Source File: FileTargetProcessorSubmitIssues.java    From FortifyBugTrackerUtility with MIT License
/**
 * Writes the grouped vulnerability maps to a CSV file named after the group,
 * one column per configured field, header row included.
 */
@Override
protected boolean processMaps(Context context, String groupName, List<Object> currentGroup, List<LinkedHashMap<String, Object>> listOfMaps) {
    CsvSchema.Builder schemaBuilder = CsvSchema.builder();
    for (String col : getFields().keySet()) {
        schemaBuilder.addColumn(col);
    }
    CsvSchema schema = schemaBuilder.build().withHeader();

    // The group name doubles as the output file name.
    try {
        new CsvMapper().writer(schema).writeValue(new File(groupName), listOfMaps);
    } catch (Exception e) {
        throw new RuntimeException("Error writing data to file "+groupName, e);
    }
    LOG.info(String.format("[File] Submitted %d vulnerabilities to %s", currentGroup.size(), groupName));
    return true;
}
 
Example #26
Source File: CsvSchemaFactory.java    From s3-inventory-usage-examples with Apache License 2.0
/**
 * Build a CSV schema according to the content of the fileSchema in the manifest file
 * @param inventoryManifest the original manifest of the inventory report
 * @return CsvSchema
 */
public static CsvSchema buildSchema(InventoryManifest inventoryManifest){
    // fileSchema is a comma-separated column list; surrounding whitespace
    // around each name is stripped by the split pattern.
    String[] columnNames = inventoryManifest.getFileSchema().split("\\s*,\\s*");
    CsvSchema.Builder schemaBuilder = new CsvSchema.Builder();
    for (String columnName : columnNames) {
        schemaBuilder.addColumn(columnName);
    }
    return schemaBuilder.build();
}
 
Example #27
Source File: ZeroCodeReportGeneratorImpl.java    From zerocode with Apache License 2.0
/**
 * Writes the per-step CSV report rows to the full-report CSV file.
 *
 * @param zeroCodeCsvReportRows one row per executed scenario step
 * @throws RuntimeException wrapping any IOException raised while writing the file
 */
public void generateCsvReport(List<ZeroCodeCsvReport> zeroCodeCsvReportRows) {

    // CSV layout of the full report, header row included.
    CsvSchema schema = CsvSchema.builder()
            .setUseHeader(true)
            .addColumn("scenarioName")
            .addColumn("scenarioLoop", CsvSchema.ColumnType.NUMBER)
            .addColumn("stepName")
            .addColumn("stepLoop", CsvSchema.ColumnType.NUMBER)
            .addColumn("correlationId")
            .addColumn("requestTimeStamp")
            .addColumn("responseDelayMilliSec", CsvSchema.ColumnType.NUMBER)
            .addColumn("responseTimeStamp")
            .addColumn("result")
            .addColumn("method")
            .build();

    CsvMapper csvMapper = new CsvMapper();
    csvMapper.enable(CsvParser.Feature.WRAP_AS_ARRAY);

    ObjectWriter writer = csvMapper.writer(schema.withLineSeparator("\n"));
    try {
        writer.writeValue(
                new File(TARGET_FULL_REPORT_DIR + TARGET_FULL_REPORT_CSV_FILE_NAME),
                zeroCodeCsvReportRows);
    } catch (IOException e) {
        // FIX: preserve the original exception as the cause instead of
        // flattening it into the message (the stack trace was being lost).
        throw new RuntimeException("Exception while Writing full CSV report. Details: " + e, e);
    }
}
 
Example #28
Source File: CsvNodeSerializer.java    From dhis2-core with BSD 3-Clause "New" or "Revised" License
/**
 * Prepares the CSV generator for the given root node: one column per simple
 * property of the first item of each non-empty collection child.
 */
@Override
protected void startSerialize( RootNode rootNode, OutputStream outputStream ) throws Exception
{
    csvGenerator = CSV_FACTORY.createGenerator( outputStream );

    CsvSchema.Builder schemaBuilder = CsvSchema.builder()
        .setUseHeader( true );

    for ( Node child : rootNode.getChildren() )
    {
        // Only non-empty collection children contribute columns.
        if ( !child.isCollection() || child.getChildren().isEmpty() )
        {
            continue;
        }

        // The first item of the collection defines the column set.
        Node first = child.getChildren().get( 0 );

        for ( Node property : first.getChildren() )
        {
            if ( property.isSimple() )
            {
                schemaBuilder.addColumn( property.getName() );
            }
        }
    }

    csvGenerator.setSchema( schemaBuilder.build() );
}
 
Example #29
Source File: SPARQLManagerTest.java    From java-client-api with Apache License 2.0
/** Parses CSV text (first row = header) into an iterator of column-name-to-value maps. */
private MappingIterator<Map<String,String>> parseCsv(String csv) throws JsonProcessingException, IOException {
  // Header row supplies the map keys for every subsequent record.
  CsvSchema headerSchema = CsvSchema.emptySchema().withHeader();
  return new CsvMapper().reader(Map.class)
    .with(headerSchema)
    .readValues(csv);
}
 
Example #30
Source File: JacksonDatabindTest.java    From java-client-api with Apache License 2.0
/** Demonstrate using Jackson's CSV mapper directly to simplify reading in data, populating a
 * third-party pojo (one we cannot annotate) then writing it out
 * via JacksonDatabindHandle with configuration provided by mix-in annotations.
 **/
@Test
public void testDatabindingThirdPartyPojoWithMixinAnnotations() throws JsonProcessingException, IOException {
  // Tab-separated column layout matching the cities input file, one column per Toponym field.
  CsvSchema schema = CsvSchema.builder()
    .setColumnSeparator('\t')
    .addColumn("geoNameId")
    .addColumn("name")
    .addColumn("asciiName")
    .addColumn("alternateNames")
    .addColumn("latitude", CsvSchema.ColumnType.NUMBER)
    .addColumn("longitude", CsvSchema.ColumnType.NUMBER)
    .addColumn("featureClass")
    .addColumn("featureCode")
    .addColumn("countryCode")
    .addColumn("countryCode2")
    .addColumn("adminCode1")
    .addColumn("adminCode2")
    .addColumn("adminCode3")
    .addColumn("adminCode4")
    .addColumn("population")
    .addColumn("elevation", CsvSchema.ColumnType.NUMBER)
    .addColumn("dem", CsvSchema.ColumnType.NUMBER)
    .addColumn("timezoneCode")
    .addColumn("lastModified")
    .build();
  CsvMapper mapper = new CsvMapper();
  // Mix-in supplies the Jackson annotations we cannot place on the third-party Toponym class.
  mapper.addMixInAnnotations(Toponym.class, ToponymMixIn1.class);
  ObjectReader reader = mapper.reader(Toponym.class).with(schema);
  try (BufferedReader cityReader = new BufferedReader(Common.testFileToReader(CITIES_FILE))) {
    GenericDocumentManager docMgr = client.newDocumentManager();
    DocumentWriteSet set = docMgr.newWriteSet();
    String line = null;
    // Read up to MAX_TO_WRITE lines; each line is one CSV record bound to a Toponym.
    for (int numWritten = 0; numWritten < MAX_TO_WRITE && (line = cityReader.readLine()) != null; numWritten++ ) {
      Toponym city = reader.readValue(line);
      // A second mix-in on the handle's mapper controls how the pojo is written back out as JSON.
      JacksonDatabindHandle handle = new JacksonDatabindHandle(city);
      handle.getMapper().addMixInAnnotations(Toponym.class, ToponymMixIn2.class);
      set.add(DIRECTORY + "/thirdPartyJsonCities/" + city.getGeoNameId() + ".json", handle);
    }   docMgr.write(set);
    // we can add assertions later, for now this test just serves as example code and
    // ensures no exceptions are thrown
  }
}