Java Code Examples for com.fasterxml.jackson.databind.MappingIterator#readAll()
The following examples show how to use com.fasterxml.jackson.databind.MappingIterator#readAll().
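readAll() drains whatever values remain in the iterator and returns them as a List; there is also an overload that appends the values into an existing collection you pass in (as Example 9 does with a List). Before the project examples, here is a minimal sketch of the common CSV pattern, assuming a hypothetical Person POJO and a hypothetical people.csv input file:

import java.io.File;
import java.util.List;

import com.fasterxml.jackson.databind.MappingIterator;
import com.fasterxml.jackson.dataformat.csv.CsvMapper;
import com.fasterxml.jackson.dataformat.csv.CsvSchema;

public class ReadAllSketch {

    // Hypothetical POJO; public fields are bound to CSV columns by name.
    public static class Person {
        public String name;
        public int age;
    }

    public static void main(String[] args) throws Exception {
        CsvMapper mapper = new CsvMapper();
        // Use the first CSV row as the header that names each column.
        CsvSchema schema = CsvSchema.emptySchema().withHeader();
        // people.csv is a hypothetical input file.
        MappingIterator<Person> it = mapper.readerFor(Person.class)
                .with(schema)
                .readValues(new File("people.csv"));
        List<Person> people = it.readAll(); // reads every remaining row into a List
        System.out.println(people.size() + " rows read");
    }
}

Note that readAll() buffers the entire input in memory; for very large inputs, iterating with MappingIterator's hasNext()/next() instead keeps memory use constant.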
Example 1
Source File: AvroProducerDemo.java From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
/**
 * Private static method to read data from the given data file.
 *
 * @param dataFile data file name in the resource folder
 * @return list of StockData instances
 * @throws IOException if the file cannot be read
 */
private static List<StockData> getStocks(String dataFile) throws IOException {
    File file = new File(dataFile);
    CsvSchema schema = CsvSchema.builder()
        .addColumn("symbol", CsvSchema.ColumnType.STRING)
        .addColumn("series", CsvSchema.ColumnType.STRING)
        .addColumn("open", CsvSchema.ColumnType.NUMBER)
        .addColumn("high", CsvSchema.ColumnType.NUMBER)
        .addColumn("low", CsvSchema.ColumnType.NUMBER)
        .addColumn("close", CsvSchema.ColumnType.NUMBER)
        .addColumn("last", CsvSchema.ColumnType.NUMBER)
        .addColumn("previousClose", CsvSchema.ColumnType.NUMBER)
        .addColumn("totalTradedQty", CsvSchema.ColumnType.NUMBER)
        .addColumn("totalTradedVal", CsvSchema.ColumnType.NUMBER)
        .addColumn("tradeDate", CsvSchema.ColumnType.STRING)
        .addColumn("totalTrades", CsvSchema.ColumnType.NUMBER)
        .addColumn("isinCode", CsvSchema.ColumnType.STRING)
        .build();

    MappingIterator<StockData> stockDataIterator =
        new CsvMapper().readerFor(StockData.class).with(schema).readValues(file);
    return stockDataIterator.readAll();
}
Example 2
Source File: FilePipelineStateStore.java From datacollector with Apache License 2.0
@Override
public List<PipelineState> getHistory(String pipelineName, String rev, boolean fromBeginning)
    throws PipelineStoreException {
  if (!pipelineDirExists(pipelineName, rev) || !pipelineStateHistoryFileExists(pipelineName, rev)) {
    return Collections.emptyList();
  }
  try (Reader reader = new FileReader(getPipelineStateHistoryFile(pipelineName, rev))) {
    ObjectMapper objectMapper = ObjectMapperFactory.get();
    JsonParser jsonParser = objectMapper.getFactory().createParser(reader);
    MappingIterator<PipelineStateJson> pipelineStateMappingIterator =
        objectMapper.readValues(jsonParser, PipelineStateJson.class);
    List<PipelineStateJson> pipelineStateJsons = pipelineStateMappingIterator.readAll();
    Collections.reverse(pipelineStateJsons);
    if (fromBeginning) {
      return BeanHelper.unwrapPipelineStatesNewAPI(pipelineStateJsons);
    } else {
      int toIndex = pipelineStateJsons.size() > 100 ? 100 : pipelineStateJsons.size();
      return BeanHelper.unwrapPipelineStatesNewAPI(pipelineStateJsons.subList(0, toIndex));
    }
  } catch (IOException e) {
    throw new PipelineStoreException(ContainerError.CONTAINER_0115, pipelineName, rev, e.toString(), e);
  }
}
Example 3
Source File: CsvReader.java From graphql-java-demo with MIT License
public static <T> List<T> loadObjectList(Class<T> type, String fileName) throws IOException {
    CsvSchema bootstrapSchema = CsvSchema.emptySchema().withHeader();
    CsvMapper mapper = new CsvMapper();
    InputStream is = new ClassPathResource(fileName).getInputStream();
    MappingIterator<T> readValues = mapper.readerFor(type).with(bootstrapSchema).readValues(is);
    return readValues.readAll();
}
Example 4
Source File: InventoryReportLineMapper.java From s3-inventory-usage-examples with Apache License 2.0
/**
 * Map each line of the inventory report into a POJO.
 *
 * @return List<InventoryReportLine> which is a list of POJOs
 * @throws IOException when mapping with the schema fails
 */
public List<InventoryReportLine> mapInventoryReportLine(List<String> inventoryReportLine) throws IOException {
    CsvMapper mapper = new CsvMapper();
    List<InventoryReportLine> inventoryReportLines = new ArrayList<>();
    for (String eachLine : inventoryReportLine) {
        MappingIterator<InventoryReportLine> iterator =
            mapper.readerFor(InventoryReportLine.class).with(schema).readValues(eachLine);
        List<InventoryReportLine> rowValue = iterator.readAll();
        inventoryReportLines.add(rowValue.get(0));
    }
    return inventoryReportLines;
}
Example 5
Source File: SimpleCSV.java From synthea with Apache License 2.0
/**
 * Parse the data from the given CSV file into a List of Maps, where the key is the
 * column name. Uses a LinkedHashMap specifically to ensure the order of columns is
 * preserved in the resulting maps.
 *
 * @param csvData raw CSV data
 * @return parsed data
 * @throws IOException if any exception occurs while parsing the data
 */
public static List<LinkedHashMap<String, String>> parse(String csvData) throws IOException {
    // Read schema from the first line; start with bootstrap instance
    // to enable reading of schema from the first line
    // NOTE: reads schema and uses it for binding
    CsvMapper mapper = new CsvMapper();
    // use first row as header; otherwise defaults are fine
    CsvSchema schema = CsvSchema.emptySchema().withHeader();
    MappingIterator<LinkedHashMap<String, String>> it = mapper.readerFor(LinkedHashMap.class)
        .with(schema).readValues(csvData);
    return it.readAll();
}
Example 6
Source File: TruckEventsCsvConverter.java From registry with Apache License 2.0
public List<String> convertToJsonRecords(InputStream payloadStream, int limit) throws Exception {
    MappingIterator<TruckEvent> csvTruckEvents = readTruckEventsFromCsv(payloadStream);
    List<String> jsons = new ArrayList<>();
    int ct = 0;
    for (TruckEvent truckEvent : csvTruckEvents.readAll()) {
        truckEvent.setMiles((long) new Random().nextInt(500));
        String json = new ObjectMapper().writeValueAsString(truckEvent);
        jsons.add(json);
        if (++ct == limit) {
            break;
        }
    }
    return jsons;
}
Example 7
Source File: TruckEventsCsvConverter.java From registry with Apache License 2.0
public void convertToJsonRecords(String payloadFile, String outputFileName) throws IOException {
    try (InputStream csvStream = new FileInputStream(payloadFile);
         FileWriter fos = new FileWriter(outputFileName)) {
        MappingIterator<TruckEvent> csvTruckEvents = readTruckEventsFromCsv(csvStream);
        for (TruckEvent truckEvent : csvTruckEvents.readAll()) {
            truckEvent.setMiles((long) new Random().nextInt(500));
            String output = new ObjectMapper().writeValueAsString(truckEvent);
            fos.write(output);
            fos.write(System.lineSeparator());
        }
    }
}
Example 8
Source File: APIClientTest.java From data-prep with Apache License 2.0
public Folder getFolderByPath(String path) throws IOException {
    InputStream inputStream = with().queryParam("path", path).get("/api/folders/search").asInputStream();
    MappingIterator<Folder> foldersIterator = mapper.readerFor(Folder.class).readValues(inputStream);
    List<Folder> folders = foldersIterator.readAll();
    assertTrue(folders.size() == 1);
    return folders.iterator().next();
}
Example 9
Source File: CSVLineParser.java From crate with Apache License 2.0
public void parseHeader(String header) throws IOException {
    MappingIterator<String> iterator = csvReader.readValues(header.getBytes(StandardCharsets.UTF_8));
    iterator.readAll(keyList);
    HashSet<String> keySet = new HashSet<>(keyList);
    keySet.remove("");
    if (keySet.size() != keyList.size() || keySet.size() == 0) {
        throw new IllegalArgumentException("Invalid header: duplicate entries or no entries present");
    }
}
Example 10
Source File: CliMain.java From styx with Apache License 2.0
private void workflowCreate() throws IOException, ExecutionException, InterruptedException {
  final String component = namespace.getString(parser.workflowCreateComponentId.getDest());
  final File file = namespace.get(parser.workflowCreateFile.getDest());

  final ObjectReader workflowReader = Json.YAML_MAPPER.reader()
      .forType(WorkflowConfiguration.class);
  final MappingIterator<WorkflowConfiguration> iterator;
  if (file == null || file.getName().equals("-")) {
    iterator = workflowReader.readValues(System.in);
  } else {
    iterator = workflowReader.readValues(file);
  }

  final List<WorkflowConfiguration> configurations;
  try {
    configurations = iterator.readAll();
  } catch (IOException e) {
    throw createInputErrorException(e);
  }

  boolean invalid = false;
  for (WorkflowConfiguration configuration : configurations) {
    var workflow = Workflow.create(component, configuration);
    var errors = cliContext.workflowValidator().validateWorkflow(workflow);
    if (!errors.isEmpty()) {
      cliOutput.printError("Invalid workflow configuration: " + configuration.id());
      errors.forEach(error -> cliOutput.printError("  error: " + error));
      invalid = true;
    }
  }
  if (invalid) {
    throw CliExitException.of(ExitStatus.ArgumentError);
  }

  final List<CompletionStage<Workflow>> futures = configurations.stream()
      .map(configuration -> styxClient.createOrUpdateWorkflow(component, configuration))
      .collect(toList());
  for (CompletionStage<Workflow> future : futures) {
    final Workflow created = future.toCompletableFuture().get();
    cliOutput.printMessage("Workflow " + created.workflowId() + " in component "
                           + created.componentId() + " created.");
  }
}
Example 11
Source File: JsonProducerDemo.java From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
/**
 * Private static method to read data from the given data file.
 *
 * @param dataFile data file name in the resource folder
 * @return list of StockData instances
 * @throws IOException if the file cannot be read
 */
private static List<StockData> getStocks(String dataFile) throws IOException {
    File file = new File(dataFile);
    MappingIterator<StockData> stockDataIterator =
        new CsvMapper().readerWithTypedSchemaFor(StockData.class).readValues(file);
    return stockDataIterator.readAll();
}
Example 12
Source File: TransactionalProducer.java From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
/**
 * Private static method to read data from the given data file.
 *
 * @param dataFile data file name in the resource folder
 * @return list of StockData instances
 * @throws IOException if the file cannot be read
 * @throws NullPointerException if dataFile is null
 */
private static List<StockData> getStocks(String dataFile) throws IOException, NullPointerException {
    File file = new File(dataFile);
    MappingIterator<StockData> stockDataIterator =
        new CsvMapper().readerWithTypedSchemaFor(StockData.class).readValues(file);
    return stockDataIterator.readAll();
}