Java Code Examples for com.fasterxml.jackson.databind.MappingIterator#hasNext()
The following examples show how to use
com.fasterxml.jackson.databind.MappingIterator#hasNext().
You can vote up the examples you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
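All of the examples share the same basic pattern: obtain a MappingIterator from an ObjectReader (or from a CsvMapper/JsonParser), then drain it with a hasNext()/next() loop. The sketch below is a minimal, self-contained illustration of that pattern, not taken from any of the projects above; the Record type and file name are hypothetical placeholders.

import com.fasterxml.jackson.databind.MappingIterator;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class MappingIteratorUsage {

    /**
     * Reads a sequence of root-level JSON values from a file and binds each
     * one to the given type. Any bean type and any source accepted by
     * ObjectReader#readValues works the same way.
     */
    static <T> List<T> readAll(File file, Class<T> type) throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        List<T> result = new ArrayList<>();
        // readValues() returns a lazily-binding MappingIterator; hasNext()
        // advances the underlying parser to the next value (and may surface
        // parse problems as RuntimeExceptions, as Example 4 below guards against).
        try (MappingIterator<T> it = mapper.readerFor(type).readValues(file)) {
            while (it.hasNext()) {
                result.add(it.next());
            }
        }
        return result;
    }
}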
Example 1
Source File: ApplicationReporter.java From cf-butler with Apache License 2.0 | 5 votes |
protected AppUsageReport readAppUsageReport(String filename) throws JsonParseException, JsonMappingException, IOException {
    String content = readFile(filename);
    if (filename.endsWith(".json")) {
        return mapper.readValue(content, AppUsageReport.class);
    } else if (filename.endsWith(".csv")) {
        CsvMapper csvMapper = new CsvMapper();
        csvMapper.enable(CsvParser.Feature.WRAP_AS_ARRAY);
        File csvFile = new File(filename);
        MappingIterator<String[]> it = csvMapper.readerFor(String[].class).readValues(csvFile);
        AppUsageReportBuilder builder = AppUsageReport.builder();
        List<AppUsageMonthly> reports = new ArrayList<>();
        int rowNum = 0;
        while (it.hasNext()) {
            String[] row = it.next();
            if (rowNum > 0) {
                AppUsageMonthlyBuilder amb = AppUsageMonthly.builder();
                for (int i = 0; i < row.length; i++) {
                    if (i == 0) {
                        String[] period = row[i].split("-");
                        if (period.length == 2) {
                            amb.month(Integer.valueOf(period[1]));
                        }
                        amb.year(Integer.valueOf(period[0]));
                    }
                    if (i == 1) {
                        amb.averageAppInstances(Double.valueOf(row[i]));
                    }
                    if (i == 2) {
                        amb.maximumAppInstances(Integer.valueOf(row[i]));
                    }
                    if (i == 3) {
                        amb.appInstanceHours(Double.valueOf(row[i]));
                    }
                }
                reports.add(amb.build());
            }
            rowNum++;
        }
        builder.monthlyReports(reports);
        return builder.build();
    } else {
        return AppUsageReport.builder().build();
    }
}
Example 2
Source File: Models.java From fabric8-forge with Apache License 2.0 | 5 votes |
/**
 * Loads a list of JSON values of the given type from the given file
 */
public static <T> List<T> loadJsonValues(File json, Class<T> clazz) throws IOException {
    List<T> answer = new ArrayList<>();
    if (json.exists() && json.isFile()) {
        MappingIterator<T> iter = objectMapper.readerFor(clazz).readValues(json);
        while (iter.hasNext()) {
            answer.add(iter.next());
        }
    }
    return answer;
}
Example 3
Source File: Funktions.java From funktion-connectors with Apache License 2.0 | 5 votes |
static <T> List<T> parseYamlValues(File file, Class<T> clazz) throws IOException {
    ObjectMapper mapper = createObjectMapper();
    MappingIterator<T> iter = mapper.readerFor(clazz).readValues(file);
    List<T> answer = new ArrayList<>();
    while (iter.hasNext()) {
        answer.add(iter.next());
    }
    return answer;
}
Example 4
Source File: MappingEventReader.java From fahrschein with Apache License 2.0 | 5 votes |
@Override
public List<T> read(JsonParser jsonParser) throws IOException {
    expectToken(jsonParser, JsonToken.START_ARRAY);
    jsonParser.clearCurrentToken();

    final MappingIterator<T> eventIterator = eventReader.readValues(jsonParser);
    final List<T> events = new ArrayList<>();
    while (true) {
        try {
            // MappingIterator#hasNext can theoretically also throw RuntimeExceptions, that's why we use this strange loop structure
            if (eventIterator.hasNext()) {
                events.add(eventClass.cast(eventIterator.next()));
            } else {
                break;
            }
        } catch (RuntimeException e) {
            final Throwable cause = e.getCause();
            if (cause instanceof JsonMappingException) {
                onMappingException((JsonMappingException) cause);
            } else if (cause instanceof IOException) {
                throw (IOException) cause;
            } else {
                throw e;
            }
        }
    }
    return events;
}
Example 5
Source File: DAOCSVRepair.java From BART with MIT License | 5 votes |
public Map<String, Repair> loadRepairMap(String fileName) throws DAOException {
    Map<String, Repair> result = new HashMap<String, Repair>();
    try {
        BufferedReader reader = utility.getBufferedReader(fileName);
        MappingIterator<String[]> it = getMappingIterator(reader);
        while (it.hasNext()) {
            String[] tokens = it.next();
            if (tokens.length == 0 || tokens[0].startsWith("+++++++++++++++")) {
                continue;
            }
            String tid = tokens[0];
            String oldValue;
            String newValue;
            if (tokens.length == 2) {
                oldValue = null;
                newValue = tokens[1];
            } else if (tokens.length == 3) {
                oldValue = tokens[1];
                newValue = tokens[2];
            } else {
                throw new DAOException("Malformed file " + fileName + ".\nCSV file must contains at least two column (cell, newValue)");
            }
            Repair repair = new Repair(tid, oldValue, newValue);
            result.put(repair.getCellId(), repair);
        }
    } catch (IOException exception) {
        throw new DAOException("Unable to load file: " + fileName + "\n" + exception);
    }
    return result;
}
Example 6
Source File: SPARQLManagerTest.java From java-client-api with Apache License 2.0 | 5 votes |
private int countLines(MappingIterator<?> iter) {
    int numLines = 0;
    while (iter.hasNext()) {
        iter.next();
        numLines++;
    }
    return numLines;
}
Example 7
Source File: GitHubAuthFilter.java From para with Apache License 2.0 | 5 votes |
private String fetchUserEmail(Integer githubId, String accessToken) {
    HttpGet emailsGet = new HttpGet(PROFILE_URL + "/emails");
    emailsGet.setHeader(HttpHeaders.AUTHORIZATION, "token " + accessToken);
    emailsGet.setHeader(HttpHeaders.ACCEPT, "application/json");
    String defaultEmail = githubId + "@github.com";
    try (CloseableHttpResponse resp = httpclient.execute(emailsGet)) {
        HttpEntity respEntity = resp.getEntity();
        if (respEntity != null) {
            try (InputStream is = respEntity.getContent()) {
                MappingIterator<Map<String, Object>> emails = jreader.readValues(is);
                if (emails != null) {
                    String email = null;
                    while (emails.hasNext()) {
                        Map<String, Object> next = emails.next();
                        email = (String) next.get("email");
                        if (next.containsKey("primary") && (Boolean) next.get("primary")) {
                            break;
                        }
                    }
                    return email;
                }
            }
            EntityUtils.consumeQuietly(respEntity);
        }
    } catch (IOException e) {
        logger.warn("Failed to fetch user email from GitHub, using default: " + defaultEmail);
    }
    return defaultEmail;
}
Example 8
Source File: TestClient.java From Cardshifter with Apache License 2.0 | 5 votes |
private void listen() {
    try {
        MappingIterator<Message> values = mapper.readValues(new JsonFactory().createParser(in), Message.class);
        while (values.hasNext()) {
            Message msg = values.next();
            System.out.println("Incoming message " + msg);
            messages.offer(msg);
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}
Example 9
Source File: CSVLineParser.java From crate with Apache License 2.0 | 5 votes |
public byte[] parse(String row) throws IOException {
    MappingIterator<Object> iterator = csvReader.readValues(row.getBytes(StandardCharsets.UTF_8));
    out.reset();
    XContentBuilder jsonBuilder = new XContentBuilder(JsonXContent.JSON_XCONTENT, out).startObject();
    int i = 0;
    while (iterator.hasNext()) {
        if (i >= keyList.size()) {
            throw new IllegalArgumentException("Number of values exceeds number of keys");
        }
        jsonBuilder.field(keyList.get(i), iterator.next());
        i++;
    }
    jsonBuilder.endObject().close();
    return out.toByteArray();
}
Example 10
Source File: ServiceInstanceReporter.java From cf-butler with Apache License 2.0 | 4 votes |
protected List<NormalizedServicePlanMonthlyUsage> readServiceUsageReport(String filename) throws JsonParseException, JsonMappingException, IOException {
    String content = readFile(filename);
    if (filename.endsWith(".json")) {
        return NormalizedServicePlanMonthlyUsage.listOf(mapper.readValue(content, ServiceUsageReport.class));
    } else if (filename.endsWith(".csv")) {
        CsvMapper csvMapper = new CsvMapper();
        csvMapper.enable(CsvParser.Feature.WRAP_AS_ARRAY);
        File csvFile = new File(filename);
        MappingIterator<String[]> it = csvMapper.readerFor(String[].class).readValues(csvFile);
        List<NormalizedServicePlanMonthlyUsage> reports = new ArrayList<>();
        int rowNum = 0;
        while (it.hasNext()) {
            String[] row = it.next();
            if (rowNum > 0) {
                NormalizedServicePlanMonthlyUsageBuilder amb = NormalizedServicePlanMonthlyUsage.builder();
                for (int i = 0; i < row.length; i++) {
                    if (i == 0) {
                        String[] period = row[i].split("-");
                        if (period.length == 2) {
                            amb.month(Integer.valueOf(period[1]));
                        }
                        amb.year(Integer.valueOf(period[0]));
                    }
                    if (i == 1) {
                        amb.serviceName(row[i]);
                    }
                    if (i == 2) {
                        amb.serviceGuid(row[i]);
                    }
                    if (i == 3) {
                        amb.servicePlanName(row[i]);
                    }
                    if (i == 4) {
                        amb.servicePlanGuid(row[i]);
                    }
                    if (i == 5) {
                        amb.averageInstances(Double.valueOf(row[i]));
                    }
                    if (i == 6) {
                        amb.maximumInstances(Integer.valueOf(row[i]));
                    }
                    if (i == 7) {
                        amb.durationInHours(Double.valueOf(row[i]));
                    }
                }
                NormalizedServicePlanMonthlyUsage usage = amb.build();
                if (StringUtils.isNotBlank(usage.getServicePlanGuid())) {
                    reports.add(usage);
                }
            }
            rowNum++;
        }
        return reports;
    } else {
        return NormalizedServicePlanMonthlyUsage.listOf(ServiceUsageReport.builder().build());
    }
}
Example 11
Source File: BulkResultDeserializer.java From log4j2-elasticsearch with Apache License 2.0 | 4 votes |
@Override
public Object deserialize(JsonParser p, DeserializationContext ctxt) throws IOException, JsonProcessingException {
    int took = 0;
    boolean errors = false;
    BulkError error = null;
    int status = -1;
    List<BulkResultItem> items = null;

    String fieldName;
    while ((fieldName = p.nextFieldName()) != null) {
        switch (fieldName) {
            case "took": {
                took = p.nextIntValue(-1);
                break;
            }
            case "errors": {
                errors = p.nextBooleanValue();
                break;
            }
            case "status": {
                status = p.nextIntValue(-1);
                break;
            }
            case "error": {
                p.nextValue(); // skip to START_OBJECT or VALUE_NULL
                JsonDeserializer<Object> typeDeserializer = ctxt.findNonContextualValueDeserializer(ctxt.constructType(BulkError.class));
                error = (BulkError) typeDeserializer.deserialize(p, ctxt);
                break;
            }
            case "items": {
                if (errors) {
                    items = new ArrayList<>();
                    p.nextValue(); // skip to START_ARRAY
                    p.nextValue(); // skip to START_OBJECT
                    ObjectMapper mapper = (ObjectMapper) p.getCodec();
                    MappingIterator<BulkResultItem> bulkResultItemMappingIterator = mapper.readValues(p, BulkResultItem.class);
                    while (bulkResultItemMappingIterator.hasNext()) {
                        items.add(bulkResultItemMappingIterator.next());
                    }
                }
                break;
            }
        }
    }
    return new BufferedBulkResult(took, errors, error, status, items);
}
Example 12
Source File: BatchResultDeserializer.java From log4j2-elasticsearch with Apache License 2.0 | 4 votes |
@Override
public Object deserialize(JsonParser p, DeserializationContext ctxt) throws IOException {
    int took = 0;
    boolean errors = false;
    Error error = null;
    int status = -1;
    List<BatchItemResult> items = null;

    String fieldName;
    while ((fieldName = p.nextFieldName()) != null) {
        switch (fieldName) {
            case "took": {
                took = p.nextIntValue(-1);
                break;
            }
            case "errors": {
                errors = p.nextBooleanValue();
                break;
            }
            case "status": {
                status = p.nextIntValue(-1);
                break;
            }
            case "error": {
                p.nextValue(); // skip to START_OBJECT or VALUE_NULL
                JsonDeserializer<Object> typeDeserializer = ctxt.findNonContextualValueDeserializer(ctxt.constructType(Error.class));
                error = (Error) typeDeserializer.deserialize(p, ctxt);
                break;
            }
            case "items": {
                if (errors) {
                    items = new ArrayList<>();
                    p.nextValue(); // skip to START_ARRAY
                    p.nextValue(); // skip to START_OBJECT
                    ObjectMapper mapper = (ObjectMapper) p.getCodec();
                    MappingIterator<BatchItemResult> batchResultItemMappingIterator = mapper.readValues(p, BatchItemResult.class);
                    while (batchResultItemMappingIterator.hasNext()) {
                        items.add(batchResultItemMappingIterator.next());
                    }
                }
                break;
            }
        }
    }
    return new BatchResult(took, errors, error, status, items);
}
Example 13
Source File: DefaultCsvEventService.java From dhis2-core with BSD 3-Clause "New" or "Revised" License | 4 votes |
@Override
public Events readEvents( InputStream inputStream, boolean skipFirst ) throws IOException, ParseException {
    Events events = new Events();

    ObjectReader reader = CSV_MAPPER.readerFor( CsvEventDataValue.class )
        .with( CSV_SCHEMA.withSkipFirstDataRow( skipFirst ) );

    MappingIterator<CsvEventDataValue> iterator = reader.readValues( inputStream );

    Event event = new Event();
    event.setEvent( "not_valid" );

    while ( iterator.hasNext() ) {
        CsvEventDataValue dataValue = iterator.next();

        if ( !event.getEvent().equals( dataValue.getEvent() ) ) {
            event = new Event();
            event.setEvent( dataValue.getEvent() );
            event.setStatus( StringUtils.isEmpty( dataValue.getStatus() )
                ? EventStatus.ACTIVE
                : Enum.valueOf( EventStatus.class, dataValue.getStatus() ) );
            event.setProgram( dataValue.getProgram() );
            event.setProgramStage( dataValue.getProgramStage() );
            event.setEnrollment( dataValue.getEnrollment() );
            event.setOrgUnit( dataValue.getOrgUnit() );
            event.setEventDate( dataValue.getEventDate() );
            event.setDueDate( dataValue.getDueDate() );
            event.setCompletedDate( dataValue.getCompletedDate() );
            event.setCompletedBy( dataValue.getCompletedBy() );

            if ( dataValue.getGeometry() != null ) {
                event.setGeometry( new WKTReader().read( dataValue.getGeometry() ) );
            } else if ( dataValue.getLongitude() != null && dataValue.getLatitude() != null ) {
                event.setGeometry( new WKTReader()
                    .read( "Point(" + dataValue.getLongitude() + " " + dataValue.getLatitude() + ")" ) );
            }

            events.getEvents().add( event );
        }

        DataValue value = new DataValue( dataValue.getDataElement(), dataValue.getValue() );
        value.setStoredBy( dataValue.getStoredBy() );
        value.setProvidedElsewhere( dataValue.getProvidedElsewhere() );
        event.getDataValues().add( value );
    }

    return events;
}
Example 14
Source File: ReconciliationITCase.java From syncope with Apache License 2.0 | 4 votes |
@Test
public void exportCSV() throws IOException {
    ReconciliationService service = adminClient.getService(ReconciliationService.class);
    Client client = WebClient.client(service);
    client.accept(RESTHeaders.TEXT_CSV);

    AnyQuery anyQuery = new AnyQuery.Builder().realm(SyncopeConstants.ROOT_REALM).
            fiql(SyncopeClient.getUserSearchConditionBuilder().is("username").equalTo("*ini").query()).
            page(1).
            size(1000).
            orderBy("username ASC").
            build();

    CSVPushSpec spec = new CSVPushSpec.Builder(AnyTypeKind.USER.name()).ignorePaging(true).
            field("username").
            field("status").
            plainAttr("firstname").
            plainAttr("surname").
            plainAttr("email").
            plainAttr("loginDate").
            build();

    Response response = service.push(anyQuery, spec);
    assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());
    assertEquals(
            "attachment; filename=" + SyncopeConstants.MASTER_DOMAIN + ".csv",
            response.getHeaderString(HttpHeaders.CONTENT_DISPOSITION));

    PagedResult<UserTO> users = userService.search(anyQuery);
    assertNotNull(users);

    MappingIterator<Map<String, String>> reader = new CsvMapper().readerFor(Map.class).
            with(CsvSchema.emptySchema().withHeader()).readValues((InputStream) response.getEntity());

    int rows = 0;
    for (; reader.hasNext(); rows++) {
        Map<String, String> row = reader.next();

        assertEquals(users.getResult().get(rows).getUsername(), row.get("username"));
        assertEquals(users.getResult().get(rows).getStatus(), row.get("status"));

        switch (row.get("username")) {
            case "rossini":
                assertEquals(spec.getNullValue(), row.get("email"));
                assertTrue(row.get("loginDate").contains(spec.getArrayElementSeparator()));
                break;

            case "verdi":
                assertEquals("[email protected]", row.get("email"));
                assertEquals(spec.getNullValue(), row.get("loginDate"));
                break;

            case "bellini":
                assertEquals(spec.getNullValue(), row.get("email"));
                assertFalse(row.get("loginDate").contains(spec.getArrayElementSeparator()));
                break;

            default:
                break;
        }
    }
    assertEquals(rows, users.getTotalCount());
}
Example 15
Source File: ReconciliationLogicTest.java From syncope with Apache License 2.0 | 4 votes |
@Test
public void pushToCSV() throws IOException {
    Pair<Integer, List<UserTO>> search = AuthContextUtils.callAsAdmin(SyncopeConstants.MASTER_DOMAIN,
            () -> userLogic.search(null, 1, 100, List.of(), SyncopeConstants.ROOT_REALM, false));
    assertNotNull(search);

    CSVPushSpec spec = new CSVPushSpec.Builder(AnyTypeKind.USER.name()).ignorePaging(true).
            field("username").
            field("status").
            plainAttr("firstname").
            plainAttr("surname").
            plainAttr("email").
            plainAttr("loginDate").
            build();

    PipedInputStream in = new PipedInputStream();
    PipedOutputStream os = new PipedOutputStream(in);

    List<ProvisioningReport> results = AuthContextUtils.callAsAdmin(SyncopeConstants.MASTER_DOMAIN, () -> {
        return reconciliationLogic.push(null, 1, 1, List.of(), SyncopeConstants.ROOT_REALM, spec, os);
    });
    assertEquals(search.getLeft(), results.size());

    MappingIterator<Map<String, String>> reader =
            new CsvMapper().readerFor(Map.class).with(CsvSchema.emptySchema().withHeader()).readValues(in);

    for (int i = 0; i < results.size() && reader.hasNext(); i++) {
        Map<String, String> row = reader.next();

        assertEquals(results.get(i).getName(), row.get("username"));
        assertEquals(search.getRight().stream().filter(user -> row.get("username").equals(user.getUsername())).
                findFirst().get().getStatus(), row.get("status"));

        switch (row.get("username")) {
            case "rossini":
                assertEquals(spec.getNullValue(), row.get("email"));
                assertTrue(row.get("loginDate").contains(spec.getArrayElementSeparator()));
                break;

            case "verdi":
                assertEquals("[email protected]", row.get("email"));
                assertEquals(spec.getNullValue(), row.get("loginDate"));
                break;

            case "bellini":
                assertEquals(spec.getNullValue(), row.get("email"));
                assertFalse(row.get("loginDate").contains(spec.getArrayElementSeparator()));
                break;

            default:
                break;
        }
    }
}
Example 16
Source File: StreamPushJobDelegateTest.java From syncope with Apache License 2.0 | 4 votes |
@Test
public void push() throws IOException {
    PipedInputStream in = new PipedInputStream();
    PipedOutputStream os = new PipedOutputStream(in);

    PushTaskTO pushTask = new PushTaskTO();
    pushTask.setMatchingRule(MatchingRule.UPDATE);
    pushTask.setUnmatchingRule(UnmatchingRule.PROVISION);

    List<ProvisioningReport> results = AuthContextUtils.callAsAdmin(SyncopeConstants.MASTER_DOMAIN, () -> {
        try (CSVStreamConnector connector = new CSVStreamConnector(
                null,
                ";",
                new CsvSchema.Builder().setUseHeader(true),
                null,
                os)) {

            return streamPushExecutor.push(
                    anyTypeDAO.findUser(),
                    userDAO.findAll(1, 100),
                    List.of("username", "firstname", "surname", "email", "status", "loginDate"),
                    connector,
                    List.of(),
                    pushTask,
                    "user");
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    });
    assertEquals(userDAO.count(), results.size());

    MappingIterator<Map<String, String>> reader =
            new CsvMapper().readerFor(Map.class).with(CsvSchema.emptySchema().withHeader()).readValues(in);

    for (int i = 0; i < results.size() && reader.hasNext(); i++) {
        Map<String, String> row = reader.next();

        assertEquals(results.get(i).getName(), row.get("username"));
        assertEquals(userDAO.findByUsername(row.get("username")).getStatus(), row.get("status"));

        switch (row.get("username")) {
            case "rossini":
                assertEquals(StringUtils.EMPTY, row.get("email"));
                assertTrue(row.get("loginDate").contains(";"));
                break;

            case "verdi":
                assertEquals("[email protected]", row.get("email"));
                assertEquals(StringUtils.EMPTY, row.get("loginDate"));
                break;

            case "bellini":
                assertEquals(StringUtils.EMPTY, row.get("email"));
                assertFalse(row.get("loginDate").contains(";"));
                break;

            default:
                break;
        }
    }
}