Java Code Examples for com.fasterxml.jackson.databind.MappingIterator#next()
The following examples show how to use
com.fasterxml.jackson.databind.MappingIterator#next() .
You can vote up the examples you like or vote down the ones you don't.
Follow the links above each example to go to the original project or source file, and see the sidebar for related API usage.
Example 1
Source File: PackageMetadataService.java From spring-cloud-skipper with Apache License 2.0 | 6 votes |
/**
 * Deserializes {@link PackageMetadata} entries from the given YAML index files.
 * Each index file may contain multiple YAML documents, all of which are read.
 *
 * @param indexFiles the package index files to parse
 * @return all package metadata found, in file order
 * @throws IllegalArgumentException if any index file cannot be parsed
 */
protected List<PackageMetadata> deserializeFromIndexFiles(List<File> indexFiles) {
    List<PackageMetadata> packageMetadataList = new ArrayList<>();
    YAMLMapper yamlMapper = new YAMLMapper();
    // Be lenient: index files may carry fields newer than this client knows about.
    yamlMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
    for (File indexFile : indexFiles) {
        try {
            MappingIterator<PackageMetadata> it =
                    yamlMapper.readerFor(PackageMetadata.class).readValues(indexFile);
            while (it.hasNextValue()) {
                // nextValue() (unlike next()) reports stream problems as IOException,
                // which the catch below converts with the offending file attached.
                packageMetadataList.add(it.nextValue());
            }
        }
        catch (IOException e) {
            // Name the file that failed; the previous message wrongly referred to a
            // "Release manifest" although this method parses package index files.
            throw new IllegalArgumentException("Can't parse package index YAML file: " + indexFile, e);
        }
    }
    return packageMetadataList;
}
Example 2
Source File: ApplicationReporter.java From cf-butler with Apache License 2.0 | 5 votes |
/**
 * Reads an application usage report from the named file.
 * JSON files are bound directly; CSV files are parsed row by row (the first
 * row is treated as a header and skipped); any other extension yields an
 * empty report.
 *
 * @param filename path to a .json or .csv report file
 * @return the parsed report, or an empty report for unknown extensions
 * @throws IOException if the file cannot be read or parsed
 */
protected AppUsageReport readAppUsageReport(String filename) throws JsonParseException, JsonMappingException, IOException {
    String content = readFile(filename);
    if (filename.endsWith(".json")) {
        return mapper.readValue(content, AppUsageReport.class);
    } else if (filename.endsWith(".csv")) {
        CsvMapper csvMapper = new CsvMapper();
        // WRAP_AS_ARRAY lets each CSV record be bound to a String[].
        csvMapper.enable(CsvParser.Feature.WRAP_AS_ARRAY);
        MappingIterator<String[]> rows = csvMapper.readerFor(String[].class).readValues(new File(filename));
        List<AppUsageMonthly> monthlyReports = new ArrayList<>();
        int line = 0;
        while (rows.hasNext()) {
            String[] cells = rows.next();
            if (line > 0) { // the first row is the header
                AppUsageMonthlyBuilder monthBuilder = AppUsageMonthly.builder();
                for (int col = 0; col < cells.length; col++) {
                    switch (col) {
                        case 0:
                            // Column 0 holds "year-month"; a lone year is still accepted.
                            String[] period = cells[col].split("-");
                            if (period.length == 2) {
                                monthBuilder.month(Integer.valueOf(period[1]));
                            }
                            monthBuilder.year(Integer.valueOf(period[0]));
                            break;
                        case 1:
                            monthBuilder.averageAppInstances(Double.valueOf(cells[col]));
                            break;
                        case 2:
                            monthBuilder.maximumAppInstances(Integer.valueOf(cells[col]));
                            break;
                        case 3:
                            monthBuilder.appInstanceHours(Double.valueOf(cells[col]));
                            break;
                    }
                }
                monthlyReports.add(monthBuilder.build());
            }
            line++;
        }
        AppUsageReportBuilder builder = AppUsageReport.builder();
        builder.monthlyReports(monthlyReports);
        return builder.build();
    } else {
        return AppUsageReport.builder().build();
    }
}
Example 3
Source File: DAOCSVRepair.java From BART with MIT License | 5 votes |
/**
 * Loads repairs from a CSV file and indexes them by cell id.
 * Each data row is either {@code cell,newValue} (old value unknown) or
 * {@code cell,oldValue,newValue}. Empty rows and "+++…" separator rows are skipped.
 *
 * @param fileName path to the CSV file
 * @return map from cell id to its {@link Repair}
 * @throws DAOException if the file cannot be read or a row has a wrong column count
 */
public Map<String, Repair> loadRepairMap(String fileName) throws DAOException {
    Map<String, Repair> result = new HashMap<String, Repair>();
    // try-with-resources: the original leaked the reader on every exit path.
    try (BufferedReader reader = utility.getBufferedReader(fileName)) {
        MappingIterator<String[]> it = getMappingIterator(reader);
        while (it.hasNext()) {
            String[] tokens = it.next();
            // Skip blank rows and "+++++…" separator rows.
            if (tokens.length == 0 || tokens[0].startsWith("+++++++++++++++")) {
                continue;
            }
            String tid = tokens[0];
            String oldValue;
            String newValue;
            if (tokens.length == 2) {
                oldValue = null;
                newValue = tokens[1];
            } else if (tokens.length == 3) {
                oldValue = tokens[1];
                newValue = tokens[2];
            } else {
                // Message fixed: the old text ("must contains at least two column")
                // was ungrammatical and understated the actual requirement.
                throw new DAOException("Malformed file " + fileName
                        + ".\nEach CSV row must contain two columns (cell, newValue)"
                        + " or three columns (cell, oldValue, newValue)");
            }
            Repair repair = new Repair(tid, oldValue, newValue);
            result.put(repair.getCellId(), repair);
        }
    } catch (IOException exception) {
        throw new DAOException("Unable to load file: " + fileName + "\n" + exception);
    }
    return result;
}
Example 4
Source File: SPARQLManagerTest.java From java-client-api with Apache License 2.0 | 5 votes |
// Verifies that SPARQL SELECT results grow as inference rulesets are enabled:
// no rulesets -> empty result, RANGE -> one row, RANGE+DOMAIN -> two rows.
@Test
public void testInference() throws Exception {
    // Load the ontology so the server has triples to infer over.
    gmgr.write("/ontology", new StringHandle(ontology).withMimetype("application/n-triples"));
    SPARQLQueryDefinition qdef = smgr.newQueryDefinition(
        "SELECT ?s { ?s a <http://example.org/C1> }");
    qdef.setIncludeDefaultRulesets(false);
    StringHandle handle = new StringHandle().withMimetype(SPARQLMimeTypes.SPARQL_CSV);

    // With no rulesets the CSV payload is just a CR/LF pair (URL-encoded "%0D%0A").
    String results = smgr.executeSelect(qdef, handle).get();
    assertEquals("%0D%0A", URLEncoder.encode(results, "utf8"));

    // RANGE inference yields exactly one matching subject.
    qdef.setRulesets(SPARQLRuleset.RANGE);
    results = smgr.executeSelect(qdef, handle).get();
    assertEquals(1, countLines(parseCsv(results)));

    // RANGE + DOMAIN inference yields exactly two subjects, checked in order.
    qdef.setRulesets(SPARQLRuleset.RANGE, SPARQLRuleset.DOMAIN);
    results = smgr.executeSelect(qdef, handle).get();
    MappingIterator<Map<String,String>> csvRows = parseCsv(results);
    assertTrue(csvRows.hasNext());
    Map<String,String> row = csvRows.next();
    assertEquals("http://example.org/o1", row.get("s"));
    assertTrue(csvRows.hasNext());
    row = csvRows.next();
    assertEquals("http://example.org/s2", row.get("s"));
    assertFalse(csvRows.hasNext());

    // Clean up the test ontology.
    gmgr.delete("/ontology");
}
Example 5
Source File: SPARQLManagerTest.java From java-client-api with Apache License 2.0 | 5 votes |
/** Drains the given iterator and returns how many values it produced. */
private int countLines(MappingIterator<?> iter) {
    int count = 0;
    for (; iter.hasNext(); count++) {
        iter.next();
    }
    return count;
}
Example 6
Source File: TCSV.java From Llunatic with GNU General Public License v3.0 | 5 votes |
public void test() throws IOException { CsvMapper mapper = new CsvMapper(); mapper.enable(CsvParser.Feature.WRAP_AS_ARRAY); CsvSchema schema = CsvSchema.emptySchema(); // schema = schema.withHeader(); // schema = schema.withQuoteChar('\''); // File csvFile = new File("/Temp/llunatic/doctors/10k/doctor.csv"); File csvFile = new File("/Users/donatello/Temp/chaseBench-workspace/LUBM/data/01k/src-emailAddress.csv"); long start = new Date().getTime(); MappingIterator<String[]> it = mapper.readerFor(String[].class).with(schema).readValues(csvFile); String[] row = it.next(); System.out.println(Arrays.asList(row)); long end = new Date().getTime(); System.out.println("**** " + (end - start) + " ms"); // while (it.hasNext()) { // String[] row = it.next(); // System.out.println(Arrays.asList(row)); // } }
Example 7
Source File: GitHubAuthFilter.java From para with Apache License 2.0 | 5 votes |
/**
 * Fetches an email address for a GitHub user from the /emails endpoint.
 * Prefers the address GitHub marks as primary; otherwise the last one listed.
 * Falls back to the synthetic address {@code <githubId>@github.com} when the
 * endpoint cannot be read.
 *
 * @param githubId the numeric GitHub user id (used for the fallback address)
 * @param accessToken OAuth token used for the request
 * @return the resolved email, possibly {@code null} if the response listed no emails
 */
private String fetchUserEmail(Integer githubId, String accessToken) {
    HttpGet emailsGet = new HttpGet(PROFILE_URL + "/emails");
    emailsGet.setHeader(HttpHeaders.AUTHORIZATION, "token " + accessToken);
    emailsGet.setHeader(HttpHeaders.ACCEPT, "application/json");
    String defaultEmail = githubId + "@github.com";
    try (CloseableHttpResponse resp = httpclient.execute(emailsGet)) {
        HttpEntity respEntity = resp.getEntity();
        if (respEntity != null) {
            try (InputStream is = respEntity.getContent()) {
                MappingIterator<Map<String, Object>> emails = jreader.readValues(is);
                if (emails != null) {
                    String email = null;
                    while (emails.hasNext()) {
                        Map<String, Object> next = emails.next();
                        email = (String) next.get("email");
                        // Stop at the address GitHub flags as primary.
                        if (next.containsKey("primary") && (Boolean) next.get("primary")) {
                            break;
                        }
                    }
                    return email;
                }
            }
            EntityUtils.consumeQuietly(respEntity);
        }
    } catch (IOException e) {
        // Pass the exception to the logger so the stack trace is recorded,
        // not just the message (the original dropped it entirely).
        logger.warn("Failed to fetch user email from GitHub, using default: " + defaultEmail, e);
    }
    return defaultEmail;
}
Example 8
Source File: JsonSerialization.java From Cardshifter with Apache License 2.0 | 5 votes |
/**
 * Reads a stream of JSON {@link Message}s and hands each to the handler,
 * stopping early when the handler returns {@code false}.
 *
 * @param in the JSON input stream
 * @param onReceived callback invoked per message; returning false stops reading
 * @throws CardshifterSerializationException if the stream cannot be parsed
 */
@Override
public void read(InputStream in, MessageHandler onReceived) throws CardshifterSerializationException {
    try {
        MappingIterator<Message> values =
                mapper.readValues(new JsonFactory().createParser(in), Message.class);
        while (values.hasNextValue()) {
            // nextValue() (unlike next()) surfaces stream problems as IOException,
            // so they are converted to CardshifterSerializationException below
            // instead of escaping as an unchecked RuntimeException.
            Message message = values.nextValue();
            if (!onReceived.messageReceived(message)) {
                return; // handler asked to stop consuming messages
            }
        }
    } catch (IOException ex) {
        throw new CardshifterSerializationException(ex);
    }
}
Example 9
Source File: TestClient.java From Cardshifter with Apache License 2.0 | 5 votes |
/**
 * Blocks reading JSON messages from the input stream, logging each one to
 * stdout and appending it to the message queue until the stream ends or fails.
 */
private void listen() {
    try {
        MappingIterator<Message> incoming =
                mapper.readValues(new JsonFactory().createParser(in), Message.class);
        while (incoming.hasNext()) {
            Message received = incoming.next();
            System.out.println("Incoming message " + received);
            messages.offer(received);
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}
Example 10
Source File: ServiceInstanceReporter.java From cf-butler with Apache License 2.0 | 4 votes |
/**
 * Reads a service usage report from the named file and normalizes it to
 * per-plan monthly usage entries. JSON files are bound directly; CSV files
 * are parsed row by row (first row is a header), keeping only rows with a
 * non-blank service plan GUID; any other extension yields an empty report.
 *
 * @param filename path to a .json or .csv report file
 * @return normalized monthly usage entries
 * @throws IOException if the file cannot be read or parsed
 */
protected List<NormalizedServicePlanMonthlyUsage> readServiceUsageReport(String filename) throws JsonParseException, JsonMappingException, IOException {
    String content = readFile(filename);
    if (filename.endsWith(".json")) {
        return NormalizedServicePlanMonthlyUsage.listOf(mapper.readValue(content, ServiceUsageReport.class));
    } else if (filename.endsWith(".csv")) {
        CsvMapper csvMapper = new CsvMapper();
        // WRAP_AS_ARRAY lets each CSV record be bound to a String[].
        csvMapper.enable(CsvParser.Feature.WRAP_AS_ARRAY);
        MappingIterator<String[]> rows = csvMapper.readerFor(String[].class).readValues(new File(filename));
        List<NormalizedServicePlanMonthlyUsage> reports = new ArrayList<>();
        int line = 0;
        while (rows.hasNext()) {
            String[] cells = rows.next();
            if (line > 0) { // the first row is the header
                NormalizedServicePlanMonthlyUsageBuilder usageBuilder = NormalizedServicePlanMonthlyUsage.builder();
                for (int col = 0; col < cells.length; col++) {
                    switch (col) {
                        case 0:
                            // Column 0 holds "year-month"; a lone year is still accepted.
                            String[] period = cells[col].split("-");
                            if (period.length == 2) {
                                usageBuilder.month(Integer.valueOf(period[1]));
                            }
                            usageBuilder.year(Integer.valueOf(period[0]));
                            break;
                        case 1:
                            usageBuilder.serviceName(cells[col]);
                            break;
                        case 2:
                            usageBuilder.serviceGuid(cells[col]);
                            break;
                        case 3:
                            usageBuilder.servicePlanName(cells[col]);
                            break;
                        case 4:
                            usageBuilder.servicePlanGuid(cells[col]);
                            break;
                        case 5:
                            usageBuilder.averageInstances(Double.valueOf(cells[col]));
                            break;
                        case 6:
                            usageBuilder.maximumInstances(Integer.valueOf(cells[col]));
                            break;
                        case 7:
                            usageBuilder.durationInHours(Double.valueOf(cells[col]));
                            break;
                    }
                }
                NormalizedServicePlanMonthlyUsage usage = usageBuilder.build();
                // Rows lacking a plan GUID are aggregate/summary lines; skip them.
                if (StringUtils.isNotBlank(usage.getServicePlanGuid())) {
                    reports.add(usage);
                }
            }
            line++;
        }
        return reports;
    } else {
        return NormalizedServicePlanMonthlyUsage.listOf(ServiceUsageReport.builder().build());
    }
}
Example 11
Source File: DefaultCsvEventService.java From dhis2-core with BSD 3-Clause "New" or "Revised" License | 4 votes |
/**
 * Reads events from a CSV stream of {@link CsvEventDataValue} rows.
 * Rows are grouped by event UID: whenever a row's UID differs from the
 * previous row's, a new {@link Event} is created and registered; every row
 * (including the one that opened an event) contributes one {@link DataValue}
 * to the current event. Rows for one event are therefore assumed to be
 * contiguous in the input.
 *
 * @param inputStream CSV input
 * @param skipFirst whether to skip the first data row (header)
 * @return the parsed events
 * @throws IOException if the stream cannot be read
 * @throws ParseException if a WKT geometry value is malformed
 */
@Override
public Events readEvents( InputStream inputStream, boolean skipFirst )
    throws IOException, ParseException
{
    Events events = new Events();
    ObjectReader reader = CSV_MAPPER.readerFor( CsvEventDataValue.class )
        .with( CSV_SCHEMA.withSkipFirstDataRow( skipFirst ) );
    MappingIterator<CsvEventDataValue> iterator = reader.readValues( inputStream );

    // Sentinel event whose UID never matches a real row, so the first row
    // always starts a new event.
    Event event = new Event();
    event.setEvent( "not_valid" );

    while ( iterator.hasNext() )
    {
        CsvEventDataValue dataValue = iterator.next();

        // A row with a new event UID starts (and registers) a new Event.
        if ( !event.getEvent().equals( dataValue.getEvent() ) )
        {
            event = new Event();
            event.setEvent( dataValue.getEvent() );
            // Blank status defaults to ACTIVE; otherwise parsed as an EventStatus name.
            event.setStatus( StringUtils.isEmpty( dataValue.getStatus() ) ? EventStatus.ACTIVE : Enum.valueOf( EventStatus.class, dataValue.getStatus() ) );
            event.setProgram( dataValue.getProgram() );
            event.setProgramStage( dataValue.getProgramStage() );
            event.setEnrollment( dataValue.getEnrollment() );
            event.setOrgUnit( dataValue.getOrgUnit() );
            event.setEventDate( dataValue.getEventDate() );
            event.setDueDate( dataValue.getDueDate() );
            event.setCompletedDate( dataValue.getCompletedDate() );
            event.setCompletedBy( dataValue.getCompletedBy() );

            // Prefer an explicit WKT geometry; otherwise build a point from
            // longitude/latitude when both are present.
            if ( dataValue.getGeometry() != null )
            {
                event.setGeometry( new WKTReader().read( dataValue.getGeometry() ) );
            }
            else if ( dataValue.getLongitude() != null && dataValue.getLatitude() != null )
            {
                event.setGeometry( new WKTReader()
                    .read( "Point(" + dataValue.getLongitude() + " " + dataValue.getLatitude() + ")" ) );
            }

            events.getEvents().add( event );
        }

        // Every row carries one data value for the current event.
        DataValue value = new DataValue( dataValue.getDataElement(), dataValue.getValue() );
        value.setStoredBy( dataValue.getStoredBy() );
        value.setProvidedElsewhere( dataValue.getProvidedElsewhere() );
        event.getDataValues().add( value );
    }

    return events;
}
Example 12
Source File: ReconciliationITCase.java From syncope with Apache License 2.0 | 4 votes |
/**
 * Verifies that the reconciliation CSV export matches an equivalent user
 * search: same rows in the same order, correct attachment header, and
 * correct per-user rendering of null values and multi-valued attributes.
 */
@Test
public void exportCSV() throws IOException {
    ReconciliationService service = adminClient.getService(ReconciliationService.class);
    Client client = WebClient.client(service);
    client.accept(RESTHeaders.TEXT_CSV);

    // Users whose username ends with "ini", ordered for stable row comparison.
    AnyQuery anyQuery = new AnyQuery.Builder().realm(SyncopeConstants.ROOT_REALM).
        fiql(SyncopeClient.getUserSearchConditionBuilder().is("username").equalTo("*ini").query()).
        page(1).
        size(1000).
        orderBy("username ASC").
        build();

    CSVPushSpec spec = new CSVPushSpec.Builder(AnyTypeKind.USER.name()).ignorePaging(true).
        field("username").
        field("status").
        plainAttr("firstname").
        plainAttr("surname").
        plainAttr("email").
        plainAttr("loginDate").
        build();

    Response response = service.push(anyQuery, spec);
    assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());
    // The export is served as a CSV attachment named after the domain.
    assertEquals(
        "attachment; filename=" + SyncopeConstants.MASTER_DOMAIN + ".csv",
        response.getHeaderString(HttpHeaders.CONTENT_DISPOSITION));

    PagedResult<UserTO> users = userService.search(anyQuery);
    assertNotNull(users);

    MappingIterator<Map<String, String>> reader = new CsvMapper().readerFor(Map.class).
        with(CsvSchema.emptySchema().withHeader()).readValues((InputStream) response.getEntity());

    int rows = 0;
    for (; reader.hasNext(); rows++) {
        Map<String, String> row = reader.next();
        assertEquals(users.getResult().get(rows).getUsername(), row.get("username"));
        assertEquals(users.getResult().get(rows).getStatus(), row.get("status"));

        // Spot-check known fixture users for null-value and array rendering.
        switch (row.get("username")) {
            case "rossini":
                assertEquals(spec.getNullValue(), row.get("email"));
                assertTrue(row.get("loginDate").contains(spec.getArrayElementSeparator()));
                break;

            case "verdi":
                assertEquals("[email protected]", row.get("email"));
                assertEquals(spec.getNullValue(), row.get("loginDate"));
                break;

            case "bellini":
                assertEquals(spec.getNullValue(), row.get("email"));
                assertFalse(row.get("loginDate").contains(spec.getArrayElementSeparator()));
                break;

            default:
                break;
        }
    }
    assertEquals(rows, users.getTotalCount());
}
Example 13
Source File: ReconciliationLogicTest.java From syncope with Apache License 2.0 | 4 votes |
/**
 * Verifies that pushing users to a CSV stream yields one provisioning report
 * per user, and that the produced CSV rows match the searched users' data.
 */
@Test
public void pushToCSV() throws IOException {
    Pair<Integer, List<UserTO>> search = AuthContextUtils.callAsAdmin(SyncopeConstants.MASTER_DOMAIN,
        () -> userLogic.search(null, 1, 100, List.of(), SyncopeConstants.ROOT_REALM, false));
    assertNotNull(search);

    CSVPushSpec spec = new CSVPushSpec.Builder(AnyTypeKind.USER.name()).ignorePaging(true).
        field("username").
        field("status").
        plainAttr("firstname").
        plainAttr("surname").
        plainAttr("email").
        plainAttr("loginDate").
        build();

    // Pipe the push output back in, so the CSV can be parsed as it is produced.
    PipedInputStream in = new PipedInputStream();
    PipedOutputStream os = new PipedOutputStream(in);

    List<ProvisioningReport> results = AuthContextUtils.callAsAdmin(SyncopeConstants.MASTER_DOMAIN, () -> {
        return reconciliationLogic.push(null, 1, 1, List.of(), SyncopeConstants.ROOT_REALM, spec, os);
    });
    assertEquals(search.getLeft(), results.size());

    MappingIterator<Map<String, String>> reader =
        new CsvMapper().readerFor(Map.class).with(CsvSchema.emptySchema().withHeader()).readValues(in);

    for (int i = 0; i < results.size() && reader.hasNext(); i++) {
        Map<String, String> row = reader.next();
        assertEquals(results.get(i).getName(), row.get("username"));
        assertEquals(search.getRight().stream().filter(user -> row.get("username").equals(user.getUsername())).
            findFirst().get().getStatus(), row.get("status"));

        // Spot-check known fixture users for null-value and array rendering.
        switch (row.get("username")) {
            case "rossini":
                assertEquals(spec.getNullValue(), row.get("email"));
                assertTrue(row.get("loginDate").contains(spec.getArrayElementSeparator()));
                break;

            case "verdi":
                assertEquals("[email protected]", row.get("email"));
                assertEquals(spec.getNullValue(), row.get("loginDate"));
                break;

            case "bellini":
                assertEquals(spec.getNullValue(), row.get("email"));
                assertFalse(row.get("loginDate").contains(spec.getArrayElementSeparator()));
                break;

            default:
                break;
        }
    }
}
Example 14
Source File: StreamPushJobDelegateTest.java From syncope with Apache License 2.0 | 4 votes |
/**
 * Verifies that the stream push executor writes every user as a CSV row
 * (header row, ';' separator) and that each row matches its provisioning
 * report and the stored user data.
 */
@Test
public void push() throws IOException {
    // Pipe the connector's output back in, so the CSV can be parsed below.
    PipedInputStream in = new PipedInputStream();
    PipedOutputStream os = new PipedOutputStream(in);

    PushTaskTO pushTask = new PushTaskTO();
    pushTask.setMatchingRule(MatchingRule.UPDATE);
    pushTask.setUnmatchingRule(UnmatchingRule.PROVISION);

    List<ProvisioningReport> results = AuthContextUtils.callAsAdmin(SyncopeConstants.MASTER_DOMAIN, () -> {
        try (CSVStreamConnector connector = new CSVStreamConnector(
            null,
            ";",
            new CsvSchema.Builder().setUseHeader(true),
            null,
            os)) {

            return streamPushExecutor.push(
                anyTypeDAO.findUser(),
                userDAO.findAll(1, 100),
                List.of("username", "firstname", "surname", "email", "status", "loginDate"),
                connector,
                List.of(),
                pushTask,
                "user");
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    });
    assertEquals(userDAO.count(), results.size());

    MappingIterator<Map<String, String>> reader =
        new CsvMapper().readerFor(Map.class).with(CsvSchema.emptySchema().withHeader()).readValues(in);

    for (int i = 0; i < results.size() && reader.hasNext(); i++) {
        Map<String, String> row = reader.next();
        assertEquals(results.get(i).getName(), row.get("username"));
        assertEquals(userDAO.findByUsername(row.get("username")).getStatus(), row.get("status"));

        // Spot-check fixture users: empty email, multi-valued loginDate joined by ';'.
        switch (row.get("username")) {
            case "rossini":
                assertEquals(StringUtils.EMPTY, row.get("email"));
                assertTrue(row.get("loginDate").contains(";"));
                break;

            case "verdi":
                assertEquals("[email protected]", row.get("email"));
                assertEquals(StringUtils.EMPTY, row.get("loginDate"));
                break;

            case "bellini":
                assertEquals(StringUtils.EMPTY, row.get("email"));
                assertFalse(row.get("loginDate").contains(";"));
                break;

            default:
                break;
        }
    }
}