Java Code Examples for au.com.bytecode.opencsv.CSVReader#close()
The following examples show how to use au.com.bytecode.opencsv.CSVReader#close().
Each example is taken from an open-source project; the source file, project, and license are noted above the snippet.
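Before the project examples, here is a minimal, self-contained sketch of the basic pattern: open a CSVReader, read rows, and release the underlying stream with close(). The file name and the printed output are illustrative assumptions, not taken from any project below. Since the opencsv 2.x CSVReader implements Closeable, a try-with-resources block (Java 7+) can invoke close() automatically; the explicit finally form matches the style of most examples on this page.

import au.com.bytecode.opencsv.CSVReader;

import java.io.FileReader;
import java.io.IOException;
import java.util.Arrays;

public class CsvCloseSketch {

    public static void main(String[] args) throws IOException {
        // Hypothetical input file, used only for illustration.
        String path = "example.csv";

        // Explicit close() in a finally block, as in most examples below.
        CSVReader reader = new CSVReader(new FileReader(path));
        try {
            String[] row;
            while ((row = reader.readNext()) != null) {
                System.out.println(Arrays.toString(row));
            }
        } finally {
            reader.close(); // releases the underlying FileReader
        }

        // Alternative: CSVReader implements Closeable, so try-with-resources
        // closes it automatically, even if readNext() throws.
        try (CSVReader autoClosed = new CSVReader(new FileReader(path))) {
            String[] row;
            while ((row = autoClosed.readNext()) != null) {
                System.out.println(row.length + " columns");
            }
        }
    }
}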
Example 1
Source File: GetFusionCsv.java From collect-earth with MIT License | 6 votes |
private void processFile() throws IOException {
    final CSVReader csvReader = new CSVReader(new FileReader(new File("ullaan.csv")), ';');
    final CSVWriter csvWriter = new CSVWriter(new FileWriter(new File("resultFusion.csv")), ';');
    String[] nextRow;
    final String[] writeRow = new String[4];
    writeRow[0] = "Coordinates";
    writeRow[1] = "Land Use ID";
    writeRow[2] = "Land Use name";
    writeRow[3] = "Placemark ID";
    csvWriter.writeNext(writeRow);
    while ((nextRow = csvReader.readNext()) != null) {
        writeRow[0] = "<Point><coordinates>" + replaceComma(nextRow[2]) + "," + replaceComma(nextRow[3])
                + ",0.0</coordinates></Point>";
        final String landUse = nextRow[5];
        final int classId = getId(landUse);
        writeRow[1] = classId + "";
        writeRow[2] = landUse;
        writeRow[3] = nextRow[0];
        csvWriter.writeNext(writeRow);
    }
    csvWriter.close();
    csvReader.close();
}
Example 2
Source File: CsvUtils.java From pxf with Apache License 2.0 | 6 votes |
/**
 * Get Table of data from CSV file
 *
 * @param pathToCsvFile to read from to Table
 * @return {@link Table} with data list from CSV file
 * @throws IOException
 */
public static Table getTable(String pathToCsvFile) throws IOException {
    // direct CSVReader to csv file
    CSVReader csvReader = new CSVReader(new FileReader(pathToCsvFile));
    // read csv file to List
    List<String[]> list = csvReader.readAll();
    // create table and load csv as list to it
    Table dataTable = new Table(pathToCsvFile, null);
    try {
        for (Iterator<String[]> iterator = list.iterator(); iterator.hasNext();) {
            dataTable.addRow(iterator.next());
        }
    } finally {
        csvReader.close();
    }
    return dataTable;
}
Example 3
Source File: CVSRemoteFileFormatter.java From AIDR with GNU Affero General Public License v3.0 | 6 votes |
public List<MicromapperInput> getFileBaseImageClickerInputData(String csvFilename) throws Exception {
    // [Twitter username] [Tweet message] [optional: time-stamp] [optional: location]
    // [optional: latitude] [optional: longitude] [image link]
    List<MicromapperInput> sourceSet = new ArrayList<MicromapperInput>();
    CSVReader csvReader = new CSVReader(new FileReader(csvFilename));
    String[] row = null;
    while ((row = csvReader.readNext()) != null) {
        if (row != null) {
            if (row.length > 8) {
                MicromapperInput source = new MicromapperInput(row[8], row[1], row[0], row[5], row[6], row[7], row[2]);
                sourceSet.add(source);
            }
        }
    }
    csvReader.close();
    // REMOVE HEADER
    if (sourceSet.size() > 1) {
        sourceSet.remove(0);
    }
    return sourceSet;
}
Example 4
Source File: WanWrapper.java From gsn with GNU General Public License v3.0 | 6 votes |
public String[][] getHeader(String filename) throws IOException {
    CSVReader reader = new CSVReader(new FileReader(filename));
    String[] data = reader.readNext();
    if (data == null) return null;
    String[][] headers = new String[4][data.length];
    headers[0] = data;
    headers[1] = reader.readNext();
    headers[2] = reader.readNext();
    headers[3] = reader.readNext();
    if (headers[0] == null || headers[1] == null || headers[2] == null || headers[3] == null) {
        logger.debug("Header read incompletely.");
        logger.debug("" + (headers[0] == null));
        logger.debug("" + (headers[1] == null));
        logger.debug("" + (headers[2] == null));
        logger.debug("" + (headers[3] == null));
        return null;
    }
    reader.close();
    return headers;
}
Example 5
Source File: MySpreadsheetIntegration.java From AIDR with GNU Affero General Public License v3.0 | 6 votes |
public static void main(String[] args) throws Exception {
    String[] row = null;
    URL stockURL = new URL("http://localhost:8888/tornadotweets.csv");
    BufferedReader in = new BufferedReader(new InputStreamReader(stockURL.openStream()));
    CSVReader csvReader = new CSVReader(in);
    List content = csvReader.readAll();
    for (Object object : content) {
        row = (String[]) object;
        System.out.println(row[0] + " # " + row[1] + " # " + row[2]);
    }
    csvReader.close();
}
Example 6
Source File: Correspondence.java From winter with Apache License 2.0 | 5 votes |
public static Processable<Correspondence<RecordId, RecordId>> loadFromCsv(File location) throws IOException {
    CSVReader r = new CSVReader(new FileReader(location));
    Processable<Correspondence<RecordId, RecordId>> correspondences = new ProcessableCollection<>();
    String[] values = null;
    while ((values = r.readNext()) != null) {
        if (values.length >= 3) {
            String id1 = values[0];
            String id2 = values[1];
            String sim = values[2];
            Double similarityScore = 0.0;
            try {
                similarityScore = Double.parseDouble(sim);
            } catch (Exception ex) {
                logger.error(ex.getMessage());
            }
            Correspondence<RecordId, RecordId> cor = new Correspondence<RecordId, RecordId>(
                    new RecordId(id1), new RecordId(id2), similarityScore, null);
            correspondences.add(cor);
        } else {
            logger.error(String.format("Invalid format: \"%s\"", StringUtils.join(values, "\",\"")));
        }
    }
    r.close();
    return correspondences;
}
Example 7
Source File: TravelDocumentServiceImpl.java From kfs with GNU Affero General Public License v3.0 | 5 votes |
/**
 *
 */
@Override
public List<GroupTraveler> importGroupTravelers(final TravelDocument document, final String csvData) throws Exception {
    final List<GroupTraveler> retval = new LinkedList<GroupTraveler>();
    final BufferedReader bufferedFileReader = new BufferedReader(new StringReader(csvData));
    final CSVReader csvReader = new CSVReader(bufferedFileReader);
    final List<String[]> rows;
    try {
        rows = csvReader.readAll();
    } catch (IOException ex) {
        ex.printStackTrace();
        throw new ParseException("Could not parse CSV file data", ex);
    } finally {
        try {
            csvReader.close();
        } catch (Exception e) {}
    }
    final Map<String, List<Integer>> header = getGroupTravelerHeaders();
    for (final String[] row : rows) {
        final GroupTravelerCsvRecord record = createGroupTravelerCsvRecord(header, row);
        final GroupTraveler traveler = new GroupTraveler();
        traveler.setGroupTravelerEmpId(record.getGroupTravelerEmpId());
        traveler.setName(record.getName());
        traveler.setGroupTravelerType(record.getGroupTravelerType());
        retval.add(traveler);
    }
    return retval;
}
Example 8
Source File: CVSRemoteFileFormatter.java From AIDR with GNU Affero General Public License v3.0 | 5 votes |
public List<MicromapperInput> getGeoClickerInputData(String url) throws Exception {
    // "tweetID","tweet","author","lat","lng","url","created","answer"
    String[] row = null;
    List<MicromapperInput> sourceSet = new ArrayList<MicromapperInput>();
    CSVReader csvReader = getCVSContentReader(url);
    List content = csvReader.readAll();
    for (Object object : content) {
        row = (String[]) object;
        if (row != null) {
            if (row.length > 7) {
                String tweetID = row[0];
                String tweet = row[1];
                String author = row[2];
                String lat = row[3];
                String lng = row[4];
                String imgURL = row[5];
                String created = row[6];
                String answer = row[7];
                MicromapperInput source = new MicromapperInput(tweetID, tweet, author, lat, lng, imgURL, created, answer);
                sourceSet.add(source);
            }
        }
    }
    csvReader.close();
    if (sourceSet.size() > 1) {
        sourceSet.remove(0);
    }
    return sourceSet;
}
Example 9
Source File: DataSet.java From aifh with Apache License 2.0 | 5 votes |
/**
 * Load a CSV from an input stream.
 *
 * @param is The input stream.
 * @return The loaded file.
 */
public static DataSet load(final InputStream is) {
    final DataSet result;
    try {
        final Reader reader = new InputStreamReader(is);
        final CSVReader csv = new CSVReader(reader);
        final String[] headers = csv.readNext();
        result = new DataSet(headers);
        String[] nextLine;
        while ((nextLine = csv.readNext()) != null) {
            if (nextLine.length <= 1) {
                continue;
            } else if (nextLine.length != result.getHeaderCount()) {
                throw new AIFHError("Found a CSV line with " + nextLine.length
                        + " columns, when expecting " + result.getHeaderCount());
            }
            final Object[] obj = new Object[result.getHeaderCount()];
            System.arraycopy(nextLine, 0, obj, 0, nextLine.length);
            result.add(obj);
        }
        csv.close();
    } catch (IOException ex) {
        throw (new AIFHError(ex));
    }
    return result;
}
Example 10
Source File: Correspondence.java From winter with Apache License 2.0 | 5 votes |
public static <RecordType extends Matchable, SchemaElementType extends Matchable, CorrespondenceType extends Matchable>
        Processable<Correspondence<RecordType, CorrespondenceType>> loadFromCsv(
                File location,
                DataSet<RecordType, SchemaElementType> leftData,
                DataSet<RecordType, SchemaElementType> rightData) throws IOException {
    CSVReader r = new CSVReader(new FileReader(location));
    Processable<Correspondence<RecordType, CorrespondenceType>> correspondences = new ProcessableCollection<>();
    String[] values = null;
    while ((values = r.readNext()) != null) {
        if (values.length >= 3) {
            String id1 = values[0];
            String id2 = values[1];
            String sim = values[2];
            Double similarityScore = 0.0;
            try {
                similarityScore = Double.parseDouble(sim);
            } catch (Exception ex) {
                logger.error(ex.getMessage());
            }
            Correspondence<RecordType, CorrespondenceType> cor = new Correspondence<RecordType, CorrespondenceType>(
                    leftData.getRecord(id1), rightData.getRecord(id2), similarityScore, null);
            correspondences.add(cor);
        } else {
            logger.error(String.format("Invalid format: \"%s\"", StringUtils.join(values, "\",\"")));
        }
    }
    r.close();
    return correspondences;
}
Example 11
Source File: CorrespondenceSet.java From winter with Apache License 2.0 | 5 votes |
/**
 * Loads correspondences from a file and adds them to this correspondence
 * set. Can be called multiple times.
 *
 * @param correspondenceFile the file to load from
 * @param first the dataset that contains the records
 * @throws IOException thrown if there is a problem loading the file
 */
public void loadCorrespondences(File correspondenceFile, FusibleDataSet<RecordType, SchemaElementType> first) throws IOException {
    CSVReader reader = new CSVReader(new FileReader(correspondenceFile));
    String[] values = null;
    int skipped = 0;
    while ((values = reader.readNext()) != null) {
        // check if the ids exist in the provided data sets
        if (first.getRecord(values[0]) == null) {
            skipped++;
            continue;
        }
        // we only have the records from the source data sets, so we group by the id in the target data set
        RecordGroup<RecordType, SchemaElementType> grp2 = recordIndex.get(values[1]);
        if (grp2 == null) {
            // no existing groups, create a new one
            RecordGroup<RecordType, SchemaElementType> grp = groupFactory.createRecordGroup();
            grp.addRecord(values[0], first);
            recordIndex.put(values[1], grp);
            groups.add(grp);
        } else {
            // one existing group, add to this group
            grp2.addRecord(values[0], first);
            recordIndex.put(values[0], grp2);
        }
    }
    reader.close();
    if (skipped > 0) {
        logger.error(String.format("Skipped %,d records (not found in provided dataset)", skipped));
    }
}
Example 12
Source File: EvaluateCsv.java From science-result-extractor with Apache License 2.0 | 5 votes |
private static Map<String, String> loadFileScoreMap(File inpScore) throws IOException {
    Map<String, String> retMap = new HashMap<>();
    CSVReader csvReader = new CSVReader(new FileReader(inpScore));
    String[] row = csvReader.readNext(); // skip header
    while ((row = csvReader.readNext()) != null) {
        String file = row[0];
        String score = row[1];
        retMap.put(file, score);
    }
    csvReader.close();
    return retMap;
}
Example 13
Source File: NormalizeDataSet.java From aifh with Apache License 2.0 | 5 votes |
/**
 * Load a CSV from an input stream.
 *
 * @param is The input stream.
 * @return The loaded file.
 */
public static NormalizeDataSet load(final InputStream is) {
    final NormalizeDataSet result;
    try {
        final Reader reader = new InputStreamReader(is);
        final CSVReader csv = new CSVReader(reader);
        final String[] headers = csv.readNext();
        result = new NormalizeDataSet(headers);
        String[] nextLine;
        while ((nextLine = csv.readNext()) != null) {
            if (nextLine.length <= 1) {
                continue;
            } else if (nextLine.length != result.getHeaderCount()) {
                throw new AIFHError("Found a CSV line with " + nextLine.length
                        + " columns, when expecting " + result.getHeaderCount());
            }
            final Object[] obj = new Object[result.getHeaderCount()];
            System.arraycopy(nextLine, 0, obj, 0, nextLine.length);
            result.add(obj);
        }
        csv.close();
    } catch (IOException ex) {
        throw (new AIFHError(ex));
    }
    return result;
}
Example 14
Source File: DataSet.java From aifh with Apache License 2.0 | 5 votes |
/**
 * Load a CSV from an input stream.
 *
 * @param is The input stream.
 * @return The loaded file.
 */
public static DataSet load(final InputStream is) {
    final DataSet result;
    try {
        final Reader reader = new InputStreamReader(is);
        final CSVReader csv = new CSVReader(reader);
        final String[] headers = csv.readNext();
        result = new DataSet(headers);
        String[] nextLine;
        while ((nextLine = csv.readNext()) != null) {
            if (nextLine.length <= 1) {
                continue;
            } else if (nextLine.length != result.getHeaderCount()) {
                throw new AIFHError("Found a CSV line with " + nextLine.length
                        + " columns, when expecting " + result.getHeaderCount());
            }
            final Object[] obj = new Object[result.getHeaderCount()];
            System.arraycopy(nextLine, 0, obj, 0, nextLine.length);
            result.add(obj);
        }
        csv.close();
    } catch (IOException ex) {
        throw (new AIFHError(ex));
    }
    return result;
}
Example 15
Source File: CSVConfig.java From micro-integrator with Apache License 2.0 | 5 votes |
@Override
public boolean isActive() {
    try {
        CSVReader reader = this.createCSVReader();
        reader.close();
        return true;
    } catch (Exception e) {
        log.error("Error in checking CSV config availability", e);
        return false;
    }
}
Example 16
Source File: CVSRemoteFileFormatter.java From AIDR with GNU Affero General Public License v3.0 | 4 votes |
public List<MicromapperInput> getClickerLocalFileInputData(String csvFilename) throws Exception {
    // [Twitter username] [Tweet message] [optional: time-stamp] [optional: location]
    // [optional: latitude] [optional: longitude] [image link]
    String[] row = null;
    List<MicromapperInput> sourceSet = new ArrayList<MicromapperInput>();
    CSVReader csvReader = getCVSContentReader(csvFilename);
    List content = csvReader.readAll();
    for (Object object : content) {
        // User-Name(0) Tweet(1) Time-stamp(2) Location(3) Latitude(4) Longitude(5) Image-link(6) TweetID(7)
        // public MicromapperInput(String tweetID, String tweet, String author, String lat, String lng, String url, String created)
        row = (String[]) object;
        if (row != null) {
            if (row.length > 7) {
                String tweetID = row[7];
                String tweet = row[1];
                String author = row[0];
                String lat = row[4];
                String lng = row[5];
                String imgURL = row[6];
                String created = row[2];
                String dataSourceLocation;
                MicromapperInput source = new MicromapperInput(tweetID, tweet, author, lat, lng, imgURL, created);
                sourceSet.add(source);
            }
        }
        // tweetID,tweet,author,lat,lng,url,created
    }
    csvReader.close();
    // REMOVE HEADER
    if (sourceSet.size() > 1) {
        sourceSet.remove(0);
    }
    return sourceSet;
}
Example 17
Source File: ImportExportTask.java From Passbook with Apache License 2.0 | 4 votes |
private String importPbCSV() {
    String result;
    try {
        CSVReader csvReader = new CSVReader(new FileReader(mFilePath));
        List<String[]> content = csvReader.readAll();
        csvReader.close();
        AccountManager appAm = Application.getInstance().getAccountManager();
        Hashtable<String, Integer> existingCategory = new Hashtable<>();
        Hashtable<String, Integer> existingAccounts = new Hashtable<>();
        List<AccountManager.Category> categories = appAm.getCategoryList(true, false);
        for (AccountManager.Category c : categories) {
            existingCategory.put(c.mName, c.mId);
        }
        for (String s : content.get(0)) {
            if (existingCategory.get(s) == null) {
                existingCategory.put(s, appAm.addCategory(0, s));
            }
        }
        String[] line;
        String names[];
        String fields[];
        AccountManager.Account account;
        Integer accId;
        int categoryId;
        if (mOption != OPTION_KEEPALL) {
            List<AccountManager.Account> accounts = appAm.getAllAccounts(false);
            for (AccountManager.Account a : accounts) {
                existingAccounts.put(a.mProfile, a.mId);
            }
            for (int i = 1; i < content.size(); ++i) {
                line = content.get(i);
                names = line[0].split("\t", 2);
                accId = existingAccounts.get(names[1]);
                if (accId != null && mOption == OPTION_IGNORE) {
                    continue;
                }
                categoryId = existingCategory.get(names[0]);
                if (accId == null) {
                    account = appAm.newAccount(categoryId);
                } else {
                    account = appAm.getAccountById(accId);
                    account.setCategory(categoryId);
                }
                account.setName(names[1]);
                account.clearEntries();
                for (int j = 1; j < line.length; ++j) {
                    fields = line[j].split("\t", 3);
                    account.addEntry(Integer.parseInt(fields[0]), fields[1], fields[2]);
                }
                if (accId == null) {
                    appAm.addAccount(categoryId, account);
                } else {
                    appAm.setAccount(account);
                }
            }
        } else {
            for (int i = 1; i < content.size(); ++i) {
                line = content.get(i);
                names = line[0].split("\t", 2);
                categoryId = existingCategory.get(names[0]);
                account = appAm.newAccount(categoryId);
                account.setName(names[1]);
                for (int j = 1; j < line.length; ++j) {
                    fields = line[j].split("\t", 3);
                    account.addEntry(Integer.parseInt(fields[0]), fields[1], fields[2]);
                }
                appAm.addAccount(categoryId, account);
            }
        }
        result = mFilePath;
    } catch (Exception e) {
        result = null;
    }
    return result;
}
Example 18
Source File: MatchingGoldStandard.java From winter with Apache License 2.0 | 3 votes |
/**
 * Loads a gold standard from a TSV file
 *
 * @param file
 * @throws IOException
 */
public void loadFromTSVFile(File file) throws IOException {
    CSVReader reader = new CSVReader(new FileReader(file), '\t');
    readAllLines(reader);
    reader.close();
    printGSReport();
}
Example 19
Source File: MatchingGoldStandard.java From winter with Apache License 2.0 | 3 votes |
/**
 * Loads a gold standard from a CSV file
 *
 * @param file
 * @throws IOException
 */
public void loadFromCSVFile(File file) throws IOException {
    CSVReader reader = new CSVReader(new FileReader(file));
    readAllLines(reader);
    reader.close();
    printGSReport();
}
Example 20
Source File: CSVMatchableReader.java From winter with Apache License 2.0 | 3 votes |
public void loadFromCSV(File file, DataSet<RecordType, SchemaElementType> dataset) throws IOException {
    CSVReader reader = new CSVReader(new FileReader(file));
    String[] values = null;
    int rowNumber = 0;
    while ((values = reader.readNext()) != null) {
        readLine(file, rowNumber++, values, dataset);
    }
    reader.close();
}