org.supercsv.io.ICsvMapWriter Java Examples
The following examples show how to use
org.supercsv.io.ICsvMapWriter.
Each example notes the original project and source file it comes from, along with the project's license.
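ICsvMapWriter writes each row from a Map whose keys are matched against a name mapping array that defines the column order. As a minimal, self-contained sketch of the basic pattern (the file name, header and row values below are illustrative and not taken from any of the projects):

import java.io.FileWriter;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.supercsv.io.CsvMapWriter;
import org.supercsv.io.ICsvMapWriter;
import org.supercsv.prefs.CsvPreference;

public class MapWriterSketch {
    public static void main(String[] args) throws IOException {
        final String[] header = {"id", "name"};
        // try-with-resources closes the writer (and the underlying FileWriter) automatically
        try (ICsvMapWriter writer = new CsvMapWriter(new FileWriter("people.csv"),
                CsvPreference.STANDARD_PREFERENCE)) {
            writer.writeHeader(header);

            Map<String, Object> row = new HashMap<>();
            row.put("id", 1);
            row.put("name", "Ada");
            // only the keys listed in the name mapping are written, in that order
            writer.write(row, header);
        }
    }
}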
Example #1
Source File: ReadWriteCSV.java From AIDR with GNU Affero General Public License v3.0
public ICsvMapWriter getCSVMapWriter(String fileToWrite) {
    try {
        return new CsvMapWriter(new FileWriterWithEncoding(fileToWrite, "UTF-8", true),
                new CsvPreference.Builder(CsvPreference.EXCEL_PREFERENCE)
                        .useEncoder(new DefaultCsvEncoder())
                        .build());
    } catch (IOException e) {
        logger.error("Error in creating CSV Bean writer!" + e);
    }
    return null;
}
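The method returns an open writer (or null if the file could not be opened), so the caller is responsible for writing the header and rows and for closing it. A hedged sketch of how the returned writer might be consumed; the path and column names are illustrative and not part of AIDR, and the surrounding method must handle the IOException thrown by the writer calls:

ICsvMapWriter writer = getCSVMapWriter("/tmp/output.csv");   // illustrative path
if (writer != null) {
    try {
        final String[] header = {"tweetId", "text"};          // illustrative columns
        writer.writeHeader(header);
        Map<String, Object> record = new HashMap<>();
        record.put("tweetId", "12345");
        record.put("text", "example tweet");
        writer.write(record, header);
    } finally {
        writer.close();                                       // flushes and closes the file
    }
}

Because the FileWriterWithEncoding above is opened in append mode (the third constructor argument), repeated runs append rows to the same file, so in practice the header would only be written when the file is first created.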
Example #2
Source File: SimpleCsvDownloadActor.java From occurrence with Apache License 2.0
/**
 * Executes the job.query and creates a data file that will contain the records from job.from to job.to positions.
 */
private void doWork(DownloadFileWork work) throws IOException {
    final DatasetUsagesCollector datasetUsagesCollector = new DatasetUsagesCollector();
    try (ICsvMapWriter csvMapWriter = new CsvMapWriter(
            new FileWriterWithEncoding(work.getJobDataFileName(), StandardCharsets.UTF_8),
            CsvPreference.TAB_PREFERENCE)) {
        SearchQueryProcessor.processQuery(work, occurrence -> {
            try {
                Map<String, String> occurrenceRecordMap =
                    buildInterpretedOccurrenceMap(occurrence, DownloadTerms.SIMPLE_DOWNLOAD_TERMS);
                populateVerbatimCsvFields(occurrenceRecordMap, occurrence);
                // collect usages
                datasetUsagesCollector.collectDatasetUsage(
                    occurrenceRecordMap.get(GbifTerm.datasetKey.simpleName()),
                    occurrenceRecordMap.get(DcTerm.license.simpleName()));
                // write results
                csvMapWriter.write(occurrenceRecordMap, COLUMNS);
            } catch (Exception e) {
                throw Throwables.propagate(e);
            }
        });
    } finally {
        // Release the lock
        work.getLock().unlock();
        LOG.info("Lock released, job detail: {} ", work);
    }
    getSender().tell(new Result(work, datasetUsagesCollector.getDatasetUsages(),
        datasetUsagesCollector.getDatasetLicenses()), getSelf());
}
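Here the writer uses CsvPreference.TAB_PREFERENCE, so the output is tab-delimited, and no writeHeader call is made: only data rows go into the per-job chunk file, presumably so that chunks can later be combined into one download file. The same data-only pattern in isolation (the file name, recordMap and COLUMNS below are placeholders):

try (ICsvMapWriter chunkWriter = new CsvMapWriter(
        new FileWriterWithEncoding("chunk-0000.tsv", StandardCharsets.UTF_8),
        CsvPreference.TAB_PREFERENCE)) {
    chunkWriter.write(recordMap, COLUMNS);   // data rows only, no header row
}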
Example #3
Source File: DownloadDwcaActor.java From occurrence with Apache License 2.0
/**
 * Executes the job.query and creates a data file that will contain the records from job.from to job.to positions.
 */
public void doWork(DownloadFileWork work) throws IOException {
    DatasetUsagesCollector datasetUsagesCollector = new DatasetUsagesCollector();
    try (ICsvMapWriter intCsvWriter = new CsvMapWriter(
             new FileWriterWithEncoding(work.getJobDataFileName() + TableSuffixes.INTERPRETED_SUFFIX, Charsets.UTF_8),
             CsvPreference.TAB_PREFERENCE);
         ICsvMapWriter verbCsvWriter = new CsvMapWriter(
             new FileWriterWithEncoding(work.getJobDataFileName() + TableSuffixes.VERBATIM_SUFFIX, Charsets.UTF_8),
             CsvPreference.TAB_PREFERENCE);
         ICsvBeanWriter multimediaCsvWriter = new CsvBeanWriter(
             new FileWriterWithEncoding(work.getJobDataFileName() + TableSuffixes.MULTIMEDIA_SUFFIX, Charsets.UTF_8),
             CsvPreference.TAB_PREFERENCE)) {
        SearchQueryProcessor.processQuery(work, occurrence -> {
            try {
                // Writes the occurrence record obtained from Elasticsearch as Map<String,Object>.
                if (occurrence != null) {
                    datasetUsagesCollector.incrementDatasetUsage(occurrence.getDatasetKey().toString());
                    intCsvWriter.write(OccurrenceMapReader.buildInterpretedOccurrenceMap(occurrence), INT_COLUMNS);
                    verbCsvWriter.write(OccurrenceMapReader.buildVerbatimOccurrenceMap(occurrence), VERB_COLUMNS);
                    writeMediaObjects(multimediaCsvWriter, occurrence);
                }
            } catch (Exception e) {
                throw Throwables.propagate(e);
            }
        });
    } finally {
        // Unlock the assigned lock.
        work.getLock().unlock();
        LOG.info("Lock released, job detail: {} ", work);
    }
    getSender().tell(new Result(work, datasetUsagesCollector.getDatasetUsages()), getSelf());
}
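In all three examples the String[] name mapping (COLUMNS, INT_COLUMNS, VERB_COLUMNS) selects which keys of each record map are written and in which order; keys missing from the map come out as empty cells. ICsvMapWriter also offers a write overload that takes one CellProcessor per column for validation or formatting. A hedged sketch, with column names and processors chosen purely for illustration:

import org.supercsv.cellprocessor.Optional;
import org.supercsv.cellprocessor.constraint.NotNull;
import org.supercsv.cellprocessor.ift.CellProcessor;

final String[] columns = {"datasetKey", "license"};   // illustrative columns
final CellProcessor[] processors = {
    new NotNull(),   // reject rows where datasetKey is missing or null
    new Optional()   // license may be null and is written as an empty cell
};
csvMapWriter.write(recordMap, columns, processors);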