org.apache.commons.io.output.FileWriterWithEncoding Java Examples
The following examples show how to use
org.apache.commons.io.output.FileWriterWithEncoding.
Each example is taken from an open-source project; the source file, project, and license are noted above the code.
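Before the project examples, here is a minimal, self-contained sketch of the class's basic use; the class name, output path, and text below are placeholders rather than code from any of the listed projects. It opens a file with an explicit charset, in append mode, instead of relying on the platform default encoding the way a plain java.io.FileWriter traditionally does.

    import java.io.File;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;

    import org.apache.commons.io.output.FileWriterWithEncoding;

    public class FileWriterWithEncodingExample {

        public static void main(String[] args) throws IOException {
            // Placeholder output path; any writable location will do.
            File out = new File("example-output.txt");

            // Open the file with an explicit charset instead of the platform default;
            // the third argument (true) appends rather than truncates.
            try (FileWriterWithEncoding writer =
                    new FileWriterWithEncoding(out, StandardCharsets.UTF_8, true)) {
                writer.write("written with an explicit UTF-8 encoding");
                writer.write(System.lineSeparator());
            }
        }
    }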
Example #1
Source File: KeyCodec.java From hadoop-ozone with Apache License 2.0 | 6 votes |
/**
 * Writes a given public key using the default config options.
 *
 * @param key - Key to write to file.
 * @throws IOException - On I/O failure.
 */
public void writePublicKey(PublicKey key) throws IOException {
  File publicKeyFile =
      Paths.get(location.toString(),
          securityConfig.getPublicKeyFileName()).toFile();

  if (Files.exists(publicKeyFile.toPath())) {
    throw new IOException("Public key already exists.");
  }

  try (PemWriter keyWriter = new PemWriter(
      new FileWriterWithEncoding(publicKeyFile, DEFAULT_CHARSET))) {
    keyWriter.writeObject(
        new PemObject(PUBLIC_KEY, key.getEncoded()));
  }
  Files.setPosixFilePermissions(publicKeyFile.toPath(), permissionSet);
}
Example #2
Source File: KeyCodec.java From hadoop-ozone with Apache License 2.0 | 6 votes |
/**
 * Helper function that actually writes data to the files.
 *
 * @param basePath - base path to write key
 * @param keyPair - Key pair to write to file.
 * @param privateKeyFileName - private key file name.
 * @param publicKeyFileName - public key file name.
 * @param force - forces overwriting the keys.
 * @throws IOException - On I/O failure.
 */
private synchronized void writeKey(Path basePath, KeyPair keyPair,
    String privateKeyFileName, String publicKeyFileName, boolean force)
    throws IOException {
  checkPreconditions(basePath);

  File privateKeyFile =
      Paths.get(location.toString(), privateKeyFileName).toFile();
  File publicKeyFile =
      Paths.get(location.toString(), publicKeyFileName).toFile();
  checkKeyFile(privateKeyFile, force, publicKeyFile);

  try (PemWriter privateKeyWriter = new PemWriter(
      new FileWriterWithEncoding(privateKeyFile, DEFAULT_CHARSET))) {
    privateKeyWriter.writeObject(
        new PemObject(PRIVATE_KEY, keyPair.getPrivate().getEncoded()));
  }

  try (PemWriter publicKeyWriter = new PemWriter(
      new FileWriterWithEncoding(publicKeyFile, DEFAULT_CHARSET))) {
    publicKeyWriter.writeObject(
        new PemObject(PUBLIC_KEY, keyPair.getPublic().getEncoded()));
  }
  Files.setPosixFilePermissions(privateKeyFile.toPath(), permissionSet);
  Files.setPosixFilePermissions(publicKeyFile.toPath(), permissionSet);
}
Example #3
Source File: VelocityUtil.java From feiqu-opensource with Apache License 2.0 | 6 votes |
/**
 * Generates a file from a Velocity template.
 *
 * @param inputVmFilePath template path
 * @param outputFilePath  output file path
 * @param context         Velocity context holding the template variables
 * @throws Exception
 */
public static void generate(String inputVmFilePath, String outputFilePath,
        VelocityContext context) throws Exception {
    Properties properties = new Properties();
    String path = getPath(inputVmFilePath);
    properties.setProperty(VelocityEngine.FILE_RESOURCE_LOADER_PATH, path);
    Velocity.init(properties);

    Template template = Velocity.getTemplate(getFile(inputVmFilePath), "utf-8");
    File outputFile = new File(outputFilePath);
    // Write the merged template output with an explicit UTF-8 encoding.
    try (FileWriterWithEncoding writer = new FileWriterWithEncoding(outputFile, "utf-8")) {
        template.merge(context, writer);
    }
}
Example #4
Source File: KeyCodec.java From hadoop-ozone with Apache License 2.0 | 6 votes |
/**
 * Writes a given private key using the default config options.
 *
 * @param key - Key to write to file.
 * @throws IOException - On I/O failure.
 */
public void writePrivateKey(PrivateKey key) throws IOException {
  File privateKeyFile =
      Paths.get(location.toString(),
          securityConfig.getPrivateKeyFileName()).toFile();

  if (Files.exists(privateKeyFile.toPath())) {
    throw new IOException("Private key already exist.");
  }

  try (PemWriter privateKeyWriter = new PemWriter(
      new FileWriterWithEncoding(privateKeyFile, DEFAULT_CHARSET))) {
    privateKeyWriter.writeObject(
        new PemObject(PRIVATE_KEY, key.getEncoded()));
  }
  Files.setPosixFilePermissions(privateKeyFile.toPath(), permissionSet);
}
Example #5
Source File: VelocityUtil.java From zheng with MIT License | 6 votes |
/**
 * Generates a file from a Velocity template.
 *
 * @param inputVmFilePath template path
 * @param outputFilePath  output file path
 * @param context         Velocity context holding the template variables
 * @throws Exception
 */
public static void generate(String inputVmFilePath, String outputFilePath,
        VelocityContext context) throws Exception {
    Properties properties = new Properties();
    properties.setProperty(VelocityEngine.FILE_RESOURCE_LOADER_PATH, getPath(inputVmFilePath));
    Velocity.init(properties);

    Template template = Velocity.getTemplate(getFile(inputVmFilePath), "utf-8");
    File outputFile = new File(outputFilePath);
    // Write the merged template output with an explicit UTF-8 encoding.
    try (FileWriterWithEncoding writer = new FileWriterWithEncoding(outputFile, "utf-8")) {
        template.merge(context, writer);
    }
}
Example #6
Source File: CitationsFileWriter.java From occurrence with Apache License 2.0 | 6 votes |
/**
 * Creates the dataset citation file using the search query response.
 *
 * @param datasetUsages      record count per dataset
 * @param citationFileName   output file name
 * @param occDownloadService occurrence download service
 * @param downloadKey        download key
 */
public static void createCitationFile(Map<UUID, Long> datasetUsages, String citationFileName,
                                      OccurrenceDownloadService occDownloadService, String downloadKey) {
  if (datasetUsages != null && !datasetUsages.isEmpty()) {
    try (ICsvBeanWriter beanWriter = new CsvBeanWriter(
        new FileWriterWithEncoding(citationFileName, Charsets.UTF_8), CsvPreference.TAB_PREFERENCE)) {
      for (Entry<UUID, Long> entry : datasetUsages.entrySet()) {
        if (entry.getKey() != null) {
          beanWriter.write(new Facet.Count(entry.getKey().toString(), entry.getValue()), HEADER, PROCESSORS);
        }
      }
      beanWriter.flush();
      persistUsages(occDownloadService, downloadKey, datasetUsages);
    } catch (IOException e) {
      LOG.error("Error creating citations file", e);
      throw Throwables.propagate(e);
    }
  }
}
Example #7
Source File: WriteToXMLUtil.java From CEC-Automatic-Annotation with Apache License 2.0 | 6 votes |
public static boolean writeToXML(Document document, String tempPath) {
    try {
        // Write with XMLWriter so the output format can be controlled. During debugging, garbled
        // characters showed up with this approach; the cause was an encoding mismatch between the
        // XML file in Eclipse and the file handled by the JFrame.
        OutputFormat format = OutputFormat.createPrettyPrint();
        format.setEncoding(EncodingUtil.CHARSET_UTF8);
        // format.setSuppressDeclaration(true); // if true, the XML declaration is not written
        format.setIndent(true);   // enable indentation
        format.setIndent(" ");    // indent with spaces
        format.setNewlines(true); // write line breaks
        XMLWriter writer = new XMLWriter(
                new FileWriterWithEncoding(new File(tempPath), EncodingUtil.CHARSET_UTF8), format);
        writer.write(document);
        writer.close();
    } catch (IOException e) {
        e.printStackTrace();
        MyLogger.logger.error("Error writing the XML file!");
        return false;
    }
    return true;
}
Example #8
Source File: MetaGroupMenuAssistantService.java From axelor-open-suite with GNU Affero General Public License v3.0 | 6 votes |
@Transactional(rollbackOn = {Exception.class})
public void createGroupMenuFile(MetaGroupMenuAssistant groupMenuAssistant) throws IOException {
  setBundle(new Locale(groupMenuAssistant.getLanguage()));
  File groupMenuFile = MetaFiles.createTempFile("MenuGroup", ".csv").toFile();

  try {
    List<String[]> rows = createHeader(groupMenuAssistant);
    addMenuRows(groupMenuAssistant, rows);
    addGroupAccess(rows);

    try (CSVWriter csvWriter = new CSVWriter(new FileWriterWithEncoding(groupMenuFile, "utf-8"), ';');
        FileInputStream fis = new FileInputStream(groupMenuFile)) {
      csvWriter.writeAll(rows);
      groupMenuAssistant.setMetaFile(metaFiles.upload(fis, getFileName(groupMenuAssistant)));
    }

    menuAssistantRepository.save(groupMenuAssistant);
  } catch (Exception e) {
    TraceBackService.trace(e);
  }
}
Example #9
Source File: PermissionAssistantService.java From axelor-open-suite with GNU Affero General Public License v3.0 | 6 votes |
public void createFile(PermissionAssistant assistant) {
  File permFile = new File(Files.createTempDir(), getFileName(assistant));

  try {
    try (FileWriterWithEncoding fileWriter =
        new FileWriterWithEncoding(permFile, StandardCharsets.UTF_8)) {
      CSVWriter csvWriter = new CSVWriter(fileWriter, ';');
      writeGroup(csvWriter, assistant);
    }
    createMetaFile(permFile, assistant);
  } catch (Exception e) {
    LOG.error(e.getLocalizedMessage());
    TraceBackService.trace(e);
  }
}
Example #10
Source File: DownloadDwcaActor.java From occurrence with Apache License 2.0 | 5 votes |
/**
 * Executes the job.query and creates a data file that will contain the records from job.from to job.to positions.
 */
public void doWork(DownloadFileWork work) throws IOException {

  DatasetUsagesCollector datasetUsagesCollector = new DatasetUsagesCollector();

  try (
    ICsvMapWriter intCsvWriter = new CsvMapWriter(
        new FileWriterWithEncoding(work.getJobDataFileName() + TableSuffixes.INTERPRETED_SUFFIX, Charsets.UTF_8),
        CsvPreference.TAB_PREFERENCE);
    ICsvMapWriter verbCsvWriter = new CsvMapWriter(
        new FileWriterWithEncoding(work.getJobDataFileName() + TableSuffixes.VERBATIM_SUFFIX, Charsets.UTF_8),
        CsvPreference.TAB_PREFERENCE);
    ICsvBeanWriter multimediaCsvWriter = new CsvBeanWriter(
        new FileWriterWithEncoding(work.getJobDataFileName() + TableSuffixes.MULTIMEDIA_SUFFIX, Charsets.UTF_8),
        CsvPreference.TAB_PREFERENCE)) {

    SearchQueryProcessor.processQuery(work, occurrence -> {
      try {
        // Writes the occurrence record obtained from Elasticsearch as Map<String,Object>.
        if (occurrence != null) {
          datasetUsagesCollector.incrementDatasetUsage(occurrence.getDatasetKey().toString());
          intCsvWriter.write(OccurrenceMapReader.buildInterpretedOccurrenceMap(occurrence), INT_COLUMNS);
          verbCsvWriter.write(OccurrenceMapReader.buildVerbatimOccurrenceMap(occurrence), VERB_COLUMNS);
          writeMediaObjects(multimediaCsvWriter, occurrence);
        }
      } catch (Exception e) {
        throw Throwables.propagate(e);
      }
    });

  } finally {
    // Unlock the assigned lock.
    work.getLock().unlock();
    LOG.info("Lock released, job detail: {} ", work);
  }
  getSender().tell(new Result(work, datasetUsagesCollector.getDatasetUsages()), getSelf());
}
Example #11
Source File: SimpleCsvDownloadActor.java From occurrence with Apache License 2.0 | 5 votes |
/**
 * Executes the job.query and creates a data file that will contain the records from job.from to job.to positions.
 */
private void doWork(DownloadFileWork work) throws IOException {

  final DatasetUsagesCollector datasetUsagesCollector = new DatasetUsagesCollector();

  try (ICsvMapWriter csvMapWriter = new CsvMapWriter(
      new FileWriterWithEncoding(work.getJobDataFileName(), StandardCharsets.UTF_8),
      CsvPreference.TAB_PREFERENCE)) {

    SearchQueryProcessor.processQuery(work, occurrence -> {
        try {
          Map<String, String> occurrenceRecordMap =
              buildInterpretedOccurrenceMap(occurrence, DownloadTerms.SIMPLE_DOWNLOAD_TERMS);
          populateVerbatimCsvFields(occurrenceRecordMap, occurrence);
          // collect usages
          datasetUsagesCollector.collectDatasetUsage(
              occurrenceRecordMap.get(GbifTerm.datasetKey.simpleName()),
              occurrenceRecordMap.get(DcTerm.license.simpleName()));
          // write results
          csvMapWriter.write(occurrenceRecordMap, COLUMNS);
        } catch (Exception e) {
          throw Throwables.propagate(e);
        }
      }
    );
  } finally {
    // Release the lock
    work.getLock().unlock();
    LOG.info("Lock released, job detail: {} ", work);
  }
  getSender().tell(
      new Result(work, datasetUsagesCollector.getDatasetUsages(), datasetUsagesCollector.getDatasetLicenses()),
      getSelf());
}
Example #12
Source File: ReadWriteCSV.java From AIDR with GNU Affero General Public License v3.0 | 5 votes |
public ICsvMapWriter getCSVMapWriter(String fileToWrite) {
    try {
        return new CsvMapWriter(new FileWriterWithEncoding(fileToWrite, "UTF-8", true),
                new CsvPreference.Builder(CsvPreference.EXCEL_PREFERENCE)
                        .useEncoder(new DefaultCsvEncoder())
                        .build());
    } catch (IOException e) {
        logger.error("Error in creating CSV Bean writer!" + e);
    }
    return null;
}
Example #13
Source File: HtmlReport.java From sahagin-java with Apache License 2.0 | 5 votes |
private void generateVelocityOutput(
        VelocityContext context, String templateResourcePath, File outputFile) {
    if (outputFile.getParentFile() != null) {
        outputFile.getParentFile().mkdirs();
    }
    try (InputStream stream = this.getClass().getResourceAsStream(templateResourcePath);
            Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8);
            Reader bfReader = new BufferedReader(streamReader);
            FileWriterWithEncoding writer = new FileWriterWithEncoding(outputFile, StandardCharsets.UTF_8)) {
        Velocity.evaluate(context, writer, this.getClass().getSimpleName(), bfReader);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
Example #14
Source File: YamlUtils.java From sahagin-java with Apache License 2.0 | 5 votes |
public static void dump(Map<String, Object> yamlObj, File dumpFile) {
    if (dumpFile.getParentFile() != null) {
        dumpFile.getParentFile().mkdirs();
    }
    try (FileWriterWithEncoding writer = new FileWriterWithEncoding(dumpFile, StandardCharsets.UTF_8)) {
        Yaml yaml = new Yaml();
        yaml.dump(yamlObj, writer);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
Example #15
Source File: EntityCount.java From baleen with Apache License 2.0 | 5 votes |
@Override
public void doProcess(JCas jCas) throws AnalysisEngineProcessException {
  DocumentAnnotation da = getDocumentAnnotation(jCas);

  try (PrintWriter pw = new PrintWriter(
      new BufferedWriter(new FileWriterWithEncoding(output, StandardCharsets.UTF_8, true)))) {
    int count = JCasUtil.select(jCas, Entity.class).size();
    pw.println(da.getSourceUri() + "\t" + count);
  } catch (IOException e) {
    getMonitor().warn("Unable to write to output", e);
  }
}
Example #16
Source File: FileIOUtils.java From super-cloudops with Apache License 2.0 | 5 votes |
/**
 * Write string to file.
 *
 * @param file
 * @param data
 * @param charset
 * @param append
 */
public static void writeFile(File file, String data, Charset charset, boolean append) {
    notNull(file, "Write file must not be null");
    notNull(data, "Write data must not be empty");
    notNull(charset, "Write charset must not be null");
    ensureFile(file);

    try (Writer w = new FileWriterWithEncoding(file, charset, append)) {
        w.write(data);
        w.flush();
    } catch (IOException e) {
        throw new IllegalStateException(e);
    }
}
Example #17
Source File: FilterUtil.java From CEC-Automatic-Annotation with Apache License 2.0 | 4 votes |
public static void parseXML(String filePath) {
    // InputStream is = null;
    // try {
    //     is = new InputStreamReader(new FileInputStream(new File(filePath)));
    // } catch (FileNotFoundException e1) {
    //     e1.printStackTrace();
    //     MyLogger.logger.error(e1.getMessage());
    // }
    SAXReader saxReader = new SAXReader();
    Document document = null;
    try {
        document = saxReader.read(new BufferedReader(
                new InputStreamReader(new FileInputStream(new File(filePath)), EncodingUtil.CHARSET_UTF8)));
    } catch (DocumentException | UnsupportedEncodingException | FileNotFoundException e1) {
        e1.printStackTrace();
        MyLogger.logger.error(e1.getMessage());
    }

    // Element rootElement = document.getRootElement();
    // System.out.println("Root element name: " + rootElement.getName());
    // System.out.println("Root element attribute count: " + rootElement.attributeCount());
    // System.out.println("Root element id attribute value: " + rootElement.attributeValue("id"));
    // System.out.println("Root element text: " + rootElement.getText());
    // System.out.println("Root element text without newlines/tabs: " + rootElement.getTextTrim());
    // System.out.println("Text content of the root element's child nodes: " + rootElement.getStringValue());
    // Element content = rootElement.element("Content");
    // Element paragraph = content.element("Paragraph");
    // Element sentence = paragraph.element("Sentence");
    // Element event = sentence.element("Event");
    // Element event = paragraph.element("Event");

    @SuppressWarnings("unchecked")
    List<Element> event_list = document.selectNodes("//Sentence/Event");
    @SuppressWarnings("unchecked")
    List<Element> denoter_list = document.selectNodes("//Sentence/Event/Denoter");
    Iterator<Element> denoterIter = denoter_list.iterator();
    Iterator<Element> eventIter = event_list.iterator();
    // Element para = doc.element("para");
    // Element sent = para.element("sent");

    while (denoterIter.hasNext()) {
        Element denoter = denoterIter.next();
        Element event = eventIter.next();
        String denoterValue = denoter.getTextTrim();
        for (int i = 0; i < treeSetsList.size(); i++) {
            if (treeSetsList.get(i).contains(denoterValue)) {
                String typeValue = type_value.get(i); // the type value for this denoter
                if (0 == i) {
                    // This is a thought (mental) event, so the typeValue retrieved here is the Event's attribute value.
                    event.addAttribute("type", typeValue);
                    // By default, the denoter type of a thought event is "statement".
                    denoter.addAttribute("type", "statement");
                    // Note: for a thought event the Event type is "thoughtEvent" and the Denoter attribute is "statement".
                    // As soon as one thought event is found, by convention the thought-event relation is appended to the end of the document.
                    document.getRootElement().addElement(Annotation.THOUGHTEVENT_RELATION);
                } else {
                    // Add the type attribute and value.
                    denoter.addAttribute("type", typeValue);
                }
            }
        }
    }

    // This part checks whether each Time element is an absolute time.
    @SuppressWarnings("unchecked")
    List<Element> time_list = document.selectNodes("//Sentence/Event/Time");
    Iterator<Element> timeIter = time_list.iterator();
    while (timeIter.hasNext()) {
        Element time = timeIter.next();
        String timeValue = time.getTextTrim();
        if (isAbsTime(timeValue)) {
            time.addAttribute("type", "absTime");
        }
    }

    try {
        // Write with XMLWriter so the output format can be controlled. During debugging, garbled
        // characters showed up with this approach; the cause was an encoding mismatch between the
        // XML file in Eclipse and the file handled by the JFrame.
        OutputFormat format = OutputFormat.createPrettyPrint();
        format.setEncoding(EncodingUtil.CHARSET_UTF8);
        // format.setSuppressDeclaration(true); // if true, the XML declaration is not written
        format.setIndent(true);   // enable indentation
        format.setIndent(" ");    // indent with spaces
        format.setNewlines(true); // write line breaks
        XMLWriter writer = new XMLWriter(
                new FileWriterWithEncoding(new File(filePath), EncodingUtil.CHARSET_UTF8), format);
        writer.write(document);
        writer.close();
        // Write with the commons-io utility instead:
        // FileWriterWithEncoding out = new FileWriterWithEncoding(new File(filePath), "UTF-8");
        // document.write(out);
        // out.flush();
        // out.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}