Java Code Examples for org.apache.commons.io.FileUtils#writeLines()
The following examples show how to use org.apache.commons.io.FileUtils#writeLines().
Each example comes from an open-source project; the project, source file, and license are noted above each snippet.
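As a quick orientation before the project examples, here is a minimal sketch of the commonly used writeLines() overloads. The class name, file path, and line values are illustrative only and are not taken from any of the projects below.

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.List;

import org.apache.commons.io.FileUtils;

public class WriteLinesSketch {
    public static void main(String[] args) throws IOException {
        File target = new File("example-output.txt"); // illustrative path
        List<String> lines = Arrays.asList("first", "second", "third");

        // Default VM encoding; the file is created or overwritten
        FileUtils.writeLines(target, lines);

        // Explicit encoding
        FileUtils.writeLines(target, StandardCharsets.UTF_8.name(), lines);

        // Explicit encoding, appending instead of overwriting
        FileUtils.writeLines(target, StandardCharsets.UTF_8.name(), lines, true);
    }
}

writeLines() writes the toString() value of each collection element followed by a line separator. Depending on the Commons IO version, the overloads without an explicit encoding may be deprecated because they rely on the JVM default charset, which is why several of the examples below pass "UTF-8" explicitly.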
Example 1
Source File: AtlasProguardHelper.java From atlas with Apache License 2.0 | 6 votes |
@NotNull
private static File generateKeepFile(List<AwbBundle> awbBundles, File dir) throws IOException {
    KeepConverter refClazzContainer = new KeepConverter();
    for (AwbBundle awbBundle : awbBundles) {
        if (null != awbBundle.getKeepProguardFile() && awbBundle.getKeepProguardFile().exists()) {
            String json = FileUtils.readFileToString(awbBundle.getKeepProguardFile());
            Map<String, ClazzRefInfo> refClazzMap = JSON.parseObject(json,
                new TypeReference<Map<String, ClazzRefInfo>>() {});
            refClazzContainer.addRefClazz(refClazzMap);
        } else {
            sLogger.error("missing " + awbBundle.getKeepProguardFile().getAbsolutePath());
        }
    }
    File maindexkeep = new File(dir, "maindexkeep.cfg");
    FileUtils.writeLines(maindexkeep, refClazzContainer.convertToKeeplines());
    return maindexkeep;
}
Example 2
Source File: GenericExternalProcessTest.java From proarc with GNU General Public License v3.0 | 6 votes |
@Test
public void testEscapePropertiesConfiguration() throws Exception {
    File confFile = temp.newFile("props.cfg");
    FileUtils.writeLines(confFile, Arrays.asList(
            "input.file.name=RESOLVED",
            "-1=ERR",
            "*=ERR2",
            "1-3=ERR3",
            ">1=ERR4",
            "1,2,3=1\\,2\\,3",
            "escape=-escape $${input.file.name}",
            "resolve=-resolve ${input.file.name}[0]"
    ));
    PropertiesConfiguration conf = new PropertiesConfiguration(confFile);
    assertEquals("ERR", conf.getString("-1"));
    assertEquals("ERR2", conf.getString("*"));
    assertEquals("ERR3", conf.getString("1-3"));
    assertEquals("ERR4", conf.getString(">1"));
    assertEquals("1,2,3", conf.getString("1,2,3"));
    assertEquals("-escape ${input.file.name}", conf.getString("escape"));
    assertEquals("-resolve RESOLVED[0]", conf.getString("resolve"));
}
Example 3
Source File: Test.java From exec-maven-plugin with Apache License 2.0 | 6 votes |
public static void main( String[] args ) throws Exception {
    if ( args.length == 0 ) {
        throw new IllegalArgumentException( "missing output file path" );
    }

    List myProperties = new ArrayList();
    for ( int i = 0; i < args.length - 1; i++ ) {
        myProperties.add( "arg." + args[ i + 1 ] );
    }

    Properties systemProperties = System.getProperties();
    for ( Iterator it = systemProperties.keySet().iterator(); it.hasNext(); ) {
        String key = it.next().toString();
        if ( key.startsWith( "project." ) ) {
            myProperties.add( key + "=" + systemProperties.get( key ) );
        }
    }
    myProperties.add( "user.dir=" + systemProperties.get( "user.dir" ) );
    myProperties.add( "java.class.path=" + systemProperties.get( "java.class.path" ) );

    Collections.sort( myProperties );

    File toFile = new File( args[0] );
    toFile.getParentFile().mkdirs();
    FileUtils.writeLines( new File( args[0] ), "UTF-8", myProperties );
}
Example 4
Source File: SparkExport.java From DataVec with Apache License 2.0 | 6 votes |
public static void exportCSVLocal(String outputDir, String baseFileName, int numFiles, String delimiter,
                String quote, JavaRDD<List<Writable>> data) throws Exception {
    JavaRDD<String> lines = data.map(new WritablesToStringFunction(delimiter, quote));
    double[] split = new double[numFiles];
    for (int i = 0; i < split.length; i++)
        split[i] = 1.0 / numFiles;
    JavaRDD<String>[] splitData = lines.randomSplit(split);

    int count = 0;
    for (JavaRDD<String> subset : splitData) {
        String path = FilenameUtils.concat(outputDir, baseFileName + (count++) + ".csv");
        // subset.saveAsTextFile(path);
        List<String> linesList = subset.collect();
        FileUtils.writeLines(new File(path), linesList);
    }
}
Example 5
Source File: Test.java From exec-maven-plugin with Apache License 2.0 | 6 votes |
public static void main( String[] args ) throws Exception {
    if ( args.length == 0 ) {
        throw new IllegalArgumentException( "missing output file path" );
    }

    List myProperties = new ArrayList();
    for ( int i = 0; i < args.length - 1; i++ ) {
        myProperties.add( "arg." + args[ i + 1 ] );
    }

    Properties systemProperties = System.getProperties();
    for ( Iterator it = systemProperties.keySet().iterator(); it.hasNext(); ) {
        String key = it.next().toString();
        if ( key.startsWith( "project." ) ) {
            myProperties.add( key + "=" + systemProperties.get( key ) );
        }
    }

    Collections.sort( myProperties );

    File toFile = new File( args[0] );
    toFile.getParentFile().mkdirs();
    FileUtils.writeLines( new File( args[0] ), "UTF-8", myProperties );
}
Example 6
Source File: KeyFileStore.java From matrix-java-sdk with GNU Affero General Public License v3.0 | 5 votes |
@Override
public void store(String key) {
    try {
        FileUtils.writeLines(file, charset.name(), Collections.singletonList(key), false);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
Example 7
Source File: DeploymentConfigurationFactoryTest.java From flow with Apache License 2.0 | 5 votes |
@Test
public void createInitParameters_initParamtersAreSet_tokenDevModePropertiesAreNotSet()
        throws Exception {
    FileUtils.writeLines(tokenFile, Arrays.asList("{",
            "\"pnpm.enable\": true,", "\"require.home.node\": true,", "}"));

    DeploymentConfiguration config = createConfig(Collections
            .singletonMap(PARAM_TOKEN_FILE, tokenFile.getPath()));

    config.getInitParameters().setProperty(
            InitParameters.SERVLET_PARAMETER_ENABLE_PNPM,
            Boolean.FALSE.toString());
    config.getInitParameters().setProperty(
            InitParameters.REQUIRE_HOME_NODE_EXECUTABLE,
            Boolean.FALSE.toString());
    config.getInitParameters().setProperty(
            InitParameters.SERVLET_PARAMETER_DEVMODE_OPTIMIZE_BUNDLE,
            Boolean.FALSE.toString());

    Assert.assertEquals(Boolean.FALSE.toString(), config.getInitParameters()
            .getProperty(InitParameters.SERVLET_PARAMETER_ENABLE_PNPM));
    Assert.assertEquals(Boolean.FALSE.toString(), config.getInitParameters()
            .getProperty(InitParameters.REQUIRE_HOME_NODE_EXECUTABLE));
    Assert.assertEquals(Boolean.FALSE.toString(),
            config.getInitParameters().getProperty(
                    InitParameters.SERVLET_PARAMETER_DEVMODE_OPTIMIZE_BUNDLE));
}
Example 8
Source File: PatchFieldTool.java From atlas with Apache License 2.0 | 5 votes |
/**
 * Replace a const string in a dex file.
 *
 * @param dexFile
 * @param outDexFile
 * @param orgFieldValue
 * @param newFieldValue
 * @return
 */
public static boolean modifyFieldValue(File dexFile, File outDexFile, String orgFieldValue, String newFieldValue)
        throws IOException, RecognitionException {
    File smaliFolder = new File(outDexFile.getParentFile(), "smali");
    if (smaliFolder.exists()) {
        FileUtils.deleteDirectory(smaliFolder);
    }
    smaliFolder.mkdirs();
    boolean disassembled = SmaliUtils.disassembleDexFile(dexFile, smaliFolder);
    if (disassembled) {
        Collection<File> smaliFiles = FileUtils.listFiles(smaliFolder, new String[]{"smali"}, true);
        for (File smaliFile : smaliFiles) {
            List<String> lines = FileUtils.readLines(smaliFile);
            for (int index = 0; index < lines.size(); index++) {
                String line = lines.get(index);
                String newLine = StringUtils.replace(line, "\"" + orgFieldValue + "\"",
                        "\"" + newFieldValue + "\"");
                lines.set(index, newLine);
            }
            FileUtils.writeLines(smaliFile, lines);
        }
        // Reassemble the smali folder back into a dex file
        boolean assembled = SmaliUtils.assembleSmaliFile(smaliFolder, outDexFile);
        if (assembled) {
            FileUtils.deleteDirectory(smaliFolder);
            return true;
        }
    }
    return false;
}
Example 9
Source File: PCGIOHandler.java From pcgen with GNU Lesser General Public License v2.1 | 5 votes |
public static void write(File partyFile, List<File> characterFiles) {
    String versionLine = "VERSION:" + PCGenPropBundle.getVersionNumber();
    String[] files = new String[characterFiles.size()];
    Arrays.setAll(files, i -> FileHelper.findRelativePath(partyFile, characterFiles.get(i)));
    String filesLine = StringUtils.join(files, ',');
    try {
        FileUtils.writeLines(partyFile, "UTF-8", Arrays.asList(versionLine, filesLine));
    } catch (IOException ex) {
        Logging.errorPrint("Could not save the party file: " + partyFile.getAbsolutePath(), ex);
    }
}
Example 10
Source File: DeploymentConfigurationFactoryTest.java From flow with Apache License 2.0 | 5 votes |
@Test
public void createInitParameters_readDevModeProperties() throws Exception {
    FileUtils.writeLines(tokenFile, Arrays.asList("{",
            "\"pnpm.enable\": true,", "\"require.home.node\": true,", "}"));

    DeploymentConfiguration config = createConfig(Collections
            .singletonMap(PARAM_TOKEN_FILE, tokenFile.getPath()));

    Assert.assertEquals(Boolean.TRUE.toString(), config.getInitParameters()
            .getProperty(InitParameters.SERVLET_PARAMETER_ENABLE_PNPM));
    Assert.assertEquals(Boolean.TRUE.toString(), config.getInitParameters()
            .getProperty(InitParameters.REQUIRE_HOME_NODE_EXECUTABLE));
}
Example 11
Source File: DeploymentConfigurationFactoryTest.java From flow with Apache License 2.0 | 5 votes |
@Test
public void shouldThrow_tokenFileContainsNonExistingFrontendFolderNoNpmFolder()
        throws Exception {
    exception.expect(IllegalStateException.class);
    exception.expectMessage(
            String.format(DEV_FOLDER_MISSING_MESSAGE, "frontend"));
    FileUtils.writeLines(tokenFile,
            Arrays.asList("{", "\"productionMode\": false,",
                    "\"frontendFolder\": \"frontend\"", "}"));

    createConfig(Collections.singletonMap(PARAM_TOKEN_FILE,
            tokenFile.getPath()));
}
Example 12
Source File: TempMetadataBuilder.java From kylin-on-parquet-v2 with Apache License 2.0 | 5 votes |
private void overrideEngineTypeAndStorageType(String tempMetadataDir, Pair<Integer, Integer> typePair,
        List<String> includeFiles) throws IOException {
    int engineType = typePair.getFirst();
    int storageType = typePair.getSecond();
    if (debug) {
        logger.info("Override engine type to be {}", engineType);
        logger.info("Override storage type to be {}", storageType);
    }

    // re-write cube_desc/*.json
    File cubeDescDir = new File(tempMetadataDir, "cube_desc");
    File[] cubeDescFiles = cubeDescDir.listFiles();
    if (cubeDescFiles == null)
        return;
    for (File f : cubeDescFiles) {
        if (includeFiles != null && !includeFiles.contains(f.getName())) {
            continue;
        }
        if (debug) {
            logger.info("Process override {}", f.getCanonicalPath());
        }
        List<String> lines = FileUtils.readLines(f, Charsets.UTF_8);
        for (int i = 0, n = lines.size(); i < n; i++) {
            String l = lines.get(i);
            if (l.contains("\"engine_type\"")) {
                lines.set(i, " \"engine_type\" : " + engineType + ",");
            }
            if (l.contains("\"storage_type\"")) {
                lines.set(i, " \"storage_type\" : " + storageType + ",");
            }
        }
        FileUtils.writeLines(f, "UTF-8", lines);
    }
}
Example 13
Source File: LineReaderTest.java From deeplearning4j with Apache License 2.0 | 5 votes |
@Test
public void testLineReader() throws Exception {
    File tmpdir = testDir.newFolder();
    if (tmpdir.exists())
        tmpdir.delete();
    tmpdir.mkdir();

    File tmp1 = new File(FilenameUtils.concat(tmpdir.getPath(), "tmp1.txt"));
    File tmp2 = new File(FilenameUtils.concat(tmpdir.getPath(), "tmp2.txt"));
    File tmp3 = new File(FilenameUtils.concat(tmpdir.getPath(), "tmp3.txt"));

    FileUtils.writeLines(tmp1, Arrays.asList("1", "2", "3"));
    FileUtils.writeLines(tmp2, Arrays.asList("4", "5", "6"));
    FileUtils.writeLines(tmp3, Arrays.asList("7", "8", "9"));

    InputSplit split = new FileSplit(tmpdir);
    RecordReader reader = new LineRecordReader();
    reader.initialize(split);

    int count = 0;
    List<List<Writable>> list = new ArrayList<>();
    while (reader.hasNext()) {
        List<Writable> l = reader.next();
        assertEquals(1, l.size());
        list.add(l);
        count++;
    }

    assertEquals(9, count);
}
Example 14
Source File: MyPdfUtils.java From spring-boot with Apache License 2.0 | 5 votes |
/**
 * Check whether the PDF files inside a folder are encrypted; recurses into subfolders.
 *
 * @param srcPdfFileDir the folder to check
 * @throws java.io.IOException
 */
public static void findEncryptPdf(File srcPdfFileDir) throws IOException {
    if (tempList == null)
        tempList = new ArrayList();
    tempList.clear(); // re-initialize; otherwise the static list keeps results from previous runs
    findEncryptPdf0(srcPdfFileDir);
    if (tempList.size() != 0)
        FileUtils.writeLines(new File(srcPdfFileDir.getParent() + File.separator + "encrypt.txt"),
                StandardCharsets.UTF_8.name(), tempList);
    log.info(" finished!");
}
Example 15
Source File: AtlasBuilder.java From atlas with Apache License 2.0 | 5 votes |
public void writeLines(File file, List<String> lines, boolean append) {
    Set<String> mergeLines = new LinkedHashSet<>();
    try {
        List<String> readLines = FileUtils.readLines(file);
        mergeLines.addAll(readLines);
        mergeLines.addAll(lines);
        FileUtils.writeLines(file, mergeLines, append);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
Example 16
Source File: Ignore.java From floobits-intellij with Apache License 2.0 | 5 votes |
public static void writeDefaultIgnores(IContext context) {
    Flog.log("Creating default ignores.");
    String path = FilenameUtils.concat(context.colabDir, ".flooignore");
    try {
        File f = new File(path);
        if (f.exists()) {
            return;
        }
        FileUtils.writeLines(f, DEFAULT_IGNORES);
    } catch (IOException e) {
        Flog.error(e);
    }
}
Example 17
Source File: TestDatastreamServer.java From brooklin with BSD 2-Clause "Simplified" License | 4 votes |
@Test(enabled = false)
public void testNodeUpRebalanceTwoDatastreamsSimpleStrategy() throws Exception {
    _datastreamCluster = initializeTestDatastreamServerWithFileConnector(2, LOAD_BALANCING_STRATEGY_FACTORY);
    _datastreamCluster.startupServer(0);

    List<DatastreamServer> servers = _datastreamCluster.getAllDatastreamServers();
    Assert.assertEquals(servers.size(), 2);
    Assert.assertNotNull(servers.get(0));
    DatastreamServer server1 = servers.get(0);

    Path tempFile1 = Files.createTempFile("testFile1", "");
    String fileName1 = tempFile1.toAbsolutePath().toString();
    Path tempFile2 = Files.createTempFile("testFile2", "");
    String fileName2 = tempFile2.toAbsolutePath().toString();

    Datastream fileDatastream1 = createFileDatastream(fileName1, 1);
    Datastream fileDatastream2 = createFileDatastream(fileName2, 1);

    int totalEvents = 10;
    List<String> eventsWritten1 = TestUtils.generateStrings(totalEvents);
    List<String> eventsWritten2 = TestUtils.generateStrings(totalEvents);

    FileUtils.writeLines(new File(fileName1), eventsWritten1);
    FileUtils.writeLines(new File(fileName2), eventsWritten2);

    List<String> eventsReceived1 = readFileDatastreamEvents(fileDatastream1, totalEvents);
    List<String> eventsReceived2 = readFileDatastreamEvents(fileDatastream2, totalEvents);

    LOG.info("(1) Events Received " + eventsReceived1);
    LOG.info("(1) Events Written to file " + eventsWritten1);
    LOG.info("(2) Events Received " + eventsReceived2);
    LOG.info("(2) Events Written to file " + eventsWritten2);

    Assert.assertTrue(eventsReceived1.containsAll(eventsWritten1));
    Assert.assertTrue(eventsReceived2.containsAll(eventsWritten2));

    // Ensure 1st instance was assigned both tasks
    String cluster = _datastreamCluster.getDatastreamServerProperties().get(0).getProperty(CONFIG_CLUSTER_NAME);
    ZkClient zkclient = new ZkClient(_datastreamCluster.getZkConnection());
    String instance1 = server1.getCoordinator().getInstanceName();
    String assignmentPath = KeyBuilder.instanceAssignments(cluster, instance1);
    List<String> assignments = zkclient.getChildren(assignmentPath);
    Assert.assertEquals(assignments.size(), 2);

    // Start 2nd instance and wait until it shows up in ZK
    _datastreamCluster.startupServer(1);
    DatastreamServer server2 = servers.get(1);
    Assert.assertNotNull(server2);
    String instancesPath = KeyBuilder.liveInstances(cluster);
    Assert.assertTrue(PollUtils.poll(() -> zkclient.getChildren(instancesPath).size() == 2, 100, 5000));

    // Ensure each instance gets one task
    assignmentPath = KeyBuilder.instanceAssignments(cluster, instance1);
    Assert.assertTrue(PollUtils.poll((path) -> zkclient.getChildren(path).size() == 1, 100, 10000, assignmentPath));
    LOG.info("Instance1 got task: " + zkclient.getChildren(assignmentPath));

    String instance2 = server2.getCoordinator().getInstanceName();
    assignmentPath = KeyBuilder.instanceAssignments(cluster, instance2);
    Assert.assertTrue(PollUtils.poll((path) -> zkclient.getChildren(path).size() == 1, 100, 10000, assignmentPath));
    LOG.info("Instance2 got task: " + zkclient.getChildren(assignmentPath));

    // Wait 3 seconds to allow the connectors to stop the handler and flush the checkpoints
    // Automatic flush period is 1 second by default.
    Thread.sleep(3000);

    eventsWritten1 = TestUtils.generateStrings(totalEvents);
    eventsWritten2 = TestUtils.generateStrings(totalEvents);

    FileUtils.writeLines(new File(fileName1), eventsWritten1, true /* append */);
    FileUtils.writeLines(new File(fileName2), eventsWritten2, true /* append */);

    // Read twice as many events because KafkaTestUtils.readTopic always seeks
    // to the beginning of the topic such that previous events are included
    eventsReceived1 = readFileDatastreamEvents(fileDatastream1, totalEvents * 2);
    eventsReceived2 = readFileDatastreamEvents(fileDatastream2, totalEvents * 2);

    LOG.info("(1-NEW) Events Received " + eventsReceived1);
    LOG.info("(1-NEW) Events Written to file " + eventsWritten1);
    LOG.info("(2-NEW) Events Received " + eventsReceived2);
    LOG.info("(2-NEW) Events Written to file " + eventsWritten2);

    Assert.assertTrue(eventsReceived1.containsAll(eventsWritten1));
    Assert.assertTrue(eventsReceived2.containsAll(eventsWritten2));
}
Example 18
Source File: UploadDirectoryScanner.java From dddsample-core with MIT License | 4 votes |
private void writeRejectedLinesToFile(final String filename, final List<String> rejectedLines) throws IOException {
    FileUtils.writeLines(new File(parseFailureDirectory, filename), rejectedLines);
}
Example 19
Source File: TestFileBasedIPList.java From hadoop with Apache License 2.0 | 4 votes |
public static void createFileWithEntries(String fileName, String[] ips) throws IOException {
    FileUtils.writeLines(new File(fileName), Arrays.asList(ips));
}
Example 20
Source File: BundleInfoUtils.java From atlas with Apache License 2.0 | 4 votes |
private static Map<String, BundleInfo> getBundleInfoMap(AppVariantContext appVariantContext) throws IOException {
    File baseBunfleInfoFile = new File(appVariantContext.getScope()
                                           .getGlobalScope()
                                           .getProject()
                                           .getProjectDir(), "bundleBaseInfoFile.json");
    // Use the file replacement in the plug-in
    Map<String, BundleInfo> bundleFileMap = Maps.newHashMap();
    if (null != baseBunfleInfoFile && baseBunfleInfoFile.exists() && baseBunfleInfoFile.canRead()) {
        String bundleBaseInfo = FileUtils.readFileToString(baseBunfleInfoFile, "utf-8");
        bundleFileMap = JSON.parseObject(bundleBaseInfo, new TypeReference<Map<String, BundleInfo>>() {
        });
    }
    List<AwbBundle> awbBundles = AtlasBuildContext.androidDependencyTrees.get(
        appVariantContext.getVariantData().getName()).getAwbBundles();
    List<String> duplicatedBundleInfo = new ArrayList<>();
    Set<String> pkgNames = new HashSet<>();
    for (AwbBundle awbBundle : awbBundles) {
        pkgNames.add(awbBundle.getPackageName());
        String name = awbBundle.getResolvedCoordinates().getArtifactId();
        File bundleBaseInfoFile = new File(awbBundle.getAndroidLibrary().getFolder(), "bundleBaseInfoFile.json");
        if (bundleBaseInfoFile.exists()) {
            String json = FileUtils.readFileToString(bundleBaseInfoFile, "utf-8");
            BundleInfo bundleInfo = JSON.parseObject(json, BundleInfo.class);
            if (bundleFileMap.containsKey(name)) {
                appVariantContext.getProject().getLogger().error(
                    "bundleBaseInfoFile>>>" + name + " has declared bundleBaseInfoFile");
                duplicatedBundleInfo.add(name);
                for (String dependency : bundleInfo.getDependency()) {
                    if (!bundleFileMap.get(name).getDependency().contains(dependency)) {
                        bundleFileMap.get(name).getDependency().add(dependency);
                    }
                }
            } else {
                bundleFileMap.put(name, bundleInfo);
            }
        }
    }
    bundleFileMap.values().forEach(bundleInfo -> {
        List<String> removedBundles = new ArrayList<>();
        List<String> deps = bundleInfo.getDependency();
        for (String s : deps) {
            if (!pkgNames.contains(s)) {
                removedBundles.add(s);
            }
        }
        deps.removeAll(removedBundles);
    });
    if (duplicatedBundleInfo.size() > 0) {
        FileUtils.writeLines(
            new File(appVariantContext.getProject().getBuildDir(), "outputs/warning-dupbundleinfo.properties"),
            duplicatedBundleInfo);
    }
    return bundleFileMap;
}