org.apache.flink.util.FileUtils Java Examples
The following examples show how to use
org.apache.flink.util.FileUtils.
The original project, source file, and license are noted above each example.
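Before the examples, here is a minimal, self-contained sketch of the FileUtils calls that appear most often below: writeFileUtf8, readFileUtf8, and deleteDirectory. The class name and the temporary directory are made up for illustration; only the FileUtils calls themselves mirror the usage in the examples.

import java.io.File;
import java.nio.file.Files;

import org.apache.flink.util.FileUtils;

public class FileUtilsQuickstart {
    public static void main(String[] args) throws Exception {
        // a throwaway working directory (illustrative only)
        File dir = Files.createTempDirectory("fileutils-demo").toFile();
        File data = new File(dir, "data.txt");

        // write a UTF-8 string to a file and read it back
        FileUtils.writeFileUtf8(data, "hello flink");
        String content = FileUtils.readFileUtf8(data);
        System.out.println(content); // prints "hello flink"

        // recursively remove the directory and everything in it
        FileUtils.deleteDirectory(dir);
    }
}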
Example #1
Source File: FileCacheDirectoriesTest.java From flink with Apache License 2.0

@Test
public void testDirectoryDownloadedFromBlob() throws Exception {
    JobID jobID = new JobID();
    ExecutionAttemptID attemptID = new ExecutionAttemptID();

    final String fileName = "test_file";
    // copy / create the file
    final DistributedCache.DistributedCacheEntry entry = new DistributedCache.DistributedCacheEntry(
        fileName,
        false,
        InstantiationUtil.serializeObject(permanentBlobKey),
        true);
    Future<Path> copyResult = fileCache.createTmpFile(fileName, entry, jobID, attemptID);

    final Path dstPath = copyResult.get();
    final FileSystem fs = dstPath.getFileSystem();
    final FileStatus fileStatus = fs.getFileStatus(dstPath);
    assertTrue(fileStatus.isDir());

    final Path cacheFile = new Path(dstPath, "cacheFile");
    assertTrue(fs.exists(cacheFile));
    final String actualContent = FileUtils.readFileUtf8(new File(cacheFile.getPath()));
    assertEquals(testFileContent, actualContent);
}
Example #2
Source File: FileCacheDirectoriesTest.java From flink with Apache License 2.0

private void testDirectoryDownloaded(DistributedCache.DistributedCacheEntry entry) throws Exception {
    JobID jobID = new JobID();
    ExecutionAttemptID attemptID = new ExecutionAttemptID();

    // copy / create the file
    final String fileName = "test_file";
    Future<Path> copyResult = fileCache.createTmpFile(fileName, entry, jobID, attemptID);

    final Path dstPath = copyResult.get();
    final FileSystem fs = dstPath.getFileSystem();
    final FileStatus fileStatus = fs.getFileStatus(dstPath);
    assertTrue(fileStatus.isDir());

    final Path cacheFile = new Path(dstPath, "cacheFile");
    assertTrue(fs.exists(cacheFile));
    final String actualContent = FileUtils.readFileUtf8(new File(cacheFile.getPath()));
    assertEquals(testFileContent, actualContent);
}
Example #3
Source File: PageRankITCase.java From Flink-CEPplus with Apache License 2.0

@Before
public void before() throws Exception {
    File resultFile = tempFolder.newFile();
    // Delete file because the Scala API does not respect WriteMode set by the configuration
    resultFile.delete();
    resultPath = resultFile.toURI().toString();

    File verticesFile = tempFolder.newFile();
    FileUtils.writeFileUtf8(verticesFile, PageRankData.VERTICES);

    File edgesFile = tempFolder.newFile();
    FileUtils.writeFileUtf8(edgesFile, PageRankData.EDGES);

    verticesPath = verticesFile.toURI().toString();
    edgesPath = edgesFile.toURI().toString();
}
Example #4
Source File: AbstractTaskManagerFileHandlerTest.java From flink with Apache License 2.0

/**
 * Tests that the {@link AbstractTaskManagerFileHandler} serves the requested file.
 */
@Test
public void testFileServing() throws Exception {
    final Time cacheEntryDuration = Time.milliseconds(1000L);

    final Queue<CompletableFuture<TransientBlobKey>> requestFileUploads = new ArrayDeque<>(1);
    requestFileUploads.add(CompletableFuture.completedFuture(transientBlobKey1));

    final TestTaskManagerFileHandler testTaskManagerFileHandler = createTestTaskManagerFileHandler(
        cacheEntryDuration, requestFileUploads, EXPECTED_TASK_MANAGER_ID);

    final File outputFile = temporaryFolder.newFile();
    final TestingChannelHandlerContext testingContext = new TestingChannelHandlerContext(outputFile);

    testTaskManagerFileHandler.respondToRequest(testingContext, HTTP_REQUEST, handlerRequest, null);

    assertThat(outputFile.length(), is(greaterThan(0L)));
    assertThat(FileUtils.readFileUtf8(outputFile), is(equalTo(fileContent1)));
}
Example #5
Source File: PageRankITCase.java From flink with Apache License 2.0

@Before
public void before() throws Exception {
    File resultFile = tempFolder.newFile();
    // Delete file because the Scala API does not respect WriteMode set by the configuration
    resultFile.delete();
    resultPath = resultFile.toURI().toString();

    File verticesFile = tempFolder.newFile();
    FileUtils.writeFileUtf8(verticesFile, PageRankData.VERTICES);

    File edgesFile = tempFolder.newFile();
    FileUtils.writeFileUtf8(edgesFile, PageRankData.EDGES);

    verticesPath = verticesFile.toURI().toString();
    edgesPath = edgesFile.toURI().toString();
}
Example #6
Source File: IncrementalSSSPITCase.java From flink with Apache License 2.0

@Before
public void before() throws Exception {
    resultPath = tempFolder.newFile().toURI().toString();

    File verticesFile = tempFolder.newFile();
    FileUtils.writeFileUtf8(verticesFile, IncrementalSSSPData.VERTICES);

    File edgesFile = tempFolder.newFile();
    FileUtils.writeFileUtf8(edgesFile, IncrementalSSSPData.EDGES);

    File edgesInSSSPFile = tempFolder.newFile();
    FileUtils.writeFileUtf8(edgesInSSSPFile, IncrementalSSSPData.EDGES_IN_SSSP);

    verticesPath = verticesFile.toURI().toString();
    edgesPath = edgesFile.toURI().toString();
    edgesInSSSPPath = edgesInSSSPFile.toURI().toString();
}
Example #7
Source File: IOManager.java From Flink-CEPplus with Apache License 2.0

/**
 * Close method, marks the I/O manager as closed
 * and removes all temporary files.
 */
public void shutdown() {
    // remove all of our temp directories
    for (File path : paths) {
        try {
            if (path != null) {
                if (path.exists()) {
                    FileUtils.deleteDirectory(path);
                    LOG.info("I/O manager removed spill file directory {}", path.getAbsolutePath());
                }
            }
        } catch (Throwable t) {
            LOG.error("IOManager failed to properly clean up temp file directory: " + path, t);
        }
    }
}
Example #8
Source File: JsonRowSchemaConverterTest.java From flink with Apache License 2.0

@Test
public void testComplexSchema() throws Exception {
    final URL url = getClass().getClassLoader().getResource("complex-schema.json");
    Objects.requireNonNull(url);
    final String schema = FileUtils.readFileUtf8(new File(url.getFile()));
    final TypeInformation<?> result = JsonRowSchemaConverter.convert(schema);

    final TypeInformation<?> expected = Types.ROW_NAMED(
        new String[] {"fn", "familyName", "additionalName", "tuples", "honorificPrefix", "url",
            "email", "tel", "sound", "org"},
        Types.STRING,
        Types.STRING,
        Types.BOOLEAN,
        Types.ROW(Types.BIG_DEC, Types.STRING, Types.STRING, Types.STRING),
        Types.OBJECT_ARRAY(Types.STRING),
        Types.STRING,
        Types.ROW_NAMED(new String[] {"type", "value"}, Types.STRING, Types.STRING),
        Types.ROW_NAMED(new String[] {"type", "value"}, Types.BIG_DEC, Types.STRING),
        Types.VOID,
        Types.ROW_NAMED(new String[] {"organizationUnit"}, Types.ROW()));

    assertEquals(expected, result);
}
Example #9
Source File: AbstractBlobCache.java From Flink-CEPplus with Apache License 2.0

@Override
public void close() throws IOException {
    cancelCleanupTask();

    if (shutdownRequested.compareAndSet(false, true)) {
        log.info("Shutting down BLOB cache");

        // Clean up the storage directory
        try {
            FileUtils.deleteDirectory(storageDir);
        } finally {
            // Remove shutdown hook to prevent resource leaks
            ShutdownHookUtil.removeShutdownHook(shutdownHook, getClass().getSimpleName(), log);
        }
    }
}
Example #10
Source File: AbstractBlobCache.java From flink with Apache License 2.0

@Override
public void close() throws IOException {
    cancelCleanupTask();

    if (shutdownRequested.compareAndSet(false, true)) {
        log.info("Shutting down BLOB cache");

        // Clean up the storage directory
        try {
            FileUtils.deleteDirectory(storageDir);
        } finally {
            // Remove shutdown hook to prevent resource leaks
            ShutdownHookUtil.removeShutdownHook(shutdownHook, getClass().getSimpleName(), log);
        }
    }
}
Example #11
Source File: SpilledBufferOrEventSequenceTest.java From flink with Apache License 2.0

@Test
public void testCleanup() {
    try {
        ByteBuffer data = ByteBuffer.allocate(157);
        data.order(ByteOrder.LITTLE_ENDIAN);

        FileUtils.writeCompletely(fileChannel, data);
        fileChannel.position(54);

        SpilledBufferOrEventSequence seq = new SpilledBufferOrEventSequence(tempFile, fileChannel, buffer, pageSize);
        seq.open();
        seq.cleanup();

        assertFalse(fileChannel.isOpen());
        assertFalse(tempFile.exists());
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Example #12
Source File: PythonEnvUtils.java From flink with Apache License 2.0

public void run() {
    p.destroyForcibly();
    if (pyFileDir != null) {
        File pyDir = new File(pyFileDir);
        FileUtils.deleteDirectoryQuietly(pyDir);
    }
}
Example #13
Source File: BucketStateSerializerTest.java From flink with Apache License 2.0

private static BucketState<String> readBucketStateFromTemplate(final String scenarioName, final int version) throws IOException {
    final java.nio.file.Path scenarioPath = getResourcePath(scenarioName, version);

    // clear the scenario files first
    FileUtils.deleteDirectory(scenarioPath.toFile());

    // prepare the scenario files
    FileUtils.copy(new Path(scenarioPath.toString() + "-template"), new Path(scenarioPath.toString()), false);

    return readBucketState(scenarioName, version);
}
Example #14
Source File: FileCacheReadsFromBlobTest.java From flink with Apache License 2.0

@Override
public File getFile(JobID jobId, PermanentBlobKey key) throws IOException {
    if (key.equals(permanentBlobKey)) {
        File f = temporaryFolder.newFile("cacheFile");
        FileUtils.writeFileUtf8(f, testFileContent);
        return f;
    } else {
        throw new IllegalArgumentException("This service contains only entry for " + permanentBlobKey);
    }
}
Example #15
Source File: ClassPathPackagedProgramRetrieverTest.java From flink with Apache License 2.0

@BeforeClass
public static void init() throws IOException {
    final String textFileName = "test.txt";
    final String userDirHasEntryClassName = "_test_user_dir_has_entry_class";
    final String userDirHasNotEntryClassName = "_test_user_dir_has_not_entry_class";

    userDirHasEntryClass = JOB_DIRS.newFolder(userDirHasEntryClassName);
    final Path userJarPath = userDirHasEntryClass.toPath().resolve(TestJobInfo.JOB_JAR_PATH.toFile().getName());
    final Path userLibJarPath = userDirHasEntryClass.toPath().resolve(TestJobInfo.JOB_LIB_JAR_PATH.toFile().getName());
    userDirHasNotEntryClass = JOB_DIRS.newFolder(userDirHasNotEntryClassName);

    // create files
    Files.copy(TestJobInfo.JOB_JAR_PATH, userJarPath);
    Files.copy(TestJobInfo.JOB_LIB_JAR_PATH, userLibJarPath);
    Files.createFile(userDirHasEntryClass.toPath().resolve(textFileName));

    Files.copy(TestJobInfo.JOB_LIB_JAR_PATH, userDirHasNotEntryClass.toPath().resolve(TestJobInfo.JOB_LIB_JAR_PATH.toFile().getName()));
    Files.createFile(userDirHasNotEntryClass.toPath().resolve(textFileName));

    final Path workingDirectory = FileUtils.getCurrentWorkingDirectory();
    Arrays.asList(userJarPath, userLibJarPath)
        .stream()
        .map(path -> FileUtils.relativizePath(workingDirectory, path))
        .map(FunctionUtils.uncheckedFunction(FileUtils::toURL))
        .forEach(expectedURLs::add);
}
Example #16
Source File: FileCacheDirectoriesTest.java From flink with Apache License 2.0

@Override
public File getFile(JobID jobId, PermanentBlobKey key) throws IOException {
    if (key.equals(permanentBlobKey)) {
        final java.nio.file.Path directory = temporaryFolder.newFolder("zipArchive").toPath();
        final java.nio.file.Path containedFile = directory.resolve("cacheFile");
        Files.copy(new ByteArrayInputStream(testFileContent.getBytes(StandardCharsets.UTF_8)), containedFile);
        Path zipPath = FileUtils.compressDirectory(new Path(directory.toString()), new Path(directory + ".zip"));
        return new File(zipPath.getPath());
    } else {
        throw new IllegalArgumentException("This service contains only entry for " + permanentBlobKey);
    }
}
Example #17
Source File: StreamWindowSQLExample.java From flink-learning with Apache License 2.0

/**
 * Creates a temporary file with the contents and returns the absolute path.
 */
private static String createTempFile(String contents) throws IOException {
    File tempFile = File.createTempFile("orders", ".csv");
    tempFile.deleteOnExit();
    FileUtils.writeFileUtf8(tempFile, contents);
    return tempFile.toURI().toString();
}
Example #18
Source File: SingleSourceShortestPathsITCase.java From flink with Apache License 2.0

@Before
public void before() throws Exception {
    resultPath = tempFolder.newFile().toURI().toString();

    File edgesFile = tempFolder.newFile();
    FileUtils.writeFileUtf8(edgesFile, SingleSourceShortestPathsData.EDGES);
    edgesPath = edgesFile.toURI().toString();
}
Example #19
Source File: HiveCatalogITCase.java From flink with Apache License 2.0

@Test
public void testReadWriteCsv() throws Exception {
    // similar to CatalogTableITCase::testReadWriteCsvUsingDDL but uses HiveCatalog
    EnvironmentSettings settings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build();
    TableEnvironment tableEnv = TableEnvironment.create(settings);
    tableEnv.getConfig().getConfiguration().setInteger(TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM, 1);

    tableEnv.registerCatalog("myhive", hiveCatalog);
    tableEnv.useCatalog("myhive");

    String srcPath = this.getClass().getResource("/csv/test3.csv").getPath();

    tableEnv.executeSql("CREATE TABLE src (" +
        "price DECIMAL(10, 2),currency STRING,ts6 TIMESTAMP(6),ts AS CAST(ts6 AS TIMESTAMP(3)),WATERMARK FOR ts AS ts) " +
        String.format("WITH ('connector.type' = 'filesystem','connector.path' = 'file://%s','format.type' = 'csv')", srcPath));

    String sinkPath = new File(tempFolder.newFolder(), "csv-order-sink").toURI().toString();

    tableEnv.executeSql("CREATE TABLE sink (" +
        "window_end TIMESTAMP(3),max_ts TIMESTAMP(6),counter BIGINT,total_price DECIMAL(10, 2)) " +
        String.format("WITH ('connector.type' = 'filesystem','connector.path' = '%s','format.type' = 'csv')", sinkPath));

    TableEnvUtil.execInsertSqlAndWaitResult(tableEnv, "INSERT INTO sink " +
        "SELECT TUMBLE_END(ts, INTERVAL '5' SECOND),MAX(ts6),COUNT(*),MAX(price) FROM src " +
        "GROUP BY TUMBLE(ts, INTERVAL '5' SECOND)");

    String expected = "2019-12-12 00:00:05.0,2019-12-12 00:00:04.004001,3,50.00\n" +
        "2019-12-12 00:00:10.0,2019-12-12 00:00:06.006001,2,5.33\n";
    assertEquals(expected, FileUtils.readFileUtf8(new File(new URI(sinkPath))));
}
Example #20
Source File: ConfigOptionsDocGeneratorTest.java From flink with Apache License 2.0

@Test
public void testCommonOptions() throws IOException, ClassNotFoundException {
    final String projectRootDir = System.getProperty("rootDir");
    final String outputDirectory = TMP.newFolder().getAbsolutePath();

    final OptionsClassLocation[] locations = new OptionsClassLocation[] {
        new OptionsClassLocation("flink-docs", TestCommonOptions.class.getPackage().getName())
    };

    ConfigOptionsDocGenerator.generateCommonSection(projectRootDir, outputDirectory, locations, "src/test/java");

    Formatter formatter = new HtmlFormatter();

    String expected = "<table class=\"table table-bordered\">\n" +
        " <thead>\n" +
        " <tr>\n" +
        " <th class=\"text-left\" style=\"width: 20%\">Key</th>\n" +
        " <th class=\"text-left\" style=\"width: 15%\">Default</th>\n" +
        " <th class=\"text-left\" style=\"width: 65%\">Description</th>\n" +
        " </tr>\n" +
        " </thead>\n" +
        " <tbody>\n" +
        " <tr>\n" +
        " <td><h5>" + TestCommonOptions.COMMON_POSITIONED_OPTION.key() + "</h5></td>\n" +
        " <td style=\"word-wrap: break-word;\">" + TestCommonOptions.COMMON_POSITIONED_OPTION.defaultValue() + "</td>\n" +
        " <td>" + formatter.format(TestCommonOptions.COMMON_POSITIONED_OPTION.description()) + "</td>\n" +
        " </tr>\n" +
        " <tr>\n" +
        " <td><h5>" + TestCommonOptions.COMMON_OPTION.key() + "</h5></td>\n" +
        " <td style=\"word-wrap: break-word;\">" + TestCommonOptions.COMMON_OPTION.defaultValue() + "</td>\n" +
        " <td>" + formatter.format(TestCommonOptions.COMMON_OPTION.description()) + "</td>\n" +
        " </tr>\n" +
        " </tbody>\n" +
        "</table>\n";

    String output = FileUtils.readFile(Paths.get(outputDirectory, ConfigOptionsDocGenerator.COMMON_SECTION_FILE_NAME).toFile(), StandardCharsets.UTF_8.name());

    assertEquals(expected, output);
}
Example #21
Source File: RocksDBIncrementalRestoreOperation.java From flink with Apache License 2.0

private void cleanUpPathQuietly(@Nonnull Path path) {
    try {
        FileUtils.deleteDirectory(path.toFile());
    } catch (IOException ex) {
        LOG.warn("Failed to clean up path " + path, ex);
    }
}
Example #22
Source File: FileCache.java From flink with Apache License 2.0

@Override
public Path call() throws IOException {
    final File file = blobService.getFile(jobID, blobKey);

    if (isDirectory) {
        Path directory = FileUtils.expandDirectory(new Path(file.getAbsolutePath()), target);
        return directory;
    } else {
        //noinspection ResultOfMethodCallIgnored
        file.setExecutable(isExecutable);
        return Path.fromLocalFile(file);
    }
}
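Examples #16 and #22 together show the zip round trip that the file cache relies on: FileUtils.compressDirectory packs a directory into a .zip archive, and FileUtils.expandDirectory unpacks it again. A condensed sketch of that round trip, using the same two calls but with made-up temporary directories, might look like this:

import java.io.File;
import java.nio.file.Files;

import org.apache.flink.core.fs.Path;
import org.apache.flink.util.FileUtils;

public class ZipRoundTrip {
    public static void main(String[] args) throws Exception {
        // illustrative temporary directories; the names are not taken from the examples above
        File sourceDir = Files.createTempDirectory("to-zip").toFile();
        FileUtils.writeFileUtf8(new File(sourceDir, "cacheFile"), "some content");

        // pack the directory into <sourceDir>.zip, as in Example #16
        Path zip = FileUtils.compressDirectory(
            new Path(sourceDir.getAbsolutePath()),
            new Path(sourceDir.getAbsolutePath() + ".zip"));

        // unpack the archive into a fresh target directory, as in Example #22
        Path target = new Path(Files.createTempDirectory("unzipped").toString());
        Path restored = FileUtils.expandDirectory(zip, target);
        System.out.println("restored to " + restored);
    }
}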
Example #23
Source File: AbstractTaskManagerFileHandlerTest.java From Flink-CEPplus with Apache License 2.0

/**
 * Tests that file cache entries expire.
 */
@Test
public void testFileCacheExpiration() throws Exception {
    final Time cacheEntryDuration = Time.milliseconds(5L);

    final File outputFile = runFileCachingTest(cacheEntryDuration, cacheEntryDuration);

    assertThat(outputFile.length(), is(greaterThan(0L)));
    assertThat(FileUtils.readFileUtf8(outputFile), is(equalTo(fileContent2)));
}
Example #24
Source File: LocalFileSystemTest.java From flink with Apache License 2.0

/**
 * Test that {@link FileUtils#deletePathIfEmpty(FileSystem, Path)} deletes the path if it is
 * empty. A path can only be empty if it is a directory which does not contain any
 * files/directories.
 */
@Test
public void testDeletePathIfEmpty() throws IOException {
    File file = temporaryFolder.newFile();
    File directory = temporaryFolder.newFolder();
    File directoryFile = new File(directory, UUID.randomUUID().toString());

    assertTrue(directoryFile.createNewFile());

    Path filePath = new Path(file.toURI());
    Path directoryPath = new Path(directory.toURI());
    Path directoryFilePath = new Path(directoryFile.toURI());

    FileSystem fs = FileSystem.getLocalFileSystem();

    // verify that the files have been created
    assertTrue(fs.exists(filePath));
    assertTrue(fs.exists(directoryFilePath));

    // delete the single file
    assertFalse(FileUtils.deletePathIfEmpty(fs, filePath));
    assertTrue(fs.exists(filePath));

    // try to delete the non-empty directory
    assertFalse(FileUtils.deletePathIfEmpty(fs, directoryPath));
    assertTrue(fs.exists(directoryPath));

    // delete the file contained in the directory
    assertTrue(fs.delete(directoryFilePath, false));

    // now the deletion should work
    assertTrue(FileUtils.deletePathIfEmpty(fs, directoryPath));
    assertFalse(fs.exists(directoryPath));
}
Example #25
Source File: EuclideanGraphWeighingITCase.java From flink with Apache License 2.0

@Before
public void before() throws Exception {
    resultPath = tempFolder.newFile().toURI().toString();

    File verticesFile = tempFolder.newFile();
    FileUtils.writeFileUtf8(verticesFile, EuclideanGraphData.VERTICES);

    File edgesFile = tempFolder.newFile();
    FileUtils.writeFileUtf8(edgesFile, EuclideanGraphData.EDGES);

    verticesPath = verticesFile.toURI().toString();
    edgesPath = edgesFile.toURI().toString();
}
Example #26
Source File: FileUploadHandlerTest.java From flink with Apache License 2.0

@Test
public void testUploadDirectoryRegeneration() throws Exception {
    OkHttpClient client = createOkHttpClientWithNoTimeouts();

    MultipartUploadResource.MultipartFileHandler fileHandler = MULTIPART_UPLOAD_RESOURCE.getFileHandler();

    FileUtils.deleteDirectory(MULTIPART_UPLOAD_RESOURCE.getUploadDirectory().toFile());

    Request fileRequest = buildFileRequest(fileHandler.getMessageHeaders().getTargetRestEndpointURL());
    try (Response response = client.newCall(fileRequest).execute()) {
        assertEquals(fileHandler.getMessageHeaders().getResponseStatusCode().code(), response.code());
    }

    verifyNoFileIsRegisteredToDeleteOnExitHook();
}
Example #27
Source File: AsynchronousFileIOChannel.java From Flink-CEPplus with Apache License 2.0

@Override
public void write() throws IOException {
    try {
        FileUtils.writeCompletely(this.channel.fileChannel, this.segment.wrap(0, this.segment.size()));
    } catch (NullPointerException npex) {
        throw new IOException("Memory segment has been released.");
    }
}
Example #28
Source File: MusicProfilesITCase.java From flink with Apache License 2.0

@Before
public void before() throws Exception {
    topSongsResultPath = tempFolder.newFile().toURI().toString();
    communitiesResultPath = tempFolder.newFile().toURI().toString();

    File tripletsFile = tempFolder.newFile();
    FileUtils.writeFileUtf8(tripletsFile, MusicProfilesData.USER_SONG_TRIPLETS);
    tripletsPath = tripletsFile.toURI().toString();

    File mismatchesFile = tempFolder.newFile();
    FileUtils.writeFileUtf8(mismatchesFile, MusicProfilesData.MISMATCHES);
    mismatchesPath = mismatchesFile.toURI().toString();
}
Example #29
Source File: BlobServer.java From flink with Apache License 2.0

/**
 * Removes all BLOBs from local and HA store belonging to the given job ID.
 *
 * @param jobId
 *        ID of the job this blob belongs to
 * @param cleanupBlobStoreFiles
 *        True if the corresponding blob store files shall be cleaned up as well. Otherwise false.
 *
 * @return <tt>true</tt> if the job directory is successfully deleted or non-existing;
 *         <tt>false</tt> otherwise
 */
public boolean cleanupJob(JobID jobId, boolean cleanupBlobStoreFiles) {
    checkNotNull(jobId);

    final File jobDir = new File(BlobUtils.getStorageLocationPath(storageDir.getAbsolutePath(), jobId));

    readWriteLock.writeLock().lock();

    try {
        // delete locally
        boolean deletedLocally = false;
        try {
            FileUtils.deleteDirectory(jobDir);

            // NOTE: Instead of going through blobExpiryTimes, keep lingering entries - they
            // will be cleaned up by the timer task which tolerates non-existing files
            // If inserted again with the same IDs (via put()), the TTL will be updated again.

            deletedLocally = true;
        } catch (IOException e) {
            LOG.warn("Failed to locally delete BLOB storage directory at " + jobDir.getAbsolutePath(), e);
        }

        // delete in HA blob store files
        final boolean deletedHA = !cleanupBlobStoreFiles || blobStore.deleteAll(jobId);

        return deletedLocally && deletedHA;
    } finally {
        readWriteLock.writeLock().unlock();
    }
}
Example #30
Source File: PythonEnvUtils.java From flink with Apache License 2.0

private static void addToPythonPath(PythonEnvironment env, List<Path> pythonFiles) throws IOException {
    List<String> pythonPathList = new ArrayList<>();
    Path tmpDirPath = new Path(env.tempDirectory);

    for (Path pythonFile : pythonFiles) {
        String sourceFileName = pythonFile.getName();
        // add random UUID parent directory to avoid name conflict.
        Path targetPath = new Path(
            tmpDirPath,
            String.join(File.separator, UUID.randomUUID().toString(), sourceFileName));
        if (!pythonFile.getFileSystem().isDistributedFS()) {
            // if the path is local file, try to create symbolic link.
            new File(targetPath.getParent().toString()).mkdir();
            createSymbolicLink(
                Paths.get(new File(pythonFile.getPath()).getAbsolutePath()),
                Paths.get(targetPath.toString()));
        } else {
            try {
                FileUtils.copy(pythonFile, targetPath, true);
            } catch (Exception e) {
                LOG.error("Error occurred when copying {} to {}, skipping...", pythonFile, targetPath, e);
                continue;
            }
        }
        if (Files.isRegularFile(Paths.get(targetPath.toString()).toRealPath()) && sourceFileName.endsWith(".py")) {
            // add the parent directory of .py file itself to PYTHONPATH
            pythonPathList.add(targetPath.getParent().toString());
        } else {
            pythonPathList.add(targetPath.toString());
        }
    }

    if (env.pythonPath != null && !env.pythonPath.isEmpty()) {
        pythonPathList.add(env.pythonPath);
    }
    env.pythonPath = String.join(File.pathSeparator, pythonPathList);
}