org.apache.hadoop.fs.permission.FsAction Scala Examples
The following examples show how to use org.apache.hadoop.fs.permission.FsAction.
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
Example 1
Source File: DFSJarStore.scala From incubator-retired-gearpump with Apache License 2.0 | 5 votes |
package org.apache.gearpump.jarstore.dfs

import java.io.{InputStream, OutputStream}

import com.typesafe.config.Config
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.fs.permission.{FsAction, FsPermission}

import org.apache.gearpump.jarstore.JarStore
import org.apache.gearpump.util.Constants

// NOTE(review): the enclosing class declaration was lost when this snippet was
// extracted; the members below belong to a DFS-backed JarStore implementation.

  /**
   * Opens `fileName` under the store's root path and returns its contents
   * as an input stream. The caller is responsible for closing the stream.
   */
  override def getFile(fileName: String): InputStream = {
    val target = new Path(rootPath, fileName)
    val fileSystem = target.getFileSystem(new Configuration())
    fileSystem.open(target)
  }

  /** Creates `path` with world-accessible permissions (rwxrwxrwx) unless it already exists. */
  private def createDirIfNotExists(path: Path): Unit = {
    val fileSystem = path.getFileSystem(new Configuration())
    if (!fileSystem.exists(path)) {
      fileSystem.mkdirs(path, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL))
    }
  }
}
Example 2
Source File: packageSuite.scala From kyuubi with Apache License 2.0 | 5 votes |
package yaooqinn.kyuubi.yarn

import org.apache.hadoop.fs.permission.FsAction
import org.apache.spark.SparkFunSuite

/** Verifies the constant values exposed by the `yarn` package object. */
class packageSuite extends SparkFunSuite {

  test("yarn package object test") {
    // Application identity as registered with YARN.
    assert(KYUUBI_YARN_APP_NAME === "KYUUBI SERVER")
    assert(KYUUBI_YARN_APP_TYPE === "KYUUBI")

    // Staging directory is private to the submitting user (rwx------).
    assert(KYUUBI_STAGING === ".kyuubiStaging")
    assert(STAGING_DIR_PERMISSION.getUserAction === FsAction.ALL)
    assert(STAGING_DIR_PERMISSION.getGroupAction === FsAction.NONE)
    assert(STAGING_DIR_PERMISSION.getOtherAction === FsAction.NONE)

    // Uploaded application files are rw for the owner, read-only for others (rw-r--r--).
    assert(APP_FILE_PERMISSION.getUserAction === FsAction.READ_WRITE)
    assert(APP_FILE_PERMISSION.getGroupAction === FsAction.READ)
    assert(APP_FILE_PERMISSION.getOtherAction === FsAction.READ)

    // Well-known names for the distributed Spark/Hadoop configuration artifacts.
    assert(SPARK_CONF_DIR === "__spark_conf__")
    assert(SPARK_CONF_FILE === "__spark_conf__.properties")
    assert(HADOOP_CONF_DIR === "__hadoop_conf__")
    assert(SPARK_CONF_ARCHIVE === SPARK_CONF_DIR + ".zip")
    assert(SPARK_LIB_DIR === "__spark_libs__")
    assert(LOCAL_SCHEME === "local")
  }
}
Example 3
Source File: SparkHadoopUtilSuite.scala From Spark-2.3.1 with Apache License 2.0 | 5 votes |
package org.apache.spark.deploy

import java.security.PrivilegedExceptionAction

import scala.util.Random

import org.apache.hadoop.fs.FileStatus
import org.apache.hadoop.fs.permission.{FsAction, FsPermission}
import org.apache.hadoop.security.UserGroupInformation
import org.scalatest.Matchers

import org.apache.spark.SparkFunSuite

/**
 * Exercises SparkHadoopUtil.checkAccessPermission against synthetic FileStatus
 * instances covering every owner / group / other permission combination.
 */
class SparkHadoopUtilSuite extends SparkFunSuite with Matchers {

  test("check file permission") {
    import FsAction._
    val testUser = s"user-${Random.nextInt(100)}"
    val testGroups = Array(s"group-${Random.nextInt(100)}")
    val testUgi = UserGroupInformation.createUserForTesting(testUser, testGroups)

    testUgi.doAs(new PrivilegedExceptionAction[Void] {
      override def run(): Void = {
        val sparkHadoopUtil = new SparkHadoopUtil

        // Owner matches and the user bits grant access.
        var status = fileStatus(testUser, testGroups.head, READ_WRITE, READ_WRITE, NONE)
        sparkHadoopUtil.checkAccessPermission(status, READ) should be(true)
        sparkHadoopUtil.checkAccessPermission(status, WRITE) should be(true)

        // Owner matches but the user bits deny access.
        status = fileStatus(testUser, testGroups.head, NONE, READ_WRITE, NONE)
        sparkHadoopUtil.checkAccessPermission(status, READ) should be(false)
        sparkHadoopUtil.checkAccessPermission(status, WRITE) should be(false)

        val otherUser = s"test-${Random.nextInt(100)}"
        val otherGroup = s"test-${Random.nextInt(100)}"

        // Group matches and the group bits grant access.
        status = fileStatus(otherUser, testGroups.head, NONE, READ_WRITE, NONE)
        sparkHadoopUtil.checkAccessPermission(status, READ) should be(true)
        sparkHadoopUtil.checkAccessPermission(status, WRITE) should be(true)

        // Group matches but the group bits deny access.
        status = fileStatus(otherUser, testGroups.head, READ_WRITE, NONE, NONE)
        sparkHadoopUtil.checkAccessPermission(status, READ) should be(false)
        sparkHadoopUtil.checkAccessPermission(status, WRITE) should be(false)

        // Neither owner nor group matches; the "other" bits grant access.
        status = fileStatus(otherUser, otherGroup, READ_WRITE, READ_WRITE, READ_WRITE)
        sparkHadoopUtil.checkAccessPermission(status, READ) should be(true)
        sparkHadoopUtil.checkAccessPermission(status, WRITE) should be(true)

        // Neither owner nor group matches and the "other" bits deny access.
        status = fileStatus(otherUser, otherGroup, READ_WRITE, READ_WRITE, NONE)
        sparkHadoopUtil.checkAccessPermission(status, READ) should be(false)
        sparkHadoopUtil.checkAccessPermission(status, WRITE) should be(false)
        null
      }
    })
  }

  /** Builds a zero-length FileStatus with the given owner, group and permission bits. */
  private def fileStatus(
      owner: String,
      group: String,
      userAction: FsAction,
      groupAction: FsAction,
      otherAction: FsAction): FileStatus = {
    new FileStatus(0L, false, 0, 0L, 0L, 0L,
      new FsPermission(userAction, groupAction, otherAction), owner, group, null)
  }
}