Commit bd0bab47 authored by Jey Kottalam

SparkEnv isn't available this early, and not needed anyway

parent 4f43fd79
@@ -25,17 +25,6 @@ import org.apache.hadoop.mapred.JobConf
  */
 class SparkHadoopUtil {
-
-  def getUserNameFromEnvironment(): String = {
-    // defaulting to -D ...
-    System.getProperty("user.name")
-  }
-
-  def runAsUser(func: (Product) => Unit, args: Product) {
-    // Add support, if exists - for now, simply run func !
-    func(args)
-  }
-
   // Return an appropriate (subclass) of Configuration. Creating config can initializes some hadoop subsystems
   def newConfiguration(): Configuration = new Configuration()
...
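For context, the two methods removed above form a placeholder "run as user" hook: the caller packs its arguments into a Product (any TupleN) and the hook just invokes the callback. A minimal, self-contained Scala sketch of that calling convention, with illustrative names that are not from the commit:

    // Sketch of the Product-based calling convention being removed.
    object ProductTrampoline {
      // Placeholder for real user-switching support; for now it only calls func.
      def runAsUser(func: Product => Unit, args: Product): Unit = func(args)

      // The callee must unpack its arguments by position and cast them,
      // which is why this style is easy to get wrong.
      def greet(args: Product): Unit = {
        assert(args.productArity == 2)
        val name  = args.productElement(0).asInstanceOf[String]
        val times = args.productElement(1).asInstanceOf[Int]
        (1 to times).foreach(_ => println(s"hello, $name"))
      }

      def main(argv: Array[String]): Unit = {
        runAsUser(greet, ("spark", 2)) // Tuple2 is a Product
      }
    }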
@@ -81,20 +81,6 @@ private[spark] class StandaloneExecutorBackend(
 private[spark] object StandaloneExecutorBackend {
   def run(driverUrl: String, executorId: String, hostname: String, cores: Int) {
-    val env = SparkEnv.get
-    env.hadoop.runAsUser(run0, Tuple4[Any, Any, Any, Any] (driverUrl, executorId, hostname, cores))
-  }
-
-  // This will be run 'as' the user
-  def run0(args: Product) {
-    assert(4 == args.productArity)
-    runImpl(args.productElement(0).asInstanceOf[String],
-      args.productElement(1).asInstanceOf[String],
-      args.productElement(2).asInstanceOf[String],
-      args.productElement(3).asInstanceOf[Int])
-  }
-
-  private def runImpl(driverUrl: String, executorId: String, hostname: String, cores: Int) {
     // Debug code
     Utils.checkHost(hostname)
...
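The commit title refers to this hunk: run executes at executor startup, before anything in the new JVM has created a SparkEnv, so the SparkEnv.get lookup comes back empty and the env.hadoop dereference fails. A simplified stand-in, not the real SparkEnv, illustrating that ordering problem:

    // Illustrative stand-in for the startup-ordering bug (assumes a
    // thread-local registry like SparkEnv's; hypothetical names).
    object MiniSparkEnv {
      private val env = new ThreadLocal[MiniSparkEnv]
      def get: MiniSparkEnv = env.get() // null until set() has been called
      def set(e: MiniSparkEnv): Unit = env.set(e)
    }
    class MiniSparkEnv { val hadoop = "hadoop-util" }

    object Backend {
      def run(): Unit = {
        val env = MiniSparkEnv.get // fresh JVM: nothing has called set() yet
        println(env.hadoop)        // NullPointerException at this line
      }
      def main(argv: Array[String]): Unit = run()
    }

With the indirection gone, run can do its work directly; the added lines are not captured in this view.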
@@ -32,22 +32,6 @@ object SparkHadoopUtil {
   val yarnConf = newConfiguration()
 
-  def getUserNameFromEnvironment(): String = {
-    // defaulting to env if -D is not present ...
-    val retval = System.getProperty(Environment.USER.name, System.getenv(Environment.USER.name))
-
-    // If nothing found, default to user we are running as
-    if (retval == null) System.getProperty("user.name") else retval
-  }
-
-  def runAsUser(func: (Product) => Unit, args: Product) {
-    runAsUser(func, args, getUserNameFromEnvironment())
-  }
-
-  def runAsUser(func: (Product) => Unit, args: Product, user: String) {
-    func(args)
-  }
-
   // Note that all params which start with SPARK are propagated all the way through, so if in yarn mode, this MUST be set to true.
   def isYarnMode(): Boolean = {
     val yarnMode = System.getProperty("SPARK_YARN_MODE", System.getenv("SPARK_YARN_MODE"))
...
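Both the removed getUserNameFromEnvironment and the surviving isYarnMode resolve a setting the same way: a -D system property wins, then the environment variable of the same name, then a default. A small standalone sketch of that resolution order (names illustrative):

    // Property beats environment variable beats hard-coded default.
    object EnvOrProp {
      def resolve(key: String, default: String): String = {
        // System.getProperty(key, fallback) returns fallback when the property
        // is unset; System.getenv(key) returns null for an unset variable.
        val value = System.getProperty(key, System.getenv(key))
        if (value == null) default else value
      }

      def main(argv: Array[String]): Unit = {
        // e.g. run plain, then with -DSPARK_YARN_MODE=true on the JVM
        println(resolve("SPARK_YARN_MODE", "false"))
      }
    }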