Commit c622a87c authored by jerryshao, committed by Marcelo Vanzin

[SPARK-20059][YARN] Use the correct classloader for HBaseCredentialProvider

## What changes were proposed in this pull request?

Currently we use the system classloader to find the HBase jars, so if they are supplied with `--jars` the lookup fails with a `ClassNotFoundException`. This change switches to the child classloader (via `Utils.getContextOrSparkClassLoader`), which can see user-supplied jars.

This change also puts the added jars and the main application jar on the classpath of the submitted application in yarn cluster mode; otherwise HBase jars specified with `--jars` are never honored in cluster mode, and fetching tokens on the client side always fails.
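
As an illustration of the failure mode (a minimal, self-contained sketch; the jar path and class name below are placeholders, not taken from this patch): a class that exists only in a jar added at runtime is visible to a child `URLClassLoader`, but not to the system classloader, so resolving it without an explicit loader throws `ClassNotFoundException`.

```scala
import java.io.File
import java.net.URLClassLoader

object ChildLoaderSketch {
  def main(args: Array[String]): Unit = {
    // Hypothetical jar supplied at runtime, e.g. via --jars.
    val userJar = new File("/tmp/hbase-client.jar").toURI.toURL
    val childLoader = new URLClassLoader(Array(userJar), getClass.getClassLoader)

    // Fails with ClassNotFoundException if the class lives only in the user jar,
    // because the system/application classloader cannot see it:
    //   Class.forName("org.apache.hadoop.hbase.HBaseConfiguration")

    // Succeeds: resolve through the child loader (or the thread context
    // classloader once it has been set to the child loader).
    val cls = Class.forName(
      "org.apache.hadoop.hbase.HBaseConfiguration", true, childLoader)
    println(cls.getName)
  }
}
```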

## How was this patch tested?

Unit test and local verification.

Author: jerryshao <sshao@hortonworks.com>

Closes #17388 from jerryshao/SPARK-20059.
parent b56ad2b1
```diff
@@ -485,12 +485,17 @@ object SparkSubmit extends CommandLineUtils {
     // In client mode, launch the application main class directly
     // In addition, add the main application jar and any added jars (if any) to the classpath
-    if (deployMode == CLIENT) {
+    // Also add the main application jar and any added jars to classpath in case YARN client
+    // requires these jars.
+    if (deployMode == CLIENT || isYarnCluster) {
       childMainClass = args.mainClass
       if (isUserJar(args.primaryResource)) {
         childClasspath += args.primaryResource
       }
       if (args.jars != null) { childClasspath ++= args.jars.split(",") }
+    }
+
+    if (deployMode == CLIENT) {
       if (args.childArgs != null) { childArgs ++= args.childArgs }
     }
```
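For context on how the `childClasspath` built above takes effect: the launcher makes those entries visible to the child classloader before invoking `childMainClass`, so a YARN client running in that JVM can resolve user jars when it fetches tokens. The sketch below is illustrative only, not `SparkSubmit`'s actual launch code; the object, method, and parameter names are assumptions.

```scala
import java.io.File
import java.lang.reflect.Modifier
import java.net.URLClassLoader

object ChildClasspathRunner {
  // Illustrative sketch: turn a child classpath into a classloader, make it the
  // thread context classloader, and invoke the child main class reflectively.
  def runWithChildClasspath(
      childMainClass: String,
      childClasspath: Seq[String],
      childArgs: Seq[String]): Unit = {
    val urls = childClasspath.map(p => new File(p).toURI.toURL).toArray
    val loader = new URLClassLoader(urls, getClass.getClassLoader)

    // Setting the context classloader is what lets code that resolves classes
    // via the context classloader (see the HBase provider change below) find
    // jars supplied with --jars.
    Thread.currentThread().setContextClassLoader(loader)

    val mainClass = Class.forName(childMainClass, true, loader)
    val mainMethod = mainClass.getMethod("main", classOf[Array[String]])
    require(Modifier.isStatic(mainMethod.getModifiers), "main method must be static")
    mainMethod.invoke(null, childArgs.toArray)
  }
}
```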
```diff
@@ -213,7 +213,12 @@ class SparkSubmitSuite
     childArgsStr should include ("--arg arg1 --arg arg2")
     childArgsStr should include regex ("--jar .*thejar.jar")
     mainClass should be ("org.apache.spark.deploy.yarn.Client")
-    classpath should have length (0)
+    // In yarn cluster mode, also adding jars to classpath
+    classpath(0) should endWith ("thejar.jar")
+    classpath(1) should endWith ("one.jar")
+    classpath(2) should endWith ("two.jar")
+    classpath(3) should endWith ("three.jar")
     sysProps("spark.executor.memory") should be ("5g")
     sysProps("spark.driver.memory") should be ("4g")
```
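The new assertions check that in yarn cluster mode the prepared child classpath starts with the primary jar and then lists the `--jars` entries in order. They would be exercised by an argument list along these lines (a sketch: only the jar names, the deploy mode, and the memory settings are taken from the assertions above; the remaining flags and the main class are assumptions):

```scala
// Sketch of spark-submit arguments matching the assertions in this hunk.
val clArgs = Seq(
  "--deploy-mode", "cluster",
  "--master", "yarn",
  "--class", "org.SomeClass",              // hypothetical main class
  "--executor-memory", "5g",
  "--driver-memory", "4g",
  "--jars", "one.jar,two.jar,three.jar",
  "thejar.jar",
  "arg1", "arg2")
```

The suite feeds such a sequence through its argument parser and then inspects the resulting `classpath`, `sysProps`, and `mainClass`.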
```diff
@@ -26,6 +26,7 @@ import org.apache.hadoop.security.token.{Token, TokenIdentifier}
 import org.apache.spark.SparkConf
 import org.apache.spark.internal.Logging
+import org.apache.spark.util.Utils

 private[security] class HBaseCredentialProvider extends ServiceCredentialProvider with Logging {
@@ -36,7 +37,7 @@ private[security] class HBaseCredentialProvider extends ServiceCredentialProvider with Logging {
       sparkConf: SparkConf,
       creds: Credentials): Option[Long] = {
     try {
-      val mirror = universe.runtimeMirror(getClass.getClassLoader)
+      val mirror = universe.runtimeMirror(Utils.getContextOrSparkClassLoader)
       val obtainToken = mirror.classLoader.
         loadClass("org.apache.hadoop.hbase.security.token.TokenUtil").
         getMethod("obtainToken", classOf[Configuration])
@@ -60,7 +61,7 @@ private[security] class HBaseCredentialProvider extends ServiceCredentialProvider with Logging {
   private def hbaseConf(conf: Configuration): Configuration = {
     try {
-      val mirror = universe.runtimeMirror(getClass.getClassLoader)
+      val mirror = universe.runtimeMirror(Utils.getContextOrSparkClassLoader)
       val confCreate = mirror.classLoader.
         loadClass("org.apache.hadoop.hbase.HBaseConfiguration").
         getMethod("create", classOf[Configuration])
```
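`Utils.getContextOrSparkClassLoader` prefers the current thread's context classloader and falls back to the loader that loaded Spark's classes, which is why the reflective HBase lookups above can now see jars added with `--jars`. A self-contained approximation of that lookup order (a sketch, not Spark's source):

```scala
object LoaderLookupSketch {
  // Prefer the thread context classloader (which carries user jars added via
  // --jars once the launcher has set it up), otherwise fall back to the loader
  // that loaded this class.
  def contextOrDefaultClassLoader: ClassLoader =
    Option(Thread.currentThread().getContextClassLoader)
      .getOrElse(getClass.getClassLoader)

  def main(args: Array[String]): Unit = {
    // Example: resolve a class reflectively against that loader (the HBase
    // class name is for illustration and requires HBase jars on the path).
    // Class.forName("org.apache.hadoop.hbase.HBaseConfiguration", true,
    //   contextOrDefaultClassLoader)
    println(contextOrDefaultClassLoader)
  }
}
```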