Commit 9e15391

Use the right classloader and add jars to classpath in yarn-client mode
Change-Id: I04d6ffcdca58277e60d3dd0c456f4c6a2936b320
1 parent 478fbc8 · commit 9e15391

3 files changed, +10 -4 lines
core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

Lines changed: 1 addition & 1 deletion
@@ -485,7 +485,7 @@ object SparkSubmit extends CommandLineUtils {
 
     // In client mode, launch the application main class directly
     // In addition, add the main application jar and any added jars (if any) to the classpath
-    if (deployMode == CLIENT) {
+    if (deployMode == CLIENT || isYarnCluster) {
       childMainClass = args.mainClass
       if (isUserJar(args.primaryResource)) {
         childClasspath += args.primaryResource
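
With this change, the yarn-cluster path goes through the same classpath-building branch as client mode. A simplified sketch of what that branch accumulates (illustrative only, not SparkSubmit's actual code; the helper name and parameters below are made up for the example):

// Simplified sketch: in client mode, and now also in yarn-cluster mode, the
// primary application jar and any --jars entries end up on the child classpath.
import scala.collection.mutable.ArrayBuffer

object ChildClasspathSketch {
  def childClasspathFor(
      deployMode: String,
      isYarnCluster: Boolean,
      primaryResource: String,
      jars: Option[String]): Seq[String] = {
    val childClasspath = new ArrayBuffer[String]()
    if (deployMode == "client" || isYarnCluster) {
      childClasspath += primaryResource                         // main application jar
      jars.foreach(list => childClasspath ++= list.split(","))  // any --jars entries
    }
    childClasspath.toSeq
  }
}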

core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala

Lines changed: 6 additions & 1 deletion
@@ -213,7 +213,12 @@ class SparkSubmitSuite
     childArgsStr should include ("--arg arg1 --arg arg2")
     childArgsStr should include regex ("--jar .*thejar.jar")
     mainClass should be ("org.apache.spark.deploy.yarn.Client")
-    classpath should have length (0)
+
+    // In yarn cluster mode, also adding jars to classpath
+    classpath(0) should endWith ("thejar.jar")
+    classpath(1) should endWith ("one.jar")
+    classpath(2) should endWith ("two.jar")
+    classpath(3) should endWith ("three.jar")
 
     sysProps("spark.executor.memory") should be ("5g")
     sysProps("spark.driver.memory") should be ("4g")

resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/security/HBaseCredentialProvider.scala

Lines changed: 3 additions & 2 deletions
@@ -26,6 +26,7 @@ import org.apache.hadoop.security.token.{Token, TokenIdentifier}
 
 import org.apache.spark.SparkConf
 import org.apache.spark.internal.Logging
+import org.apache.spark.util.Utils
 
 private[security] class HBaseCredentialProvider extends ServiceCredentialProvider with Logging {
 
@@ -36,7 +37,7 @@ private[security] class HBaseCredentialProvider extends ServiceCredentialProvide
       sparkConf: SparkConf,
       creds: Credentials): Option[Long] = {
     try {
-      val mirror = universe.runtimeMirror(getClass.getClassLoader)
+      val mirror = universe.runtimeMirror(Utils.getContextOrSparkClassLoader)
       val obtainToken = mirror.classLoader.
         loadClass("org.apache.hadoop.hbase.security.token.TokenUtil").
         getMethod("obtainToken", classOf[Configuration])
@@ -60,7 +61,7 @@ private[security] class HBaseCredentialProvider extends ServiceCredentialProvide
 
   private def hbaseConf(conf: Configuration): Configuration = {
     try {
-      val mirror = universe.runtimeMirror(getClass.getClassLoader)
+      val mirror = universe.runtimeMirror(Utils.getContextOrSparkClassLoader)
       val confCreate = mirror.classLoader.
         loadClass("org.apache.hadoop.hbase.HBaseConfiguration").
         getMethod("create", classOf[Configuration])
