Skip to content
Snippets Groups Projects
Commit bdabfd43 authored by Marcelo Vanzin's avatar Marcelo Vanzin
Browse files

[SPARK-13955][YARN] Also look for Spark jars in the build directory.

Move the logic to find Spark jars to CommandBuilderUtils and make it
available for YARN code, so that it's possible to easily launch Spark
on YARN from a build directory.

Tested by running SparkPi from the build directory on YARN.

Author: Marcelo Vanzin <vanzin@cloudera.com>

Closes #11970 from vanzin/SPARK-13955.
parent d46c71b3
No related branches found
No related tags found
No related merge requests found
......@@ -174,7 +174,7 @@ abstract class AbstractCommandBuilder {
// Add Spark jars to the classpath. For the testing case, we rely on the test code to set and
// propagate the test classpath appropriately. For normal invocation, look for the jars
// directory under SPARK_HOME.
String jarsDir = findJarsDir(!isTesting);
String jarsDir = findJarsDir(getSparkHome(), getScalaVersion(), !isTesting);
if (jarsDir != null) {
addToClassPath(cp, join(File.separator, jarsDir, "*"));
}
......@@ -311,27 +311,6 @@ abstract class AbstractCommandBuilder {
return props;
}
/**
 * Returns the absolute path of the directory holding the Spark jars, or null when the
 * directory is missing and {@code failIfNotFound} is false.
 *
 * For a distribution (marked by a RELEASE file under SPARK_HOME) the jars live under
 * "lib"; otherwise the assembly build output directory for the current Scala version
 * is used.
 */
private String findJarsDir(boolean failIfNotFound) {
  // TODO: change to the correct directory once the assembly build is changed.
  String sparkHome = getSparkHome();
  boolean isDistribution = new File(sparkHome, "RELEASE").isFile();
  if (isDistribution) {
    File distLibDir = new File(sparkHome, "lib");
    // In a distribution the directory is expected to exist; only tolerate its absence
    // when the caller asked us not to fail.
    checkState(!failIfNotFound || distLibDir.isDirectory(),
      "Library directory '%s' does not exist.",
      distLibDir.getAbsolutePath());
    return distLibDir.getAbsolutePath();
  }
  File buildLibDir =
    new File(sparkHome, String.format("assembly/target/scala-%s", getScalaVersion()));
  if (buildLibDir.isDirectory()) {
    return buildLibDir.getAbsolutePath();
  }
  // Build directory is missing: either fail with a hint to build Spark, or return null.
  checkState(!failIfNotFound,
    "Library directory '%s' does not exist; make sure Spark is built.",
    buildLibDir.getAbsolutePath());
  return null;
}
private String getConfDir() {
String confDir = getenv("SPARK_CONF_DIR");
return confDir != null ? confDir : join(File.separator, getSparkHome(), "conf");
......
......@@ -349,4 +349,29 @@ class CommandBuilderUtils {
return Integer.parseInt(version[1]);
}
}
/**
 * Find the location of the Spark jars dir, depending on whether we're looking at a build
 * or a distribution directory.
 *
 * A directory containing a RELEASE file is treated as a distribution, whose jars are under
 * "lib"; any other directory is treated as a build tree, whose jars are under the assembly
 * output directory for the given Scala version.
 *
 * @param sparkHome Root of the Spark installation or build tree.
 * @param scalaVersion Binary Scala version (e.g. "2.11") used to locate build output.
 * @param failIfNotFound Whether to throw when the expected directory is missing.
 * @return Absolute path of the jars directory, or null if missing and not failing.
 */
static String findJarsDir(String sparkHome, String scalaVersion, boolean failIfNotFound) {
  // TODO: change to the correct directory once the assembly build is changed.
  if (new File(sparkHome, "RELEASE").isFile()) {
    File distDir = new File(sparkHome, "lib");
    checkState(!failIfNotFound || distDir.isDirectory(),
      "Library directory '%s' does not exist.",
      distDir.getAbsolutePath());
    return distDir.getAbsolutePath();
  }
  File buildDir = new File(sparkHome, String.format("assembly/target/scala-%s", scalaVersion));
  if (!buildDir.isDirectory()) {
    checkState(!failIfNotFound,
      "Library directory '%s' does not exist; make sure Spark is built.",
      buildDir.getAbsolutePath());
    return null;
  }
  return buildDir.getAbsolutePath();
}
}
......@@ -468,12 +468,11 @@ private[spark] class Client(
// No configuration, so fall back to uploading local jar files.
logWarning(s"Neither ${SPARK_JARS.key} nor ${SPARK_ARCHIVE.key} is set, falling back " +
"to uploading libraries under SPARK_HOME.")
val jarsDir = new File(sparkConf.getenv("SPARK_HOME"), "lib")
if (jarsDir.isDirectory()) {
jarsDir.listFiles().foreach { f =>
if (f.isFile() && f.getName().toLowerCase().endsWith(".jar")) {
distribute(f.getAbsolutePath(), targetDir = Some(LOCALIZED_LIB_DIR))
}
val jarsDir = new File(YarnCommandBuilderUtils.findJarsDir(
sparkConf.getenv("SPARK_HOME")))
jarsDir.listFiles().foreach { f =>
if (f.isFile() && f.getName().toLowerCase().endsWith(".jar")) {
distribute(f.getAbsolutePath(), targetDir = Some(LOCALIZED_LIB_DIR))
}
}
}
......
......@@ -19,6 +19,7 @@ package org.apache.spark.launcher
import scala.collection.JavaConverters._
import scala.collection.mutable.ListBuffer
import scala.util.Properties
/**
* Exposes methods from the launcher library that are used by the YARN backend.
......@@ -29,6 +30,14 @@ private[spark] object YarnCommandBuilderUtils {
CommandBuilderUtils.quoteForBatchScript(arg)
}
/**
 * Returns the location of the Spark jars directory under the given Spark home, failing
 * if it cannot be found. The binary Scala version (major.minor) of the current runtime
 * is used to locate build-directory output.
 */
def findJarsDir(sparkHome: String): String = {
  // Reduce the full runtime version string (e.g. "2.11.8") to its binary form ("2.11").
  val binaryVersion = Properties.versionNumberString.split("\\.").take(2).mkString(".")
  CommandBuilderUtils.findJarsDir(sparkHome, binaryVersion, true)
}
/**
* Adds the perm gen configuration to the list of java options if needed and not yet added.
*
......
......@@ -17,7 +17,7 @@
package org.apache.spark.deploy.yarn
import java.io.File
import java.io.{File, FileOutputStream}
import java.net.URI
import java.util.Properties
......@@ -274,6 +274,7 @@ class ClientSuite extends SparkFunSuite with Matchers with BeforeAndAfterAll
val jarsDir = new File(temp, "lib")
assert(jarsDir.mkdir())
val jar = TestUtils.createJarWithFiles(Map(), jarsDir)
new FileOutputStream(new File(temp, "RELEASE")).close()
val sparkConf = new SparkConfWithEnv(Map("SPARK_HOME" -> temp.getAbsolutePath()))
val client = createClient(sparkConf)
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment