Commit adcda84f authored by Mridul Muralidharan

Pull latest SparkBuild.scala from master and merge conflicts

parent 5b85c715
@@ -43,15 +43,22 @@ object SparkBuild extends Build {
   def sharedSettings = Defaults.defaultSettings ++ Seq(
     organization := "org.spark-project",
-    version := "0.7.1-SNAPSHOT",
-    scalaVersion := "2.9.2",
+    version := "0.8.0-SNAPSHOT",
+    scalaVersion := "2.9.3",
     scalacOptions := Seq("-unchecked", "-optimize", "-deprecation"),
     unmanagedJars in Compile <<= baseDirectory map { base => (base / "lib" ** "*.jar").classpath },
     retrieveManaged := true,
     transitiveClassifiers in Scope.GlobalScope := Seq("sources"),
     testListeners <<= target.map(t => Seq(new eu.henkelmann.sbt.JUnitXmlTestsListener(t.getAbsolutePath))),
-    // shared between both core and streaming.
+    // Fork new JVMs for tests and set Java options for those
+    fork := true,
+    javaOptions += "-Xmx1g",
+    // Only allow one test at a time, even across projects, since they run in the same JVM
+    concurrentRestrictions in Global += Tags.limit(Tags.Test, 1),
+    // Shared between both core and streaming.
     resolvers ++= Seq("Akka Repository" at "http://repo.akka.io/releases/"),
     // For Sonatype publishing
@@ -100,13 +107,12 @@ object SparkBuild extends Build {
     libraryDependencies ++= Seq(
       "io.netty" % "netty" % "3.5.3.Final",
-      "org.eclipse.jetty" % "jetty-server" % "7.5.3.v20111011",
-      "org.scalatest" %% "scalatest" % "1.8" % "test",
-      "org.scalacheck" %% "scalacheck" % "1.9" % "test",
-      "com.novocode" % "junit-interface" % "0.8" % "test",
+      "org.eclipse.jetty" % "jetty-server" % "7.6.8.v20121106",
+      "org.scalatest" %% "scalatest" % "1.9.1" % "test",
+      "org.scalacheck" %% "scalacheck" % "1.10.0" % "test",
+      "com.novocode" % "junit-interface" % "0.9" % "test",
       "org.easymock" % "easymock" % "3.1" % "test"
     ),
-    parallelExecution := false,
     /* Workaround for issue #206 (fixed after SBT 0.11.0) */
     watchTransitiveSources <<= Defaults.inDependencies[Task[Seq[File]]](watchSources.task,
       const(std.TaskExtra.constant(Nil)), aggregate = true, includeRoot = true) apply { _.join.map(_.flatten) },
@@ -137,8 +143,8 @@ object SparkBuild extends Build {
       "log4j" % "log4j" % "1.2.16",
       "org.slf4j" % "slf4j-api" % slf4jVersion,
       "org.slf4j" % "slf4j-log4j12" % slf4jVersion,
-      "com.ning" % "compress-lzf" % "0.8.4",
       "commons-daemon" % "commons-daemon" % "1.0.10",
+      "com.ning" % "compress-lzf" % "0.8.4",
       "asm" % "asm-all" % "3.3.1",
       "com.google.protobuf" % "protobuf-java" % "2.4.1",
       "de.javakaffee" % "kryo-serializers" % "0.22",
@@ -149,25 +155,26 @@ object SparkBuild extends Build {
       "colt" % "colt" % "1.2.0",
       "cc.spray" % "spray-can" % "1.0-M2.1",
       "cc.spray" % "spray-server" % "1.0-M2.1",
-      "cc.spray" %% "spray-json" % "1.1.1",
+      "cc.spray" % "spray-json_2.9.2" % "1.1.1",
       "org.apache.mesos" % "mesos" % "0.9.0-incubating"
     ) ++ (
       if (HADOOP_MAJOR_VERSION == "2") {
         if (HADOOP_YARN) {
           Seq(
-            "org.apache.hadoop" % "hadoop-client" % HADOOP_VERSION,
-            "org.apache.hadoop" % "hadoop-yarn-api" % HADOOP_VERSION,
-            "org.apache.hadoop" % "hadoop-yarn-common" % HADOOP_VERSION,
-            "org.apache.hadoop" % "hadoop-yarn-client" % HADOOP_VERSION
+            // Exclude rule required for all ?
+            "org.apache.hadoop" % "hadoop-client" % HADOOP_VERSION excludeAll( ExclusionRule(organization = "org.codehaus.jackson") ),
+            "org.apache.hadoop" % "hadoop-yarn-api" % HADOOP_VERSION excludeAll( ExclusionRule(organization = "org.codehaus.jackson") ),
+            "org.apache.hadoop" % "hadoop-yarn-common" % HADOOP_VERSION excludeAll( ExclusionRule(organization = "org.codehaus.jackson") ),
+            "org.apache.hadoop" % "hadoop-yarn-client" % HADOOP_VERSION excludeAll( ExclusionRule(organization = "org.codehaus.jackson") )
           )
         } else {
           Seq(
-            "org.apache.hadoop" % "hadoop-core" % HADOOP_VERSION,
-            "org.apache.hadoop" % "hadoop-client" % HADOOP_VERSION
+            "org.apache.hadoop" % "hadoop-core" % HADOOP_VERSION excludeAll( ExclusionRule(organization = "org.codehaus.jackson") ),
+            "org.apache.hadoop" % "hadoop-client" % HADOOP_VERSION excludeAll( ExclusionRule(organization = "org.codehaus.jackson") )
           )
         }
       } else {
-        Seq("org.apache.hadoop" % "hadoop-core" % HADOOP_VERSION)
+        Seq("org.apache.hadoop" % "hadoop-core" % HADOOP_VERSION excludeAll( ExclusionRule(organization = "org.codehaus.jackson") ) )
       }),
     unmanagedSourceDirectories in Compile <+= baseDirectory{ _ /
       ( if (HADOOP_YARN && HADOOP_MAJOR_VERSION == "2") {
@@ -189,7 +196,7 @@ object SparkBuild extends Build {
   def examplesSettings = sharedSettings ++ Seq(
     name := "spark-examples",
-    libraryDependencies ++= Seq("com.twitter" % "algebird-core_2.9.2" % "0.1.8")
+    libraryDependencies ++= Seq("com.twitter" % "algebird-core_2.9.2" % "0.1.11")
   )
   def bagelSettings = sharedSettings ++ Seq(name := "spark-bagel")
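
Note on the new test settings: the first hunk forks a dedicated JVM for tests, gives it a 1 GB heap, and limits sbt to one running test task across all projects, which is presumably why the explicit parallelExecution := false line drops out of the core settings. The standalone sketch below is not part of this commit; it just shows those three settings in a minimal Build.scala of the same sbt era (the project name is made up for illustration).

import sbt._
import Keys._

// Minimal sketch (not part of the commit) isolating the new test-related settings.
object TestSettingsSketch extends Build {
  lazy val root = Project("test-settings-sketch", file("."), settings = Defaults.defaultSettings ++ Seq(
    // Run tests in a forked JVM instead of inside sbt's own process...
    fork := true,
    // ...and give that forked JVM a 1 GB heap.
    javaOptions += "-Xmx1g",
    // Even with multiple sub-projects, allow only one test task to run at a time.
    concurrentRestrictions in Global += Tags.limit(Tags.Test, 1)
  ))
}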
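
Note on the Jackson exclusions: every Hadoop/YARN dependency now carries excludeAll( ExclusionRule(organization = "org.codehaus.jackson") ), dropping Hadoop's transitive org.codehaus.jackson artifacts, and the commit's own comment asks whether the rule is needed on all of them. The hypothetical sketch below is not part of this commit; it only illustrates how the repeated rule could be bound to a val and applied through a small helper. The hadoopVersion value and the hadoopDep helper are illustrative names, standing in for the HADOOP_VERSION constant used in SparkBuild.scala.

import sbt._
import Keys._

// Hypothetical sketch of the exclusion pattern used in the commit.
object HadoopDepsSketch extends Build {
  // Stand-in for the HADOOP_VERSION constant in SparkBuild.scala.
  val hadoopVersion = "2.0.2-alpha"

  // The rule repeated throughout the diff: exclude org.codehaus.jackson
  // so Hadoop's transitive Jackson does not clash with Spark's.
  val excludeJackson = ExclusionRule(organization = "org.codehaus.jackson")

  // Helper applying the rule once, instead of repeating it on every line.
  def hadoopDep(artifact: String): ModuleID =
    "org.apache.hadoop" % artifact % hadoopVersion excludeAll(excludeJackson)

  lazy val root = Project("hadoop-deps-sketch", file("."), settings = Defaults.defaultSettings ++ Seq(
    libraryDependencies ++= Seq(
      hadoopDep("hadoop-client"),
      hadoopDep("hadoop-yarn-api"),
      hadoopDep("hadoop-yarn-common"),
      hadoopDep("hadoop-yarn-client")
    )
  ))
}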