Skip to content
Snippets Groups Projects
Commit f686e3da authored by Ismael Juma's avatar Ismael Juma
Browse files

Initial work on converting build to SBT 0.10.1

parent cf8f5de6
No related branches found
No related tags found
No related merge requests found
import sbt._
import Keys._
/**
 * SBT 0.10 build definition for Spark.
 *
 * The root project aggregates all sub-projects so commands such as
 * `compile` and `test` run across core, repl, examples and bagel.
 * repl/examples/bagel all depend on core.
 */
object SparkBuild extends Build {
  lazy val root = Project("root", file("."), settings = sharedSettings) aggregate(core, repl, examples, bagel)
  lazy val core = Project("core", file("core"), settings = coreSettings)
  lazy val repl = Project("repl", file("repl"), settings = replSettings) dependsOn (core)
  lazy val examples = Project("examples", file("examples"), settings = examplesSettings) dependsOn (core)
  lazy val bagel = Project("bagel", file("bagel"), settings = bagelSettings) dependsOn (core)

  // Settings shared by every sub-project.
  def sharedSettings = Defaults.defaultSettings ++ Seq(
    organization := "org.spark-project",
    // FIX: was "version=0.4-SNAPSHOT" — the "version=" key prefix from the old
    // build.properties line leaked into the version string, which would have
    // produced artifacts named e.g. core-version=0.4-SNAPSHOT.jar.
    version := "0.4-SNAPSHOT",
    scalaVersion := "2.9.0-1",
    scalacOptions := Seq(/*"-deprecation",*/ "-unchecked"), // TODO Enable -deprecation and fix all warnings
    // Pick up any jars checked into each project's lib/ directory.
    unmanagedJars in Compile <<= baseDirectory map { base => (base ** "*.jar").classpath },
    // Copy managed dependencies under lib_managed/ (the run script scans it).
    retrieveManaged := true,
    // Also fetch -sources jars for IDE navigation.
    transitiveClassifiers in Scope.GlobalScope := Seq("sources"),
    libraryDependencies ++= Seq(
      "org.eclipse.jetty" % "jetty-server" % "7.4.2.v20110526",
      "org.scalatest" % "scalatest_2.9.0" % "1.4.1" % "test",
      "org.scala-tools.testing" % "scalacheck_2.9.0" % "1.9" % "test"
    )
  )

  val slf4jVersion = "1.6.1"

  //FIXME DepJar and XmlTestReport
  def coreSettings = sharedSettings ++ Seq(libraryDependencies ++= Seq(
    "com.google.guava" % "guava" % "r09",
    "log4j" % "log4j" % "1.2.16",
    "org.slf4j" % "slf4j-api" % slf4jVersion,
    "org.slf4j" % "slf4j-log4j12" % slf4jVersion,
    "com.ning" % "compress-lzf" % "0.7.0",
    "org.apache.hadoop" % "hadoop-core" % "0.20.2",
    "asm" % "asm-all" % "3.3.1"
  ))

  //FIXME DepJar and XmlTestReport
  // The REPL needs the compiler for the version of Scala we build with.
  def replSettings = sharedSettings ++ Seq(libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _))

  def examplesSettings = sharedSettings ++ Seq(libraryDependencies += "colt" % "colt" % "1.2.0")

  //FIXME DepJar and XmlTestReport
  def bagelSettings = sharedSettings
}
// Project mixin for an XML-based ScalaTest report. Unfortunately
// there is currently no way to call this directly from SBT without
// executing a subprocess.
//trait XmlTestReport extends BasicScalaProject {
// def testReportDir = outputPath / "test-report"
//
// lazy val testReport = task {
// log.info("Creating " + testReportDir + "...")
// if (!testReportDir.exists) {
// testReportDir.asFile.mkdirs()
// }
// log.info("Executing org.scalatest.tools.Runner...")
// val command = ("scala -classpath " + testClasspath.absString +
// " org.scalatest.tools.Runner -o " +
// " -u " + testReportDir.absolutePath +
// " -p " + (outputPath / "test-classes").absolutePath)
// Process(command, path("."), "JAVA_OPTS" -> "-Xmx500m") !
//
// None
// }.dependsOn(compile, testCompile).describedAs("Generate XML test report.")
//}
// Project mixin for creating a JAR with a project's dependencies. This is based
// on the AssemblyBuilder plugin, but because this plugin attempts to package Scala
// and our project too, we leave that out using our own exclude filter (depJarExclude).
//trait DepJar extends AssemblyBuilder {
// def depJarExclude(base: PathFinder) = {
// (base / "scala" ** "*") +++ // exclude scala library
// (base / "spark" ** "*") +++ // exclude Spark classes
// ((base / "META-INF" ** "*") --- // generally ignore the hell out of META-INF
// (base / "META-INF" / "services" ** "*") --- // include all service providers
// (base / "META-INF" / "maven" ** "*")) // include all Maven POMs and such
// }
//
// def depJarTempDir = outputPath / "dep-classes"
//
// def depJarOutputPath =
// outputPath / (name.toLowerCase.replace(" ", "-") + "-dep-" + version.toString + ".jar")
//
// lazy val depJar = {
// packageTask(
// Path.lazyPathFinder(assemblyPaths(depJarTempDir,
// assemblyClasspath,
// assemblyExtraJars,
// depJarExclude)),
// depJarOutputPath,
// packageOptions)
// }.dependsOn(compile).describedAs("Bundle project's dependencies into a JAR.")
//}
#Project properties
sbt.version=0.10.1
#Sat Nov 13 21:57:32 PST 2010
project.organization=org.spark-project
project.name=spark
sbt.version=0.7.7
project.version=0.4-SNAPSHOT
build.scala.versions=2.9.0-1
project.initialize=false
import sbt._
import sbt.Process._
import assembly._
import de.element34.sbteclipsify._
// Legacy SBT 0.7 build definition (superseded by the SBT 0.10 SparkBuild above).
// In SBT 0.7, sub-projects and library dependencies are declared as vals and
// discovered via reflection, so the val names below are significant.
class SparkProject(info: ProjectInfo) extends ParentProject(info) with IdeaProject {
// Sub-projects; repl/examples/bagel depend on core.
lazy val core = project("core", "Spark Core", new CoreProject(_))
lazy val repl = project("repl", "Spark REPL", new ReplProject(_), core)
lazy val examples = project("examples", "Spark Examples", new ExamplesProject(_), core)
lazy val bagel = project("bagel", "Bagel", new BagelProject(_), core)
// Settings common to every sub-project: unchecked warnings, Jetty dependency,
// and publishing of -sources and -javadoc jars alongside the main artifact.
trait BaseProject extends BasicScalaProject with ScalaPaths with BasicPackagePaths with Eclipsify with IdeaProject {
override def compileOptions = super.compileOptions ++ Seq(Unchecked)
lazy val jettyServer = "org.eclipse.jetty" % "jetty-server" % "7.4.2.v20110526"
override def packageDocsJar = defaultJarPath("-javadoc.jar")
override def packageSrcJar= defaultJarPath("-sources.jar")
lazy val sourceArtifact = Artifact.sources(artifactID)
lazy val docsArtifact = Artifact.javadoc(artifactID)
// Build the sources/javadoc jars whenever we publish.
override def packageToPublishActions = super.packageToPublishActions ++ Seq(packageDocs, packageSrc)
}
class CoreProject(info: ProjectInfo) extends DefaultProject(info) with BaseProject with DepJar with XmlTestReport {
val guava = "com.google.guava" % "guava" % "r09"
val log4j = "log4j" % "log4j" % "1.2.16"
val slf4jVersion = "1.6.1"
val slf4jApi = "org.slf4j" % "slf4j-api" % slf4jVersion
val slf4jLog4j = "org.slf4j" % "slf4j-log4j12" % slf4jVersion
val compressLzf = "com.ning" % "compress-lzf" % "0.7.0"
val hadoop = "org.apache.hadoop" % "hadoop-core" % "0.20.2"
val asm = "asm" % "asm-all" % "3.3.1"
// Test-scoped dependencies.
val scalaTest = "org.scalatest" % "scalatest_2.9.0" % "1.4.1" % "test"
val scalaCheck = "org.scala-tools.testing" % "scalacheck_2.9.0" % "1.9" % "test"
}
class ReplProject(info: ProjectInfo) extends DefaultProject(info) with BaseProject with DepJar with XmlTestReport
class ExamplesProject(info: ProjectInfo) extends DefaultProject(info) with BaseProject {
val colt = "colt" % "colt" % "1.2.0"
}
class BagelProject(info: ProjectInfo) extends DefaultProject(info) with BaseProject with DepJar with XmlTestReport
// Publish in Maven repository layout rather than Ivy's.
override def managedStyle = ManagedStyle.Maven
}
// Project mixin for an XML-based ScalaTest report. Unfortunately
// there is currently no way to call this directly from SBT without
// executing a subprocess.
trait XmlTestReport extends BasicScalaProject {
// Reports are written under the project's output path (e.g. target/test-report).
def testReportDir = outputPath / "test-report"
// SBT 0.7 task: shells out to org.scalatest.tools.Runner because the runner
// cannot be invoked in-process from SBT here (see header comment above).
lazy val testReport = task {
log.info("Creating " + testReportDir + "...")
if (!testReportDir.exists) {
testReportDir.asFile.mkdirs()
}
log.info("Executing org.scalatest.tools.Runner...")
// -o: stdout reporter, -u: XML output dir, -p: runpath of compiled test classes.
val command = ("scala -classpath " + testClasspath.absString +
" org.scalatest.tools.Runner -o " +
" -u " + testReportDir.absolutePath +
" -p " + (outputPath / "test-classes").absolutePath)
// Cap the subprocess heap; exit code is ignored.
Process(command, path("."), "JAVA_OPTS" -> "-Xmx500m") !
// Returning None signals task success in SBT 0.7.
None
}.dependsOn(compile, testCompile).describedAs("Generate XML test report.")
}
// Project mixin for creating a JAR with a project's dependencies. This is based
// on the AssemblyBuilder plugin, but because this plugin attempts to package Scala
// and our project too, we leave that out using our own exclude filter (depJarExclude).
trait DepJar extends AssemblyBuilder {
// Filter applied to assembly contents: drop Scala and Spark classes (they are
// provided separately) but keep META-INF service-provider files and Maven POMs.
def depJarExclude(base: PathFinder) = {
(base / "scala" ** "*") +++ // exclude scala library
(base / "spark" ** "*") +++ // exclude Spark classes
((base / "META-INF" ** "*") --- // generally ignore the hell out of META-INF
(base / "META-INF" / "services" ** "*") --- // include all service providers
(base / "META-INF" / "maven" ** "*")) // include all Maven POMs and such
}
// Staging directory where dependency classes are unpacked before jarring.
def depJarTempDir = outputPath / "dep-classes"
// Output jar name, e.g. spark-core-dep-0.4-SNAPSHOT.jar.
def depJarOutputPath =
outputPath / (name.toLowerCase.replace(" ", "-") + "-dep-" + version.toString + ".jar")
// SBT 0.7 task: package the filtered dependency classpath into a single jar.
lazy val depJar = {
packageTask(
Path.lazyPathFinder(assemblyPaths(depJarTempDir,
assemblyClasspath,
assemblyExtraJars,
depJarExclude)),
depJarOutputPath,
packageOptions)
}.dependsOn(compile).describedAs("Bundle project's dependencies into a JAR.")
}
import sbt._
// SBT 0.7 plugin definition: declares build plugins (Eclipse/IDEA project
// generation and assembly) and the extra resolvers needed to fetch them.
class SparkProjectPlugins(info: ProjectInfo) extends PluginDefinition(info) {
val eclipse = "de.element34" % "sbt-eclipsify" % "0.7.0"
// Resolver for the sbt-idea plugin below.
val sbtIdeaRepo = "sbt-idea-repo" at "http://mpeltonen.github.com/maven/"
val sbtIdea = "com.github.mpeltonen" % "sbt-idea-plugin" % "0.4.0"
// Resolver for the assembly plugin below.
val codaRepo = "Coda Hale's Repository" at "http://repo.codahale.com/"
val assemblySBT = "com.codahale" % "assembly-sbt" % "0.1.1"
}
// SBT 0.10 plugin settings (project/plugins/build.sbt): resolvers and
// dependencies for the IDE-integration plugins.
resolvers += {
val typesafeRepoUrl = new java.net.URL("http://repo.typesafe.com/typesafe/releases")
// Ivy-style layout with an [sbtversion] segment, hence a custom pattern
// (first arg `false` = this is not the default Maven pattern).
val pattern = Patterns(false, "[organisation]/[module]/[sbtversion]/[revision]/[type]s/[module](-[classifier])-[revision].[ext]")
Resolver.url("Typesafe Repository", typesafeRepoUrl)(pattern)
}
resolvers += "sbt-idea-repo" at "http://mpeltonen.github.com/maven/"
libraryDependencies ++= Seq(
"com.github.mpeltonen" %% "sbt-idea" % "0.10.0-SNAPSHOT"
// FIXME Uncomment once version for SBT 0.10.1 is available "com.eed3si9n" %% "sbt-assembly" % "0.2"
)
// sbteclipse needs to know which sbt version the build runs, passed via the
// "sbtversion" extra attribute (matches the [sbtversion] pattern segment above).
libraryDependencies <<= (libraryDependencies, sbtVersion) { (deps, version) =>
deps :+ ("com.typesafe.sbteclipse" %% "sbteclipse" % "1.2" extra("sbtversion" -> version))
}
\ No newline at end of file
#!/bin/bash #!/bin/bash
SCALA_VERSION=2.9.0-1 SCALA_VERSION=2.9.0.1
# Figure out where the Scala framework is installed # Figure out where the Scala framework is installed
FWDIR="$(cd `dirname $0`; pwd)" FWDIR="$(cd `dirname $0`; pwd)"
...@@ -41,23 +41,23 @@ EXAMPLES_DIR=$FWDIR/examples ...@@ -41,23 +41,23 @@ EXAMPLES_DIR=$FWDIR/examples
BAGEL_DIR=$FWDIR/bagel BAGEL_DIR=$FWDIR/bagel
# Build up classpath # Build up classpath
CLASSPATH="$SPARK_CLASSPATH:$CORE_DIR/target/scala_$SCALA_VERSION/classes:$MESOS_CLASSPATH" CLASSPATH="$SPARK_CLASSPATH:$CORE_DIR/target/scala-$SCALA_VERSION/classes:$MESOS_CLASSPATH"
CLASSPATH+=:$FWDIR/conf CLASSPATH+=:$FWDIR/conf
CLASSPATH+=:$REPL_DIR/target/scala_$SCALA_VERSION/classes CLASSPATH+=:$REPL_DIR/target/scala-$SCALA_VERSION/classes
CLASSPATH+=:$EXAMPLES_DIR/target/scala_$SCALA_VERSION/classes CLASSPATH+=:$EXAMPLES_DIR/target/scala-$SCALA_VERSION/classes
for jar in `find $CORE_DIR/lib -name '*jar'`; do for jar in `find $CORE_DIR/lib -name '*jar'`; do
CLASSPATH+=:$jar CLASSPATH+=:$jar
done done
for jar in $CORE_DIR/lib_managed/scala_$SCALA_VERSION/compile/*.jar; do for jar in `find $FWDIR/lib_managed/jars -name '*jar'`; do
CLASSPATH+=:$jar CLASSPATH+=:$jar
done done
for jar in `find $REPL_DIR/lib -name '*jar'`; do for jar in `find $FWDIR/lib_managed/bundles -name '*jar'`; do
CLASSPATH+=:$jar CLASSPATH+=:$jar
done done
for jar in $REPL_DIR/lib_managed/scala_$SCALA_VERSION/compile/*.jar; do for jar in `find $REPL_DIR/lib -name '*jar'`; do
CLASSPATH+=:$jar CLASSPATH+=:$jar
done done
CLASSPATH+=:$BAGEL_DIR/target/scala_$SCALA_VERSION/classes CLASSPATH+=:$BAGEL_DIR/target/scala-$SCALA_VERSION/classes
export CLASSPATH # Needed for spark-shell export CLASSPATH # Needed for spark-shell
if [ -n "$SCALA_HOME" ]; then if [ -n "$SCALA_HOME" ]; then
...@@ -66,4 +66,5 @@ else ...@@ -66,4 +66,5 @@ else
SCALA=scala SCALA=scala
fi fi
echo $CLASSPATH >> tmp
exec $SCALA -cp $CLASSPATH "$@" exec $SCALA -cp $CLASSPATH "$@"
File added
File deleted
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment