Commit 483f724d authored by Ismael Juma

Upgrade to Scala 2.9.1.

Interestingly, the version in Maven is 2.9.1, but SBT outputs files to the 2.9.1.final
directory inside target.

A couple of small changes in SparkIMain were also required.

All tests pass and ./spark-shell launches successfully.
parent 10697402
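
For context on the SparkIMain changes in the diff below: both hunks convert a Name to a String before calling isInternalVarName, which suggests the 2.9.1 REPL helper now takes a plain String rather than a Name. A minimal, self-contained sketch of that calling pattern (the Name and isInternalVarName definitions here are simplified stand-ins, not the compiler's own):

object NameToStringSketch extends App {
  // Simplified stand-in for the compiler's Name type; its text is exposed via toString.
  final case class Name(raw: String) { override def toString = raw }

  // Signature inferred from the diff: in 2.9.1 the helper takes a String, not a Name.
  // The real check lives in the REPL's naming support; this body is only a placeholder.
  def isInternalVarName(name: String): Boolean = name.startsWith("$")

  val n = Name("$line1")
  println(isInternalVarName(n.toString))  // explicit .toString, as in both hunks below
}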
@@ -17,7 +17,7 @@ object SparkBuild extends Build {
   def sharedSettings = Defaults.defaultSettings ++ Seq(
     organization := "org.spark-project",
     version := "0.4-SNAPSHOT",
-    scalaVersion := "2.9.0-1",
+    scalaVersion := "2.9.1",
     scalacOptions := Seq(/*"-deprecation",*/ "-unchecked", "-optimize"), // -deprecation is too noisy due to usage of old Hadoop API, enable it once that's no longer an issue
     unmanagedJars in Compile <<= baseDirectory map { base => (base / "lib" ** "*.jar").classpath },
     retrieveManaged := true,
...
@@ -356,7 +356,7 @@ class SparkIMain(val settings: Settings, protected val out: PrintWriter) extends
   private def mostRecentlyHandledTree: Option[Tree] = {
     prevRequests.reverse foreach { req =>
       req.handlers.reverse foreach {
-        case x: MemberDefHandler if x.definesValue && !isInternalVarName(x.name) => return Some(x.member)
+        case x: MemberDefHandler if x.definesValue && !isInternalVarName(x.name.toString) => return Some(x.member)
         case _ => ()
       }
     }
@@ -1023,7 +1023,7 @@ class SparkIMain(val settings: Settings, protected val out: PrintWriter) extends
   protected def onlyTerms(xs: List[Name]) = xs collect { case x: TermName => x }
   protected def onlyTypes(xs: List[Name]) = xs collect { case x: TypeName => x }
-  def definedTerms = onlyTerms(allDefinedNames) filterNot isInternalVarName
+  def definedTerms = onlyTerms(allDefinedNames) filterNot (x => isInternalVarName(x.toString))
   def definedTypes = onlyTypes(allDefinedNames)
   def definedSymbols = prevRequests.toSet flatMap ((x: Request) => x.definedSymbols.values)
...
 #!/bin/bash
-SCALA_VERSION=2.9.0.1
+SCALA_VERSION=2.9.1.final
 # Figure out where the Scala framework is installed
 FWDIR="$(cd `dirname $0`; pwd)"
...
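
The 2.9.1.final suffix called out in the commit message is what the launcher script above now carries in SCALA_VERSION, presumably so it can find the classes SBT wrote under target. A small sketch of that path construction (the target/scala-<version>/classes layout and the core module name are assumptions for illustration, not taken from the script):

object ClasspathSketch extends App {
  // Mirrors the value the script sets; per the commit message, SBT's output lands in
  // a 2.9.1.final directory inside target, not one named after the Maven version 2.9.1.
  val scalaVersion = "2.9.1.final"

  // Hypothetical module directory, used only to show the shape of the resulting path.
  val coreDir    = "core"
  val classesDir = coreDir + "/target/scala-" + scalaVersion + "/classes"

  println(classesDir)  // core/target/scala-2.9.1.final/classes
}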