diff --git a/core/pom.xml b/core/pom.xml
index bd6767e03bb9d34b6421349c4654633f9da63c37..8c23842730e37fa163694d7f2bc933b6638e8386 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -244,6 +244,11 @@
       <artifactId>easymockclassextension</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>asm</groupId>
+      <artifactId>asm</artifactId>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>com.novocode</groupId>
       <artifactId>junit-interface</artifactId>
diff --git a/core/src/test/java/org/apache/spark/JavaAPISuite.java b/core/src/test/java/org/apache/spark/JavaAPISuite.java
index 1d7a7be6cfeb323be661322d6c63bca7a597506e..b2868b59ce6c693c07cbe0698272495ef6f3a06d 100644
--- a/core/src/test/java/org/apache/spark/JavaAPISuite.java
+++ b/core/src/test/java/org/apache/spark/JavaAPISuite.java
@@ -18,6 +18,7 @@
 package org.apache.spark;
 
 import java.io.*;
+import java.net.URI;
 import java.util.*;
 
 import scala.Tuple2;
@@ -768,7 +769,7 @@ public class JavaAPISuite implements Serializable {
   }
 
   @Test
-  public void wholeTextFiles() throws IOException {
+  public void wholeTextFiles() throws Exception {
     byte[] content1 = "spark is easy to use.\n".getBytes("utf-8");
     byte[] content2 = "spark is also easy to use.\n".getBytes("utf-8");
 
@@ -784,7 +785,7 @@ public class JavaAPISuite implements Serializable {
     List<Tuple2<String, String>> result = readRDD.collect();
 
     for (Tuple2<String, String> res : result) {
-      Assert.assertEquals(res._2(), container.get(res._1()));
+      Assert.assertEquals(res._2(), container.get(new URI(res._1()).getPath()));
     }
   }
 
diff --git a/pom.xml b/pom.xml
index 0d46bb4114f734f87cb2a31259ae96259dfb7db6..05f76d566e9d1622a3cafcf008ad083452f3f621 100644
--- a/pom.xml
+++ b/pom.xml
@@ -468,6 +468,13 @@
         <version>3.1</version>
         <scope>test</scope>
       </dependency>
+      <!-- Needed by cglib which is needed by easymock. -->
+      <dependency>
+        <groupId>asm</groupId>
+        <artifactId>asm</artifactId>
+        <version>3.3.1</version>
+        <scope>test</scope>
+      </dependency>
       <dependency>
         <groupId>org.mockito</groupId>
         <artifactId>mockito-all</artifactId>
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 7bb39dc77120bcb2d26a0369d49a618987e075d2..55a2aa0fc714149403205fd402e946a1e2cf4667 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -293,7 +293,9 @@ object SparkBuild extends Build {
         "com.novocode"      % "junit-interface"        % "0.10"   % "test",
         "org.easymock"      % "easymockclassextension" % "3.1"    % "test",
         "org.mockito"       % "mockito-all"            % "1.9.0"  % "test",
-        "junit"             % "junit"                  % "4.10"   % "test"
+        "junit"             % "junit"                  % "4.10"   % "test",
+        // Needed by cglib which is needed by easymock.
+        "asm"               % "asm"                    % "3.3.1"  % "test"
     ),
 
     testOptions += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"),
@@ -461,7 +463,7 @@ object SparkBuild extends Build {
 
   def toolsSettings = sharedSettings ++ Seq(
     name := "spark-tools",
-    libraryDependencies <+= scalaVersion(v => "org.scala-lang"  % "scala-compiler" % v ),
+    libraryDependencies <+= scalaVersion(v => "org.scala-lang"  % "scala-compiler" % v),
-    libraryDependencies <+= scalaVersion(v => "org.scala-lang"  % "scala-reflect"  % v )
+    libraryDependencies <+= scalaVersion(v => "org.scala-lang"  % "scala-reflect"  % v)
   ) ++ assemblySettings ++ extraAssemblySettings
 
@@ -630,9 +632,9 @@ object SparkBuild extends Build {
     scalaVersion := "2.10.4",
     retrieveManaged := true,
     retrievePattern := "[type]s/[artifact](-[revision])(-[classifier]).[ext]",
-    libraryDependencies := Seq("spark-streaming-mqtt", "spark-streaming-zeromq", 
+    libraryDependencies := Seq("spark-streaming-mqtt", "spark-streaming-zeromq",
       "spark-streaming-flume", "spark-streaming-kafka", "spark-streaming-twitter",
-      "spark-streaming", "spark-mllib", "spark-bagel", "spark-graphx", 
+      "spark-streaming", "spark-mllib", "spark-bagel", "spark-graphx",
       "spark-core").map(sparkPreviousArtifact(_).get intransitive())
   )