From 96929f28bb9c929ca3309dbe99910097f5eb3c8c Mon Sep 17 00:00:00 2001
From: Harvey Feng <harvey@databricks.com>
Date: Sat, 5 Oct 2013 17:14:19 -0700
Subject: [PATCH] Make the HadoopRDD companion object private[spark].

---
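Notes (below the "---" cut line, so `git am` ignores them):

This patch narrows visibility in HadoopRDD: the companion object becomes
private[spark], getInputFormat drops from public to protected, and
inputFormatCacheKey widens from private to protected so that HadoopRDD
subclasses can still reach the metadata-cache key.

A minimal sketch of what Scala's package-qualified modifier does here.
Everything below besides HadoopRDD (the caller package, CacheUser, and
the stubbed getCachedMetadata body) is hypothetical, for illustration only:

    package org.apache.spark {
      package rdd {
        // private[spark]: visible anywhere under org.apache.spark,
        // invisible to user code outside that package.
        private[spark] object HadoopRDD {
          def getCachedMetadata(key: String): AnyRef = key // stub
        }
      }

      package scheduler {
        // Hypothetical caller inside the spark package: compiles fine,
        // since org.apache.spark.scheduler sits under org.apache.spark.
        object CacheUser {
          val meta: AnyRef = rdd.HadoopRDD.getCachedMetadata("rdd_0_input_format")
        }
      }
    }

    // Outside org.apache.spark, the same reference would not compile:
    //   val o = org.apache.spark.rdd.HadoopRDD
    //   // error: object HadoopRDD cannot be accessed
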
 core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala b/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala
index 728f3d1aed..51e5bb88d2 100644
--- a/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala
@@ -106,7 +106,7 @@ class HadoopRDD[K, V](
 
   protected val jobConfCacheKey = "rdd_%d_job_conf".format(id)
 
-  private val inputFormatCacheKey = "rdd_%d_input_format".format(id)
+  protected val inputFormatCacheKey = "rdd_%d_input_format".format(id)
 
   // Returns a JobConf that will be used on slaves to obtain input splits for Hadoop reads.
   protected def getJobConf(): JobConf = {
@@ -122,7 +122,7 @@ class HadoopRDD[K, V](
     }
   }
 
-  def getInputFormat(conf: JobConf): InputFormat[K, V] = {
+  protected def getInputFormat(conf: JobConf): InputFormat[K, V] = {
     if (HadoopRDD.containsCachedMetadata(inputFormatCacheKey)) {
       return HadoopRDD.getCachedMetadata(inputFormatCacheKey).asInstanceOf[InputFormat[K, V]]
     }
@@ -196,7 +196,7 @@ class HadoopRDD[K, V](
   def getConf: Configuration = getJobConf()
 }
 
-object HadoopRDD {
+private[spark] object HadoopRDD {
   def getCachedMetadata(key: String) = SparkEnv.get.hadoop.hadoopJobMetadata.get(key)
 
   def containsCachedMetadata(key: String) = SparkEnv.get.hadoop.hadoopJobMetadata.containsKey(key)
-- 
GitLab