diff --git a/core/src/main/scala/spark/rdd/HadoopRDD.scala b/core/src/main/scala/spark/rdd/HadoopRDD.scala
index 6c41b9778095ed934433b3bebdb120a90a32508f..e512423fd63ab464de4ab3711e300d1a5726b3a4 100644
--- a/core/src/main/scala/spark/rdd/HadoopRDD.scala
+++ b/core/src/main/scala/spark/rdd/HadoopRDD.scala
@@ -34,7 +34,7 @@ import org.apache.hadoop.util.ReflectionUtils
 
 import spark.{Dependency, Logging, Partition, RDD, SerializableWritable, SparkContext, SparkEnv, TaskContext}
 import spark.util.NextIterator
-import org.apache.hadoop.conf.Configurable
+import org.apache.hadoop.conf.{Configuration, Configurable}
 
 
 /**
@@ -132,4 +132,7 @@ class HadoopRDD[K, V](
   override def checkpoint() {
     // Do nothing. Hadoop RDD should not be checkpointed.
   }
+
+  /** Returns the Hadoop Configuration that this RDD was constructed with (broadcast to tasks). */
+  def getConf: Configuration = confBroadcast.value.value
 }
diff --git a/core/src/main/scala/spark/rdd/NewHadoopRDD.scala b/core/src/main/scala/spark/rdd/NewHadoopRDD.scala
index 184685528e34af5a81a6a391591271c3d895238b..b1877dc06e5103ca6b57099a35d546b973a3cd30 100644
--- a/core/src/main/scala/spark/rdd/NewHadoopRDD.scala
+++ b/core/src/main/scala/spark/rdd/NewHadoopRDD.scala
@@ -120,4 +120,7 @@ class NewHadoopRDD[K, V](
     val theSplit = split.asInstanceOf[NewHadoopPartition]
     theSplit.serializableHadoopSplit.value.getLocations.filter(_ != "localhost")
   }
+
+  /** Returns the Hadoop Configuration that this RDD was constructed with (broadcast to tasks). */
+  def getConf: Configuration = confBroadcast.value.value
 }
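Both files now expose the broadcast Hadoop configuration through a public getConf accessor: confBroadcast.value.value unwraps the SerializableWritable held inside the broadcast variable. Below is a minimal usage sketch, not part of the patch; the input path, app name, and property key are placeholders, and it assumes sc.hadoopFile (old spark-package API) returns its HadoopRDD directly, so the cast is safe:

import org.apache.hadoop.io.{LongWritable, Text}
import org.apache.hadoop.mapred.TextInputFormat

import spark.SparkContext
import spark.rdd.HadoopRDD

object GetConfDemo {
  def main(args: Array[String]) {
    val sc = new SparkContext("local", "GetConfDemo")
    // hadoopFile constructs a HadoopRDD under the hood, so the cast below
    // reaches the new accessor (hypothetical usage, not from the patch).
    val lines = sc.hadoopFile("/tmp/input.txt",
      classOf[TextInputFormat], classOf[LongWritable], classOf[Text])
    val conf = lines.asInstanceOf[HadoopRDD[LongWritable, Text]].getConf
    // Read back a Hadoop setting on the driver, e.g. the default filesystem.
    println(conf.get("fs.default.name"))
    sc.stop()
  }
}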