diff --git a/core/src/main/scala/spark/CacheTracker.scala b/core/src/main/scala/spark/CacheTracker.scala
index 7040d4e147434e627e181ecda742697f73b589b6..0de829d7bcecefb81005d4e461b9346f521d5a3b 100644
--- a/core/src/main/scala/spark/CacheTracker.scala
+++ b/core/src/main/scala/spark/CacheTracker.scala
@@ -111,7 +111,7 @@ class CacheTracker(isMaster: Boolean, theCache: Cache) extends Logging {
     if (cachedVal != null) {
       // Split is in cache, so just return its values
       logInfo("Found partition in cache!")
-      return Iterator.fromArray(cachedVal.asInstanceOf[Array[T]])
+      return cachedVal.asInstanceOf[Array[T]].iterator
     } else {
       // Mark the split as loading (unless someone else marks it first)
       loading.synchronized {
@@ -119,7 +119,7 @@ class CacheTracker(isMaster: Boolean, theCache: Cache) extends Logging {
           while (loading.contains(key)) {
             try {loading.wait()} catch {case _ =>}
           }
-          return Iterator.fromArray(cache.get(key).asInstanceOf[Array[T]])
+          return cache.get(key).asInstanceOf[Array[T]].iterator
         } else {
           loading.add(key)
         }
@@ -138,7 +138,7 @@ class CacheTracker(isMaster: Boolean, theCache: Cache) extends Logging {
         loading.notifyAll()
       }
       future.apply() // Wait for the reply from the cache tracker
-      return Iterator.fromArray(array)
+      return array.iterator
     }
   }
diff --git a/core/src/main/scala/spark/RDD.scala b/core/src/main/scala/spark/RDD.scala
index 6334896cb6354bf591305ca99bc5c20ff5e7c74f..590106388a4e11ae57d4984c4509e5072a8da1cd 100644
--- a/core/src/main/scala/spark/RDD.scala
+++ b/core/src/main/scala/spark/RDD.scala
@@ -178,7 +178,7 @@ class SplitRDD[T: ClassManifest](prev: RDD[T])
 extends RDD[Array[T]](prev.context) {
   override def splits = prev.splits
   override val dependencies = List(new OneToOneDependency(prev))
-  override def compute(split: Split) = Iterator.fromArray(Array(prev.iterator(split).toArray))
+  override def compute(split: Split) = Array(prev.iterator(split).toArray).iterator
 }
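
For reference (not part of the patch): every hunk above makes the same substitution, replacing Iterator.fromArray(xs) with xs.iterator. Iterator.fromArray was deprecated in Scala 2.8 and later removed; calling .iterator on an Array goes through the standard ArrayOps implicit conversion and yields an equivalent Iterator[T]. A minimal standalone sketch of the substitution (names here are illustrative, not from the Spark sources):

// IteratorMigrationExample.scala -- illustrative only
object IteratorMigrationExample {
  def main(args: Array[String]): Unit = {
    val array: Array[Int] = Array(1, 2, 3)

    // Old style (pre-Scala 2.8): Iterator.fromArray(array)
    // New style: the ArrayOps implicit conversion provides .iterator
    val it: Iterator[Int] = array.iterator

    println(it.mkString(", ")) // prints: 1, 2, 3
  }
}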