Skip to content
Snippets Groups Projects
Commit 29ac7ea5 authored by Punya Biswal's avatar Punya Biswal Committed by Aaron Davidson
Browse files

Migrate Java code to Scala or move it to src/main/java

These classes can't be migrated:
  StorageLevels: impossible to create static fields in Scala
  JavaSparkContextVarargsWorkaround: incompatible varargs
  JavaAPISuite: should test Java APIs in pure Java (for sanity)

Author: Punya Biswal <pbiswal@palantir.com>

Closes #605 from punya/move-java-sources and squashes the following commits:

25b00b2 [Punya Biswal] Remove redundant type param; reformat
853da46 [Punya Biswal] Use factory method rather than constructor
e5d53d9 [Punya Biswal] Migrate Java code to Scala or move it to src/main/java
parent 1aa4f8af
No related branches found
No related tags found
No related merge requests found
Showing
with 56 additions and 92 deletions
...@@ -23,17 +23,17 @@ import org.apache.spark.storage.StorageLevel; ...@@ -23,17 +23,17 @@ import org.apache.spark.storage.StorageLevel;
* Expose some commonly useful storage level constants. * Expose some commonly useful storage level constants.
*/ */
public class StorageLevels { public class StorageLevels {
public static final StorageLevel NONE = new StorageLevel(false, false, false, 1); public static final StorageLevel NONE = create(false, false, false, 1);
public static final StorageLevel DISK_ONLY = new StorageLevel(true, false, false, 1); public static final StorageLevel DISK_ONLY = create(true, false, false, 1);
public static final StorageLevel DISK_ONLY_2 = new StorageLevel(true, false, false, 2); public static final StorageLevel DISK_ONLY_2 = create(true, false, false, 2);
public static final StorageLevel MEMORY_ONLY = new StorageLevel(false, true, true, 1); public static final StorageLevel MEMORY_ONLY = create(false, true, true, 1);
public static final StorageLevel MEMORY_ONLY_2 = new StorageLevel(false, true, true, 2); public static final StorageLevel MEMORY_ONLY_2 = create(false, true, true, 2);
public static final StorageLevel MEMORY_ONLY_SER = new StorageLevel(false, true, false, 1); public static final StorageLevel MEMORY_ONLY_SER = create(false, true, false, 1);
public static final StorageLevel MEMORY_ONLY_SER_2 = new StorageLevel(false, true, false, 2); public static final StorageLevel MEMORY_ONLY_SER_2 = create(false, true, false, 2);
public static final StorageLevel MEMORY_AND_DISK = new StorageLevel(true, true, true, 1); public static final StorageLevel MEMORY_AND_DISK = create(true, true, true, 1);
public static final StorageLevel MEMORY_AND_DISK_2 = new StorageLevel(true, true, true, 2); public static final StorageLevel MEMORY_AND_DISK_2 = create(true, true, true, 2);
public static final StorageLevel MEMORY_AND_DISK_SER = new StorageLevel(true, true, false, 1); public static final StorageLevel MEMORY_AND_DISK_SER = create(true, true, false, 1);
public static final StorageLevel MEMORY_AND_DISK_SER_2 = new StorageLevel(true, true, false, 2); public static final StorageLevel MEMORY_AND_DISK_SER_2 = create(true, true, false, 2);
/** /**
* Create a new StorageLevel object. * Create a new StorageLevel object.
......
...@@ -15,28 +15,25 @@ ...@@ -15,28 +15,25 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.spark; package org.apache.spark
import java.io.File; import java.io.File
/** /**
* Resolves paths to files added through `SparkContext.addFile()`. * Resolves paths to files added through `SparkContext.addFile()`.
*/ */
public class SparkFiles { object SparkFiles {
private SparkFiles() {}
/** /**
* Get the absolute path of a file added through `SparkContext.addFile()`. * Get the absolute path of a file added through `SparkContext.addFile()`.
*/ */
public static String get(String filename) { def get(filename: String): String =
return new File(getRootDirectory(), filename).getAbsolutePath(); new File(getRootDirectory(), filename).getAbsolutePath()
}
/** /**
* Get the root directory that contains files added through `SparkContext.addFile()`. * Get the root directory that contains files added through `SparkContext.addFile()`.
*/ */
public static String getRootDirectory() { def getRootDirectory(): String =
return SparkEnv.get().sparkFilesDir(); SparkEnv.get.sparkFilesDir
}
} }
...@@ -15,16 +15,16 @@ ...@@ -15,16 +15,16 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.spark.api.java.function; package org.apache.spark.api.java.function
import java.io.Serializable; import java.lang.{Double => JDouble, Iterable => JIterable}
/** /**
* A function that returns zero or more records of type Double from each input record. * A function that returns zero or more records of type Double from each input record.
*/ */
// DoubleFlatMapFunction does not extend FlatMapFunction because flatMap is // DoubleFlatMapFunction does not extend FlatMapFunction because flatMap is
// overloaded for both FlatMapFunction and DoubleFlatMapFunction. // overloaded for both FlatMapFunction and DoubleFlatMapFunction.
public abstract class DoubleFlatMapFunction<T> extends WrappedFunction1<T, Iterable<Double>> abstract class DoubleFlatMapFunction[T] extends WrappedFunction1[T, JIterable[JDouble]]
implements Serializable { with Serializable {
// Intentionally left blank // Intentionally left blank
} }
...@@ -15,16 +15,15 @@ ...@@ -15,16 +15,15 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.spark.api.java.function; package org.apache.spark.api.java.function
import java.io.Serializable; import java.lang.{Double => JDouble}
/** /**
* A function that returns Doubles, and can be used to construct DoubleRDDs. * A function that returns Doubles, and can be used to construct DoubleRDDs.
*/ */
// DoubleFunction does not extend Function because some UDF functions, like map, // DoubleFunction does not extend Function because some UDF functions, like map,
// are overloaded for both Function and DoubleFunction. // are overloaded for both Function and DoubleFunction.
public abstract class DoubleFunction<T> extends WrappedFunction1<T, Double> abstract class DoubleFunction[T] extends WrappedFunction1[T, JDouble] with Serializable {
implements Serializable {
// Intentionally left blank // Intentionally left blank
} }
...@@ -15,21 +15,17 @@ ...@@ -15,21 +15,17 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.spark.api.java.function; package org.apache.spark.api.java.function
import java.io.Serializable; import scala.reflect.ClassTag
import org.apache.spark.api.java.JavaSparkContext
import scala.reflect.ClassTag;
import scala.reflect.ClassTag$;
/** /**
* Base class for functions whose return types do not create special RDDs. PairFunction and * Base class for functions whose return types do not create special RDDs. PairFunction and
* DoubleFunction are handled separately, to allow PairRDDs and DoubleRDDs to be constructed * DoubleFunction are handled separately, to allow PairRDDs and DoubleRDDs to be constructed
* when mapping RDDs of other types. * when mapping RDDs of other types.
*/ */
public abstract class Function<T, R> extends WrappedFunction1<T, R> implements Serializable { abstract class Function[T, R] extends WrappedFunction1[T, R] with Serializable {
public ClassTag<R> returnType() { def returnType(): ClassTag[R] = JavaSparkContext.fakeClassTag
return ClassTag$.MODULE$.apply(Object.class);
}
} }
...@@ -15,21 +15,15 @@ ...@@ -15,21 +15,15 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.spark.api.java.function; package org.apache.spark.api.java.function
import java.io.Serializable; import scala.reflect.ClassTag
import org.apache.spark.api.java.JavaSparkContext
import scala.reflect.ClassTag;
import scala.reflect.ClassTag$;
/** /**
* A two-argument function that takes arguments of type T1 and T2 and returns an R. * A two-argument function that takes arguments of type T1 and T2 and returns an R.
*/ */
public abstract class Function2<T1, T2, R> extends WrappedFunction2<T1, T2, R> abstract class Function2[T1, T2, R] extends WrappedFunction2[T1, T2, R] with Serializable {
implements Serializable { def returnType(): ClassTag[R] = JavaSparkContext.fakeClassTag
public ClassTag<R> returnType() {
return (ClassTag<R>) ClassTag$.MODULE$.apply(Object.class);
}
} }
...@@ -15,21 +15,14 @@ ...@@ -15,21 +15,14 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.spark.api.java.function; package org.apache.spark.api.java.function
import java.io.Serializable; import org.apache.spark.api.java.JavaSparkContext
import scala.reflect.ClassTag
import scala.reflect.ClassTag;
import scala.reflect.ClassTag$;
/** /**
* A three-argument function that takes arguments of type T1, T2 and T3 and returns an R. * A three-argument function that takes arguments of type T1, T2 and T3 and returns an R.
*/ */
public abstract class Function3<T1, T2, T3, R> extends WrappedFunction3<T1, T2, T3, R> abstract class Function3[T1, T2, T3, R] extends WrappedFunction3[T1, T2, T3, R] with Serializable {
implements Serializable { def returnType(): ClassTag[R] = JavaSparkContext.fakeClassTag
public ClassTag<R> returnType() {
return (ClassTag<R>) ClassTag$.MODULE$.apply(Object.class);
}
} }
...@@ -15,13 +15,11 @@ ...@@ -15,13 +15,11 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.spark.api.java.function; package org.apache.spark.api.java.function
import java.io.Serializable; import java.lang.{Iterable => JIterable}
import org.apache.spark.api.java.JavaSparkContext
import scala.Tuple2; import scala.reflect.ClassTag
import scala.reflect.ClassTag;
import scala.reflect.ClassTag$;
/** /**
* A function that returns zero or more key-value pair records from each input record. The * A function that returns zero or more key-value pair records from each input record. The
...@@ -29,15 +27,10 @@ import scala.reflect.ClassTag$; ...@@ -29,15 +27,10 @@ import scala.reflect.ClassTag$;
*/ */
// PairFlatMapFunction does not extend FlatMapFunction because flatMap is // PairFlatMapFunction does not extend FlatMapFunction because flatMap is
// overloaded for both FlatMapFunction and PairFlatMapFunction. // overloaded for both FlatMapFunction and PairFlatMapFunction.
public abstract class PairFlatMapFunction<T, K, V> abstract class PairFlatMapFunction[T, K, V] extends WrappedFunction1[T, JIterable[(K, V)]]
extends WrappedFunction1<T, Iterable<Tuple2<K, V>>> with Serializable {
implements Serializable {
public ClassTag<K> keyType() { def keyType(): ClassTag[K] = JavaSparkContext.fakeClassTag
return (ClassTag<K>) ClassTag$.MODULE$.apply(Object.class);
}
public ClassTag<V> valueType() { def valueType(): ClassTag[V] = JavaSparkContext.fakeClassTag
return (ClassTag<V>) ClassTag$.MODULE$.apply(Object.class);
}
} }
...@@ -15,27 +15,19 @@ ...@@ -15,27 +15,19 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.spark.api.java.function; package org.apache.spark.api.java.function
import java.io.Serializable; import scala.reflect.ClassTag
import org.apache.spark.api.java.JavaSparkContext
import scala.Tuple2;
import scala.reflect.ClassTag;
import scala.reflect.ClassTag$;
/** /**
* A function that returns key-value pairs (Tuple2<K, V>), and can be used to construct PairRDDs. * A function that returns key-value pairs (Tuple2<K, V>), and can be used to construct PairRDDs.
*/ */
// PairFunction does not extend Function because some UDF functions, like map, // PairFunction does not extend Function because some UDF functions, like map,
// are overloaded for both Function and PairFunction. // are overloaded for both Function and PairFunction.
public abstract class PairFunction<T, K, V> extends WrappedFunction1<T, Tuple2<K, V>> abstract class PairFunction[T, K, V] extends WrappedFunction1[T, (K, V)] with Serializable {
implements Serializable {
public ClassTag<K> keyType() { def keyType(): ClassTag[K] = JavaSparkContext.fakeClassTag
return (ClassTag<K>) ClassTag$.MODULE$.apply(Object.class);
}
public ClassTag<V> valueType() { def valueType(): ClassTag[V] = JavaSparkContext.fakeClassTag
return (ClassTag<V>) ClassTag$.MODULE$.apply(Object.class);
}
} }
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment