Commit e163e31c authored by Holden Karau

Add spaces

parent 7de180fd
@@ -39,6 +39,7 @@ class DoubleRDDSuite extends FunSuite with SharedSparkContext {
     assert(histogramResults === expectedHistogramResults)
     assert(histogramResults2 === expectedHistogramResults)
   }
+
   test("WorksWithOutOfRangeWithOneBucket") {
     // Verify that if all of the elements are out of range the counts are zero
     val rdd = sc.parallelize(Seq(10.01, -0.01))
@@ -49,6 +50,7 @@ class DoubleRDDSuite extends FunSuite with SharedSparkContext {
     assert(histogramResults === expectedHistogramResults)
     assert(histogramResults2 === expectedHistogramResults)
   }
+
   test("WorksInRangeWithOneBucket") {
     // Verify the basic case of one bucket and all elements in that bucket works
     val rdd = sc.parallelize(Seq(1, 2, 3, 4))
@@ -59,6 +61,7 @@ class DoubleRDDSuite extends FunSuite with SharedSparkContext {
     assert(histogramResults === expectedHistogramResults)
     assert(histogramResults2 === expectedHistogramResults)
   }
+
   test("WorksInRangeWithOneBucketExactMatch") {
     // Verify the basic case of one bucket and all elements in that bucket works
     val rdd = sc.parallelize(Seq(1, 2, 3, 4))
@@ -69,6 +72,7 @@ class DoubleRDDSuite extends FunSuite with SharedSparkContext {
     assert(histogramResults === expectedHistogramResults)
     assert(histogramResults2 === expectedHistogramResults)
   }
+
   test("WorksWithOutOfRangeWithTwoBuckets") {
     // Verify that out of range works with two buckets
     val rdd = sc.parallelize(Seq(10.01, -0.01))
@@ -79,6 +83,7 @@ class DoubleRDDSuite extends FunSuite with SharedSparkContext {
     assert(histogramResults === expectedHistogramResults)
     assert(histogramResults2 === expectedHistogramResults)
   }
+
   test("WorksWithOutOfRangeWithTwoUnEvenBuckets") {
     // Verify that out of range works with two un even buckets
     val rdd = sc.parallelize(Seq(10.01, -0.01))
@@ -87,6 +92,7 @@ class DoubleRDDSuite extends FunSuite with SharedSparkContext {
     val expectedHistogramResults = Array(0, 0)
     assert(histogramResults === expectedHistogramResults)
   }
+
   test("WorksInRangeWithTwoBuckets") {
     // Make sure that it works with two equally spaced buckets and elements in each
     val rdd = sc.parallelize(Seq(1, 2, 3, 5, 6))
@@ -97,6 +103,7 @@ class DoubleRDDSuite extends FunSuite with SharedSparkContext {
     assert(histogramResults === expectedHistogramResults)
     assert(histogramResults2 === expectedHistogramResults)
   }
+
   test("WorksInRangeWithTwoBucketsAndNaN") {
     // Make sure that it works with two equally spaced buckets and elements in each
     val rdd = sc.parallelize(Seq(1, 2, 3, 5, 6, Double.NaN))
@@ -107,6 +114,7 @@ class DoubleRDDSuite extends FunSuite with SharedSparkContext {
     assert(histogramResults === expectedHistogramResults)
     assert(histogramResults2 === expectedHistogramResults)
   }
+
   test("WorksInRangeWithTwoUnevenBuckets") {
     // Make sure that it works with two unequally spaced buckets and elements in each
     val rdd = sc.parallelize(Seq(1, 2, 3, 5, 6))
@@ -115,6 +123,7 @@ class DoubleRDDSuite extends FunSuite with SharedSparkContext {
     val expectedHistogramResults = Array(3, 2)
     assert(histogramResults === expectedHistogramResults)
   }
+
   test("WorksMixedRangeWithTwoUnevenBuckets") {
     // Make sure that it works with two unequally spaced buckets and elements in each
     val rdd = sc.parallelize(Seq(-0.01, 0.0, 1, 2, 3, 5, 6, 11.0, 11.01))
@@ -123,6 +132,7 @@ class DoubleRDDSuite extends FunSuite with SharedSparkContext {
     val expectedHistogramResults = Array(4, 3)
     assert(histogramResults === expectedHistogramResults)
   }
+
   test("WorksMixedRangeWithFourUnevenBuckets") {
     // Make sure that it works with two unequally spaced buckets and elements in each
     val rdd = sc.parallelize(Seq(-0.01, 0.0, 1, 2, 3, 5, 6, 11.01, 12.0, 199.0,
@@ -132,6 +142,7 @@ class DoubleRDDSuite extends FunSuite with SharedSparkContext {
     val expectedHistogramResults = Array(4, 2, 1, 3)
     assert(histogramResults === expectedHistogramResults)
   }
+
   test("WorksMixedRangeWithUnevenBucketsAndNaN") {
     // Make sure that it works with two unequally spaced buckets and elements in each
     val rdd = sc.parallelize(Seq(-0.01, 0.0, 1, 2, 3, 5, 6, 11.01, 12.0, 199.0,
@@ -161,6 +172,7 @@ class DoubleRDDSuite extends FunSuite with SharedSparkContext {
     val expectedHistogramResults = Array(4, 2, 1, 2, 4)
     assert(histogramResults === expectedHistogramResults)
   }
+
   test("WorksWithOutOfRangeWithInfiniteBuckets") {
     // Verify that out of range works with two buckets
     val rdd = sc.parallelize(Seq(10.01, -0.01, Double.NaN))
@@ -214,6 +226,7 @@ class DoubleRDDSuite extends FunSuite with SharedSparkContext {
     assert(histogramResults === expectedHistogramResults)
     assert(histogramBuckets === expectedHistogramBuckets)
   }
+
   test("WorksWithoutBucketsBasicTwo") {
     // Verify the basic case of one bucket and all elements in that bucket works
     val rdd = sc.parallelize(Seq(1, 2, 3, 4))
@@ -223,6 +236,7 @@ class DoubleRDDSuite extends FunSuite with SharedSparkContext {
     assert(histogramResults === expectedHistogramResults)
     assert(histogramBuckets === expectedHistogramBuckets)
   }
+
   test("WorksWithoutBucketsWithMoreRequestedThanElements") {
     // Verify the basic case of one bucket and all elements in that bucket works
     val rdd = sc.parallelize(Seq(1, 2))
...
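For context, the suite exercises the histogram methods on RDDs of doubles. The sketch below is a minimal, standalone illustration of the two overloads these tests appear to call (explicit bucket boundaries versus a requested bucket count); the object name, local master setting, and sample values are placeholders for illustration and are not part of this commit.

import org.apache.spark.{SparkConf, SparkContext}

object HistogramSketch {
  def main(args: Array[String]): Unit = {
    // Local context purely for illustration; the test suite itself relies on SharedSparkContext.
    val sc = new SparkContext(new SparkConf().setMaster("local[2]").setAppName("histogram-sketch"))
    // On Spark versions before 1.3 you may also need: import org.apache.spark.SparkContext._
    try {
      val rdd = sc.parallelize(Seq(1.0, 2.0, 3.0, 5.0, 6.0))

      // Explicit boundaries: two buckets, [1, 3) and [3, 6], with the last bucket inclusive.
      val counts: Array[Long] = rdd.histogram(Array(1.0, 3.0, 6.0)) // expected Array(2, 3)

      // Requested bucket count: Spark derives evenly spaced boundaries from min to max
      // and returns them together with the per-bucket counts.
      val (buckets, bucketCounts) = rdd.histogram(2)

      println(counts.mkString(", "))
      println(buckets.mkString(", ") + " -> " + bucketCounts.mkString(", "))
    } finally {
      sc.stop()
    }
  }
}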