Skip to content
Snippets Groups Projects
Commit 28369ff7 authored by Matei Zaharia's avatar Matei Zaharia
Browse files

Merge pull request #829 from JoshRosen/pyspark-unit-tests-python-2.6

Fix PySpark unit tests on Python 2.6
parents 1a13460c 7a9abb9d
No related branches found
No related tags found
No related merge requests found
...@@ -64,7 +64,7 @@ class TestCheckpoint(PySparkTestCase): ...@@ -64,7 +64,7 @@ class TestCheckpoint(PySparkTestCase):
flatMappedRDD = parCollection.flatMap(lambda x: range(1, x + 1)) flatMappedRDD = parCollection.flatMap(lambda x: range(1, x + 1))
self.assertFalse(flatMappedRDD.isCheckpointed()) self.assertFalse(flatMappedRDD.isCheckpointed())
self.assertIsNone(flatMappedRDD.getCheckpointFile()) self.assertTrue(flatMappedRDD.getCheckpointFile() is None)
flatMappedRDD.checkpoint() flatMappedRDD.checkpoint()
result = flatMappedRDD.collect() result = flatMappedRDD.collect()
...@@ -79,13 +79,13 @@ class TestCheckpoint(PySparkTestCase): ...@@ -79,13 +79,13 @@ class TestCheckpoint(PySparkTestCase):
flatMappedRDD = parCollection.flatMap(lambda x: [x]) flatMappedRDD = parCollection.flatMap(lambda x: [x])
self.assertFalse(flatMappedRDD.isCheckpointed()) self.assertFalse(flatMappedRDD.isCheckpointed())
self.assertIsNone(flatMappedRDD.getCheckpointFile()) self.assertTrue(flatMappedRDD.getCheckpointFile() is None)
flatMappedRDD.checkpoint() flatMappedRDD.checkpoint()
flatMappedRDD.count() # forces a checkpoint to be computed flatMappedRDD.count() # forces a checkpoint to be computed
time.sleep(1) # 1 second time.sleep(1) # 1 second
self.assertIsNotNone(flatMappedRDD.getCheckpointFile()) self.assertTrue(flatMappedRDD.getCheckpointFile() is not None)
recovered = self.sc._checkpointFile(flatMappedRDD.getCheckpointFile()) recovered = self.sc._checkpointFile(flatMappedRDD.getCheckpointFile())
self.assertEquals([1, 2, 3, 4], recovered.collect()) self.assertEquals([1, 2, 3, 4], recovered.collect())
...@@ -164,9 +164,12 @@ class TestDaemon(unittest.TestCase): ...@@ -164,9 +164,12 @@ class TestDaemon(unittest.TestCase):
time.sleep(1) time.sleep(1)
# daemon should no longer accept connections # daemon should no longer accept connections
with self.assertRaises(EnvironmentError) as trap: try:
self.connect(port) self.connect(port)
self.assertEqual(trap.exception.errno, ECONNREFUSED) except EnvironmentError as exception:
self.assertEqual(exception.errno, ECONNREFUSED)
else:
self.fail("Expected EnvironmentError to be raised")
def test_termination_stdin(self): def test_termination_stdin(self):
"""Ensure that daemon and workers terminate when stdin is closed.""" """Ensure that daemon and workers terminate when stdin is closed."""
......
...@@ -26,20 +26,18 @@ cd "$FWDIR/python" ...@@ -26,20 +26,18 @@ cd "$FWDIR/python"
FAILED=0 FAILED=0
$FWDIR/pyspark pyspark/rdd.py rm -f unit-tests.log
FAILED=$(($?||$FAILED))
function run_test() {
$FWDIR/pyspark pyspark/context.py $FWDIR/pyspark $1 2>&1 | tee -a unit-tests.log
FAILED=$(($?||$FAILED)) FAILED=$((PIPESTATUS[0]||$FAILED))
}
$FWDIR/pyspark -m doctest pyspark/broadcast.py
FAILED=$(($?||$FAILED)) run_test "pyspark/rdd.py"
run_test "pyspark/context.py"
$FWDIR/pyspark -m doctest pyspark/accumulators.py run_test "-m doctest pyspark/broadcast.py"
FAILED=$(($?||$FAILED)) run_test "-m doctest pyspark/accumulators.py"
run_test "pyspark/tests.py"
$FWDIR/pyspark -m unittest pyspark.tests
FAILED=$(($?||$FAILED))
if [[ $FAILED != 0 ]]; then if [[ $FAILED != 0 ]]; then
echo -en "\033[31m" # Red echo -en "\033[31m" # Red
......
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment