diff --git a/python/pyspark/tests.py b/python/pyspark/tests.py
index df7235756d37df12da381b4d6163fc13dd9b1311..52297d44e630a00e6b5cd4c79fab1cb7b9e9644e 100644
--- a/python/pyspark/tests.py
+++ b/python/pyspark/tests.py
@@ -108,5 +108,14 @@ class TestAddFile(PySparkTestCase):
         self.assertEqual("Hello World!", UserClass().hello())
 
 
+class TestIO(PySparkTestCase):
+
+    def test_stdout_redirection(self):
+        import subprocess
+        def func(x):
+            # argv list, shell=False: fixed command needs no shell; still writes to stdout
+            subprocess.check_call(['ls'])
+        self.sc.parallelize([1]).foreach(func)
+
+
 if __name__ == "__main__":
     unittest.main()
diff --git a/python/pyspark/worker.py b/python/pyspark/worker.py
index 9622e0cfe456275f06fb989a2fc92d8106db7a30..812e7a9da5a030875cf8d632c7e2040285030599 100644
--- a/python/pyspark/worker.py
+++ b/python/pyspark/worker.py
@@ -1,6 +1,7 @@
 """
 Worker that receives input from Piped RDD.
 """
+import os
 import sys
 import traceback
 from base64 import standard_b64decode
@@ -15,8 +16,8 @@ from pyspark.serializers import write_with_length, read_with_length, write_int,
 
 
 # Redirect stdout to stderr so that users must return values from functions.
-old_stdout = sys.stdout
-sys.stdout = sys.stderr
+old_stdout = os.fdopen(os.dup(1), 'w')
+os.dup2(2, 1)
 
 
 def load_obj():