diff --git a/python/pyspark/conf.py b/python/pyspark/conf.py
index c111e2e90f607b7e06db3321be3ef8fc0592c438..d72aed6a30ec1eeb75ec2b6ced790283b210a378 100644
--- a/python/pyspark/conf.py
+++ b/python/pyspark/conf.py
@@ -134,7 +134,12 @@ class SparkConf(object):
 
     def get(self, key, defaultValue=None):
         """Get the configured value for some key, or return a default otherwise."""
-        return self._jconf.get(key, defaultValue)
+        if defaultValue is None:   # Py4J doesn't call the right get() if we pass None
+            if not self._jconf.contains(key):
+                return None
+            return self._jconf.get(key)
+        else:
+            return self._jconf.get(key, defaultValue)
 
     def getAll(self):
         """Get all values as a list of key-value pairs."""
diff --git a/python/pyspark/context.py b/python/pyspark/context.py
index d77dd7676599d71247ba0bf1f1312d8ff2f4a61f..f955aad7a4f12bc21c0aabc4fd2b9ffa58caf576 100644
--- a/python/pyspark/context.py
+++ b/python/pyspark/context.py
@@ -92,11 +92,13 @@ class SparkContext(object):
             self.serializer = BatchedSerializer(self._unbatched_serializer,
                                                 batchSize)
 
-        # Set parameters passed directly to us on the conf; these operations will be
-        # no-ops if the parameters were None
-        self._conf.setMaster(master)
-        self._conf.setAppName(appName)
-        self._conf.setSparkHome(sparkHome)
+        # Set any parameters passed directly to us on the conf
+        if master:
+            self._conf.setMaster(master)
+        if appName:
+            self._conf.setAppName(appName)
+        if sparkHome:
+            self._conf.setSparkHome(sparkHome)
         if environment:
             for key, value in environment.iteritems():
                 self._conf.setExecutorEnv(key, value)
@@ -111,7 +113,7 @@ class SparkContext(object):
         # the classpath or an external config file
         self.master = self._conf.get("spark.master")
         self.appName = self._conf.get("spark.app.name")
-        self.sparkHome = self._conf.getOrElse("spark.home", None)
+        self.sparkHome = self._conf.get("spark.home", None)
         for (k, v) in self._conf.getAll():
             if k.startswith("spark.executorEnv."):
                 varName = k[len("spark.executorEnv."):]