diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index 8cecaff5dd78624c5f9fbbb1f31969757c051e57..ae52de409e0945706b6ed9cea7f320e2e4741b0c 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -54,7 +54,7 @@ class SparkConf(loadDefaults: Boolean) extends Serializable with Cloneable {
   /** Set a name for your application. Shown in the Spark web UI. */
   def setAppName(name: String): SparkConf = {
     if (name != null) {
-      settings("spark.appName") = name
+      settings("spark.app.name") = name
     }
     this
   }
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 6f54fa7a5a631ec3154ef9db8d59de33df7e8e94..810ed1860bc532b0fd60daebcbea553b27ecbf50 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -112,7 +112,7 @@ class SparkContext(
   if (!conf.contains("spark.master")) {
     throw new SparkException("A master URL must be set in your configuration")
   }
-  if (!conf.contains("spark.appName")) {
+  if (!conf.contains("spark.app.name")) {
     throw new SparkException("An application must be set in your configuration")
   }
 
@@ -127,7 +127,7 @@ class SparkContext(
   }
 
   val master = conf.get("spark.master")
-  val appName = conf.get("spark.appName")
+  val appName = conf.get("spark.app.name")
 
   val isLocal = (master == "local" || master.startsWith("local["))
 
diff --git a/core/src/main/scala/org/apache/spark/ui/env/EnvironmentUI.scala b/core/src/main/scala/org/apache/spark/ui/env/EnvironmentUI.scala
index 91fa00a66c51cab6235ea63df9c87dd1ed016823..6b4602f928cd232b83f333064ee1a4aa87fb841b 100644
--- a/core/src/main/scala/org/apache/spark/ui/env/EnvironmentUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/env/EnvironmentUI.scala
@@ -48,12 +48,15 @@ private[spark] class EnvironmentUI(sc: SparkContext) {
     def jvmTable =
       UIUtils.listingTable(Seq("Name", "Value"), jvmRow, jvmInformation, fixedWidth = true)
 
-    val properties = System.getProperties.iterator.toSeq
-    val classPathProperty = properties.find { case (k, v) =>
-      k.contains("java.class.path")
+    val sparkProperties = sc.conf.getAll.sorted
+
+    val systemProperties = System.getProperties.iterator.toSeq
+    val classPathProperty = systemProperties.find { case (k, v) =>
+      k == "java.class.path"
     }.getOrElse(("", ""))
-    val sparkProperties = properties.filter(_._1.startsWith("spark")).sorted
-    val otherProperties = properties.diff(sparkProperties :+ classPathProperty).sorted
+    val otherProperties = systemProperties.filter { case (k, v) =>
+      k != "java.class.path" && !k.startsWith("spark.")
+    }.sorted
 
     val propertyHeaders = Seq("Name", "Value")
     def propertyRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
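
The environment page now takes Spark settings from sc.conf.getAll rather than scraping them out of JVM system properties, and the system properties are partitioned by an exact java.class.path match plus a spark.* prefix filter. A standalone sketch of that partitioning, with a hand-rolled property list standing in for System.getProperties (values are illustrative):

    // Illustrative stand-in for System.getProperties.iterator.toSeq
    val systemProperties = Seq(
      "java.class.path"   -> "/opt/spark/jars/*",
      "java.version"      -> "1.7.0",
      "spark.driver.port" -> "50001")

    val classPathProperty = systemProperties
      .find { case (k, _) => k == "java.class.path" }   // exact match, not contains
      .getOrElse(("", ""))

    // Anything that is neither the class path nor a spark.* entry lands in the
    // "other properties" table; spark.* system properties are dropped here because
    // Spark settings are now listed from sc.conf.getAll instead.
    val otherProperties = systemProperties.filter { case (k, _) =>
      k != "java.class.path" && !k.startsWith("spark.")
    }.sorted
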
diff --git a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
index aaf0b80fe9baced567481b4a3d1ad5920ba8dfb9..77c7b829b310cc34f54ee8a48598799df2a5d9aa 100644
--- a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
@@ -46,7 +46,7 @@ class SparkConfSuite extends FunSuite with LocalSparkContext {
     conf.setExecutorEnv(Seq(("VAR2", "value2"), ("VAR3", "value3")))
 
     assert(conf.get("spark.master") === "local[3]")
-    assert(conf.get("spark.appName") === "My app")
+    assert(conf.get("spark.app.name") === "My app")
     assert(conf.get("spark.home") === "/path")
     assert(conf.get("spark.jars") === "a.jar,b.jar")
     assert(conf.get("spark.executorEnv.VAR1") === "value1")
diff --git a/python/pyspark/conf.py b/python/pyspark/conf.py
index cf98b0e071e8dbcb602671470783866a2f518dca..c07dd883078236b0b9dfd0e810fbb39520300e99 100644
--- a/python/pyspark/conf.py
+++ b/python/pyspark/conf.py
@@ -23,7 +23,7 @@
 <pyspark.conf.SparkConf object at ...>
 >>> conf.get("spark.master")
 u'local'
->>> conf.get("spark.appName")
+>>> conf.get("spark.app.name")
 u'My app'
 >>> sc = SparkContext(conf=conf)
 >>> sc.master
diff --git a/python/pyspark/context.py b/python/pyspark/context.py
index 8b028027ebfec327c7ebf5bb17d3461edd4c20b1..12ac0299e28b98a91490d9c3e4559b7dafda9a36 100644
--- a/python/pyspark/context.py
+++ b/python/pyspark/context.py
@@ -104,13 +104,13 @@ class SparkContext(object):
         # Check that we have at least the required parameters
         if not self.conf.contains("spark.master"):
             raise Exception("A master URL must be set in your configuration")
-        if not self.conf.contains("spark.appName"):
+        if not self.conf.contains("spark.app.name"):
             raise Exception("An application name must be set in your configuration")
 
         # Read back our properties from the conf in case we loaded some of them from
         # the classpath or an external config file
         self.master = self.conf.get("spark.master")
-        self.appName = self.conf.get("spark.appName")
+        self.appName = self.conf.get("spark.app.name")
         self.sparkHome = self.conf.getOrElse("spark.home", None)
         for (k, v) in self.conf.getAll():
             if k.startswith("spark.executorEnv."):