diff --git a/README.md b/README.md
index 2ddfe862a23d71a300bf0bf08bc8f0015c619620..c4170650f799a9e07b2ff757775b9b85e1a1114d 100644
--- a/README.md
+++ b/README.md
@@ -27,7 +27,7 @@ Or, for the Python API, the Python shell (`./pyspark`).
 Spark also comes with several sample programs in the `examples` directory.
 To run one of them, use `./run-example <class> <params>`. For example:
 
-    ./run-example spark.examples.SparkLR local[2]
+    ./run-example org.apache.spark.examples.SparkLR local[2]
 
 will run the Logistic Regression example locally on 2 CPUs.
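
Beyond `run-example`, the rename reaches into user applications: any job that imported `spark.*` now needs the `org.apache.spark` prefix. A minimal sketch of an updated application, assuming a local master (the object name and word-count logic are illustrative, not part of this patch):

    import org.apache.spark.SparkContext
    import org.apache.spark.SparkContext._   // pair-RDD implicits such as reduceByKey

    object WordCountSketch {
      def main(args: Array[String]) {
        val sc = new SparkContext("local[2]", "WordCountSketch")
        val counts = sc.textFile("README.md")
                       .flatMap(_.split(" "))
                       .map(word => (word, 1))
                       .reduceByKey(_ + _)
        counts.take(10).foreach(println)
        sc.stop()
      }
    }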
 
diff --git a/assembly/pom.xml b/assembly/pom.xml
index 74990b636120de1fff01842fe478aea9127bae5a..d19f44d292b7f1651c80b8b0b0a25b2fb1bccf11 100644
--- a/assembly/pom.xml
+++ b/assembly/pom.xml
@@ -19,16 +19,16 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <parent>
-    <groupId>org.spark-project</groupId>
+    <groupId>org.apache.spark</groupId>
     <artifactId>spark-parent</artifactId>
     <version>0.8.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 
-  <groupId>org.spark-project</groupId>
+  <groupId>org.apache.spark</groupId>
   <artifactId>spark-assembly</artifactId>
   <name>Spark Project Assembly</name>
-  <url>http://spark-project.org/</url>
+  <url>http://spark.incubator.apache.org/</url>
 
   <repositories>
     <!-- A repository in the local filesystem for the Py4J JAR, which is not in Maven central -->
@@ -40,27 +40,27 @@
 
   <dependencies>
     <dependency>
-      <groupId>org.spark-project</groupId>
+      <groupId>org.apache.spark</groupId>
       <artifactId>spark-core</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
-      <groupId>org.spark-project</groupId>
+      <groupId>org.apache.spark</groupId>
       <artifactId>spark-bagel</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
-      <groupId>org.spark-project</groupId>
+      <groupId>org.apache.spark</groupId>
       <artifactId>spark-mllib</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
-      <groupId>org.spark-project</groupId>
+      <groupId>org.apache.spark</groupId>
       <artifactId>spark-repl</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
-      <groupId>org.spark-project</groupId>
+      <groupId>org.apache.spark</groupId>
       <artifactId>spark-streaming</artifactId>
       <version>${project.version}</version>
     </dependency>
@@ -121,7 +121,7 @@
       <id>hadoop2-yarn</id>
       <dependencies>
         <dependency>
-          <groupId>org.spark-project</groupId>
+          <groupId>org.apache.spark</groupId>
           <artifactId>spark-yarn</artifactId>
           <version>${project.version}</version>
         </dependency>
diff --git a/assembly/src/main/assembly/assembly.xml b/assembly/src/main/assembly/assembly.xml
index 4543b52c93176182300f03b67e9da8d056d531d3..47d3fa93d07657dc2401a4f2a8365b0a83fb8edb 100644
--- a/assembly/src/main/assembly/assembly.xml
+++ b/assembly/src/main/assembly/assembly.xml
@@ -30,9 +30,9 @@
     </fileSet>
     <fileSet>
       <directory>
-        ${project.parent.basedir}/core/src/main/resources/spark/ui/static/
+        ${project.parent.basedir}/core/src/main/resources/org/apache/spark/ui/static/
       </directory>
-      <outputDirectory>/ui-resources/spark/ui/static</outputDirectory>
+      <outputDirectory>/ui-resources/org/apache/spark/ui/static</outputDirectory>
       <includes>
         <include>**/*</include>
       </includes>
@@ -63,10 +63,10 @@
   <dependencySets>
     <dependencySet>
       <includes>
-        <include>org.spark-project:*:jar</include>
+        <include>org.apache.spark:*:jar</include>
       </includes>
       <excludes>
-        <exclude>org.spark-project:spark-assembly:jar</exclude>
+        <exclude>org.apache.spark:spark-assembly:jar</exclude>
       </excludes>
     </dependencySet>
     <dependencySet>
@@ -77,7 +77,7 @@
       <useProjectArtifact>false</useProjectArtifact>
       <excludes>
         <exclude>org.apache.hadoop:*:jar</exclude>
-        <exclude>org.spark-project:*:jar</exclude>
+        <exclude>org.apache.spark:*:jar</exclude>
       </excludes>
     </dependencySet>
   </dependencySets>
diff --git a/bagel/pom.xml b/bagel/pom.xml
index cbcf8d123930a0e1205a2ad90fa363db492d943b..51173c32b2c152ddfb3166df80deccad86f31c1a 100644
--- a/bagel/pom.xml
+++ b/bagel/pom.xml
@@ -19,21 +19,21 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <parent>
-    <groupId>org.spark-project</groupId>
+    <groupId>org.apache.spark</groupId>
     <artifactId>spark-parent</artifactId>
     <version>0.8.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 
-  <groupId>org.spark-project</groupId>
+  <groupId>org.apache.spark</groupId>
   <artifactId>spark-bagel</artifactId>
   <packaging>jar</packaging>
   <name>Spark Project Bagel</name>
-  <url>http://spark-project.org/</url>
+  <url>http://spark.incubator.apache.org/</url>
 
   <dependencies>
     <dependency>
-      <groupId>org.spark-project</groupId>
+      <groupId>org.apache.spark</groupId>
       <artifactId>spark-core</artifactId>
       <version>${project.version}</version>
     </dependency>
diff --git a/bagel/src/main/scala/spark/bagel/Bagel.scala b/bagel/src/main/scala/org/apache/spark/bagel/Bagel.scala
similarity index 86%
rename from bagel/src/main/scala/spark/bagel/Bagel.scala
rename to bagel/src/main/scala/org/apache/spark/bagel/Bagel.scala
index 80c8d53d2bbfe498d7d57b052bc716f3c06150cc..44e26bbb9e094c37527122bb3825197436c13219 100644
--- a/bagel/src/main/scala/spark/bagel/Bagel.scala
+++ b/bagel/src/main/scala/org/apache/spark/bagel/Bagel.scala
@@ -15,32 +15,31 @@
  * limitations under the License.
  */
 
-package spark.bagel
+package org.apache.spark.bagel
 
-import spark._
-import spark.SparkContext._
-
-import scala.collection.mutable.ArrayBuffer
-import storage.StorageLevel
+import org.apache.spark._
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
+import org.apache.spark.storage.StorageLevel
 
 object Bagel extends Logging {
   val DEFAULT_STORAGE_LEVEL = StorageLevel.MEMORY_AND_DISK
 
   /**
    * Runs a Bagel program.
-   * @param sc [[spark.SparkContext]] to use for the program.
+   * @param sc [[org.apache.spark.SparkContext]] to use for the program.
    * @param vertices vertices of the graph represented as an RDD of (Key, Vertex) pairs. Often the Key will be
    *                 the vertex id.
    * @param messages initial set of messages represented as an RDD of (Key, Message) pairs. Often this will be an
   *                 empty array, i.e. sc.parallelize(Array[(K, Message)]()).
-   * @param combiner [[spark.bagel.Combiner]] combines multiple individual messages to a given vertex into one
+   * @param combiner [[org.apache.spark.bagel.Combiner]] combines multiple individual messages to a given vertex into one
    *                message before sending (which often involves network I/O).
-   * @param aggregator [[spark.bagel.Aggregator]] performs a reduce across all vertices after each superstep,
+   * @param aggregator [[org.apache.spark.bagel.Aggregator]] performs a reduce across all vertices after each superstep,
    *                  and provides the result to each vertex in the next superstep.
-   * @param partitioner [[spark.Partitioner]] partitions values by key
+   * @param partitioner [[org.apache.spark.Partitioner]] partitions values by key
    * @param numPartitions number of partitions across which to split the graph.
    *                      Default is the default parallelism of the SparkContext
-   * @param storageLevel [[spark.storage.StorageLevel]] to use for caching of intermediate RDDs in each superstep.
+   * @param storageLevel [[org.apache.spark.storage.StorageLevel]] to use for caching of intermediate RDDs in each superstep.
    *                    Defaults to caching in memory.
    * @param compute function that takes a Vertex, optional set of (possibly combined) messages to the Vertex,
    *                optional Aggregator and the current superstep,
@@ -98,7 +97,7 @@ object Bagel extends Logging {
     verts
   }
 
-  /** Runs a Bagel program with no [[spark.bagel.Aggregator]] and the default storage level */
+  /** Runs a Bagel program with no [[org.apache.spark.bagel.Aggregator]] and the default storage level */
   def run[K: Manifest, V <: Vertex : Manifest, M <: Message[K] : Manifest, C: Manifest](
     sc: SparkContext,
     vertices: RDD[(K, V)],
@@ -110,7 +109,7 @@ object Bagel extends Logging {
     compute: (V, Option[C], Int) => (V, Array[M])
   ): RDD[(K, V)] = run(sc, vertices, messages, combiner, numPartitions, DEFAULT_STORAGE_LEVEL)(compute)
 
-  /** Runs a Bagel program with no [[spark.bagel.Aggregator]] */
+  /** Runs a Bagel program with no [[org.apache.spark.bagel.Aggregator]] */
   def run[K: Manifest, V <: Vertex : Manifest, M <: Message[K] : Manifest, C: Manifest](
     sc: SparkContext,
     vertices: RDD[(K, V)],
@@ -128,7 +127,7 @@ object Bagel extends Logging {
   }
 
   /**
-   * Runs a Bagel program with no [[spark.bagel.Aggregator]], default [[spark.HashPartitioner]]
+   * Runs a Bagel program with no [[org.apache.spark.bagel.Aggregator]], default [[org.apache.spark.HashPartitioner]]
    * and default storage level
    */
   def run[K: Manifest, V <: Vertex : Manifest, M <: Message[K] : Manifest, C: Manifest](
@@ -141,7 +140,7 @@ object Bagel extends Logging {
     compute: (V, Option[C], Int) => (V, Array[M])
   ): RDD[(K, V)] = run(sc, vertices, messages, combiner, numPartitions, DEFAULT_STORAGE_LEVEL)(compute)
 
-  /** Runs a Bagel program with no [[spark.bagel.Aggregator]] and the default [[spark.HashPartitioner]]*/
+  /** Runs a Bagel program with no [[org.apache.spark.bagel.Aggregator]] and the default [[org.apache.spark.HashPartitioner]] */
   def run[K: Manifest, V <: Vertex : Manifest, M <: Message[K] : Manifest, C: Manifest](
     sc: SparkContext,
     vertices: RDD[(K, V)],
@@ -159,8 +158,8 @@ object Bagel extends Logging {
   }
 
   /**
-   * Runs a Bagel program with no [[spark.bagel.Aggregator]], default [[spark.HashPartitioner]],
-   * [[spark.bagel.DefaultCombiner]] and the default storage level
+   * Runs a Bagel program with no [[org.apache.spark.bagel.Aggregator]], default [[org.apache.spark.HashPartitioner]],
+   * [[org.apache.spark.bagel.DefaultCombiner]] and the default storage level
    */
   def run[K: Manifest, V <: Vertex : Manifest, M <: Message[K] : Manifest](
     sc: SparkContext,
@@ -172,8 +171,8 @@ object Bagel extends Logging {
   ): RDD[(K, V)] = run(sc, vertices, messages, numPartitions, DEFAULT_STORAGE_LEVEL)(compute)
 
   /**
-   * Runs a Bagel program with no [[spark.bagel.Aggregator]], the default [[spark.HashPartitioner]]
-   * and [[spark.bagel.DefaultCombiner]]
+   * Runs a Bagel program with no [[org.apache.spark.bagel.Aggregator]], the default [[org.apache.spark.HashPartitioner]]
+   * and [[org.apache.spark.bagel.DefaultCombiner]]
    */
   def run[K: Manifest, V <: Vertex : Manifest, M <: Message[K] : Manifest](
     sc: SparkContext,
diff --git a/bagel/src/test/scala/bagel/BagelSuite.scala b/bagel/src/test/scala/org/apache/spark/bagel/BagelSuite.scala
similarity index 96%
rename from bagel/src/test/scala/bagel/BagelSuite.scala
rename to bagel/src/test/scala/org/apache/spark/bagel/BagelSuite.scala
index ef2d57fbd0a81c18f9777c8b90e16b54c4569c0c..7b954a477570f5c42e0b8ee4529b7234f751667e 100644
--- a/bagel/src/test/scala/bagel/BagelSuite.scala
+++ b/bagel/src/test/scala/org/apache/spark/bagel/BagelSuite.scala
@@ -15,16 +15,14 @@
  * limitations under the License.
  */
 
-package spark.bagel
+package org.apache.spark.bagel
 
-import org.scalatest.{FunSuite, Assertions, BeforeAndAfter}
+import org.scalatest.{BeforeAndAfter, FunSuite, Assertions}
 import org.scalatest.concurrent.Timeouts
 import org.scalatest.time.SpanSugar._
 
-import scala.collection.mutable.ArrayBuffer
-
-import spark._
-import storage.StorageLevel
+import org.apache.spark._
+import org.apache.spark.storage.StorageLevel
 
 class TestVertex(val active: Boolean, val age: Int) extends Vertex with Serializable
 class TestMessage(val targetId: String) extends Message[String] with Serializable
diff --git a/bin/start-master.sh b/bin/start-master.sh
index 2288fb19d7b0d022dbd603a4c26b61de0c9d4648..648c7ae75fe81ddafbd9394ee6f1f60f00f7c263 100755
--- a/bin/start-master.sh
+++ b/bin/start-master.sh
@@ -49,4 +49,4 @@ if [ "$SPARK_PUBLIC_DNS" = "" ]; then
     fi
 fi
 
-"$bin"/spark-daemon.sh start spark.deploy.master.Master 1 --ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT
+"$bin"/spark-daemon.sh start org.apache.spark.deploy.master.Master 1 --ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT
diff --git a/bin/start-slave.sh b/bin/start-slave.sh
index d6db16882d0206a5a58385188b53f55b7f365e7b..4eefa209443385881905ee0c7208ea56fa79e9e5 100755
--- a/bin/start-slave.sh
+++ b/bin/start-slave.sh
@@ -32,4 +32,4 @@ if [ "$SPARK_PUBLIC_DNS" = "" ]; then
     fi
 fi
 
-"$bin"/spark-daemon.sh start spark.deploy.worker.Worker "$@"
+"$bin"/spark-daemon.sh start org.apache.spark.deploy.worker.Worker "$@"
diff --git a/bin/stop-master.sh b/bin/stop-master.sh
index 31a610bf9df12c9f504194934e2d60d4633b34b5..310e33bedc057194d4b410f2335234f5e2f30613 100755
--- a/bin/stop-master.sh
+++ b/bin/stop-master.sh
@@ -24,4 +24,4 @@ bin=`cd "$bin"; pwd`
 
 . "$bin/spark-config.sh"
 
-"$bin"/spark-daemon.sh stop spark.deploy.master.Master 1
+"$bin"/spark-daemon.sh stop org.apache.spark.deploy.master.Master 1
diff --git a/bin/stop-slaves.sh b/bin/stop-slaves.sh
index 8e056f23d4ee1757bd335ef942961cc64cd7a02d..03e416a13274d5c94caa381257332ec42bb31fd1 100755
--- a/bin/stop-slaves.sh
+++ b/bin/stop-slaves.sh
@@ -29,9 +29,9 @@ if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
 fi
 
 if [ "$SPARK_WORKER_INSTANCES" = "" ]; then
-  "$bin"/spark-daemons.sh stop spark.deploy.worker.Worker 1
+  "$bin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker 1
 else
   for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do
-    "$bin"/spark-daemons.sh stop spark.deploy.worker.Worker $(( $i + 1 ))
+    "$bin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker $(( $i + 1 ))
   done
 fi
diff --git a/core/pom.xml b/core/pom.xml
index 53696367e984f88682071abf03001337f76a92f2..5738b7406f89c91032f956b7abdbf5db44d6d0c4 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -19,17 +19,17 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <parent>
-    <groupId>org.spark-project</groupId>
+    <groupId>org.apache.spark</groupId>
     <artifactId>spark-parent</artifactId>
     <version>0.8.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 
-  <groupId>org.spark-project</groupId>
+  <groupId>org.apache.spark</groupId>
   <artifactId>spark-core</artifactId>
   <packaging>jar</packaging>
   <name>Spark Project Core</name>
-  <url>http://spark-project.org/</url>
+  <url>http://spark.incubator.apache.org/</url>
 
   <dependencies>
     <dependency>
diff --git a/core/src/main/java/spark/network/netty/FileClient.java b/core/src/main/java/org/apache/spark/network/netty/FileClient.java
similarity index 98%
rename from core/src/main/java/spark/network/netty/FileClient.java
rename to core/src/main/java/org/apache/spark/network/netty/FileClient.java
index 0625a6d502944dde21949c414b38fd71e65af037..20a7a3aa8c122ba7696df735bb3309c21fd50691 100644
--- a/core/src/main/java/spark/network/netty/FileClient.java
+++ b/core/src/main/java/org/apache/spark/network/netty/FileClient.java
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.network.netty;
+package org.apache.spark.network.netty;
 
 import io.netty.bootstrap.Bootstrap;
 import io.netty.channel.Channel;
diff --git a/core/src/main/java/spark/network/netty/FileClientChannelInitializer.java b/core/src/main/java/org/apache/spark/network/netty/FileClientChannelInitializer.java
similarity index 97%
rename from core/src/main/java/spark/network/netty/FileClientChannelInitializer.java
rename to core/src/main/java/org/apache/spark/network/netty/FileClientChannelInitializer.java
index 05ad4b61d72919cefb89f98490355c2c47331bd3..65ee15d63b8548b27275419a2c1507cf0e23847b 100644
--- a/core/src/main/java/spark/network/netty/FileClientChannelInitializer.java
+++ b/core/src/main/java/org/apache/spark/network/netty/FileClientChannelInitializer.java
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.network.netty;
+package org.apache.spark.network.netty;
 
 import io.netty.buffer.BufType;
 import io.netty.channel.ChannelInitializer;
diff --git a/core/src/main/java/spark/network/netty/FileClientHandler.java b/core/src/main/java/org/apache/spark/network/netty/FileClientHandler.java
similarity index 97%
rename from core/src/main/java/spark/network/netty/FileClientHandler.java
rename to core/src/main/java/org/apache/spark/network/netty/FileClientHandler.java
index e8cd9801f63eb56d68be6700db378d582bffb11e..c4aa2669e05d36bea4bd6631ef5c96bd63634219 100644
--- a/core/src/main/java/spark/network/netty/FileClientHandler.java
+++ b/core/src/main/java/org/apache/spark/network/netty/FileClientHandler.java
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.network.netty;
+package org.apache.spark.network.netty;
 
 import io.netty.buffer.ByteBuf;
 import io.netty.channel.ChannelHandlerContext;
diff --git a/core/src/main/java/spark/network/netty/FileServer.java b/core/src/main/java/org/apache/spark/network/netty/FileServer.java
similarity index 98%
rename from core/src/main/java/spark/network/netty/FileServer.java
rename to core/src/main/java/org/apache/spark/network/netty/FileServer.java
index 9f009a61d5dfbc3dbb83fdaa299c3140ee9e61d5..666432474dc75616a3f3a586c55996f5acdc8635 100644
--- a/core/src/main/java/spark/network/netty/FileServer.java
+++ b/core/src/main/java/org/apache/spark/network/netty/FileServer.java
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.network.netty;
+package org.apache.spark.network.netty;
 
 import java.net.InetSocketAddress;
 
diff --git a/core/src/main/java/spark/network/netty/FileServerChannelInitializer.java b/core/src/main/java/org/apache/spark/network/netty/FileServerChannelInitializer.java
similarity index 97%
rename from core/src/main/java/spark/network/netty/FileServerChannelInitializer.java
rename to core/src/main/java/org/apache/spark/network/netty/FileServerChannelInitializer.java
index 50c57a81a355313e741f7abed9a09f32048960d5..833af1632de9ca2b87d0b51f5732ae359c4e2d75 100644
--- a/core/src/main/java/spark/network/netty/FileServerChannelInitializer.java
+++ b/core/src/main/java/org/apache/spark/network/netty/FileServerChannelInitializer.java
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.network.netty;
+package org.apache.spark.network.netty;
 
 import io.netty.channel.ChannelInitializer;
 import io.netty.channel.socket.SocketChannel;
diff --git a/core/src/main/java/spark/network/netty/FileServerHandler.java b/core/src/main/java/org/apache/spark/network/netty/FileServerHandler.java
similarity index 98%
rename from core/src/main/java/spark/network/netty/FileServerHandler.java
rename to core/src/main/java/org/apache/spark/network/netty/FileServerHandler.java
index 176ba8da49102ffa0d54b3c8d78cf6b1d6c602c8..d3d57a02555720000bb90583bfde2ab4f32d07d1 100644
--- a/core/src/main/java/spark/network/netty/FileServerHandler.java
+++ b/core/src/main/java/org/apache/spark/network/netty/FileServerHandler.java
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.network.netty;
+package org.apache.spark.network.netty;
 
 import java.io.File;
 import java.io.FileInputStream;
diff --git a/core/src/main/java/spark/network/netty/PathResolver.java b/core/src/main/java/org/apache/spark/network/netty/PathResolver.java
similarity index 93%
rename from core/src/main/java/spark/network/netty/PathResolver.java
rename to core/src/main/java/org/apache/spark/network/netty/PathResolver.java
index f446c55b19b6b922ac1bcc7a29e6a6cbbe2399d9..94c034cad0119397bc6332dde066fa2be2ab75a8 100755
--- a/core/src/main/java/spark/network/netty/PathResolver.java
+++ b/core/src/main/java/org/apache/spark/network/netty/PathResolver.java
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.network.netty;
+package org.apache.spark.network.netty;
 
 
 public interface PathResolver {
diff --git a/core/src/main/resources/spark/ui/static/bootstrap.min.css b/core/src/main/resources/org/apache/spark/ui/static/bootstrap.min.css
similarity index 100%
rename from core/src/main/resources/spark/ui/static/bootstrap.min.css
rename to core/src/main/resources/org/apache/spark/ui/static/bootstrap.min.css
diff --git a/core/src/main/resources/spark/ui/static/sorttable.js b/core/src/main/resources/org/apache/spark/ui/static/sorttable.js
similarity index 100%
rename from core/src/main/resources/spark/ui/static/sorttable.js
rename to core/src/main/resources/org/apache/spark/ui/static/sorttable.js
diff --git a/core/src/main/resources/spark/ui/static/spark-logo-77x50px-hd.png b/core/src/main/resources/org/apache/spark/ui/static/spark-logo-77x50px-hd.png
similarity index 100%
rename from core/src/main/resources/spark/ui/static/spark-logo-77x50px-hd.png
rename to core/src/main/resources/org/apache/spark/ui/static/spark-logo-77x50px-hd.png
diff --git a/core/src/main/resources/spark/ui/static/spark_logo.png b/core/src/main/resources/org/apache/spark/ui/static/spark_logo.png
similarity index 100%
rename from core/src/main/resources/spark/ui/static/spark_logo.png
rename to core/src/main/resources/org/apache/spark/ui/static/spark_logo.png
diff --git a/core/src/main/resources/spark/ui/static/webui.css b/core/src/main/resources/org/apache/spark/ui/static/webui.css
similarity index 100%
rename from core/src/main/resources/spark/ui/static/webui.css
rename to core/src/main/resources/org/apache/spark/ui/static/webui.css
diff --git a/core/src/main/scala/spark/Accumulators.scala b/core/src/main/scala/org/apache/spark/Accumulators.scala
similarity index 94%
rename from core/src/main/scala/spark/Accumulators.scala
rename to core/src/main/scala/org/apache/spark/Accumulators.scala
index 6ff92ce833b022778a5f8d72bcdb6abb9167be61..6e922a612a0799afd3d36d2cb7721cd73056add1 100644
--- a/core/src/main/scala/spark/Accumulators.scala
+++ b/core/src/main/scala/org/apache/spark/Accumulators.scala
@@ -15,12 +15,13 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import java.io._
 
 import scala.collection.mutable.Map
 import scala.collection.generic.Growable
+import org.apache.spark.serializer.JavaSerializer
 
 /**
 * A datatype that can be accumulated, i.e. has a commutative and associative "add" operation,
@@ -28,7 +29,7 @@ import scala.collection.generic.Growable
  *
  * You must define how to add data, and how to merge two of these together.  For some datatypes,
  * such as a counter, these might be the same operation. In that case, you can use the simpler
- * [[spark.Accumulator]]. They won't always be the same, though -- e.g., imagine you are
+ * [[org.apache.spark.Accumulator]]. They won't always be the same, though -- e.g., imagine you are
  * accumulating a set. You will add items to the set, and you will union two sets together.
  *
  * @param initialValue initial value of accumulator
@@ -176,7 +177,7 @@ class GrowableAccumulableParam[R <% Growable[T] with TraversableOnce[T] with Ser
   def zero(initialValue: R): R = {
     // We need to clone initialValue, but it's hard to specify that R should also be Cloneable.
     // Instead we'll serialize it to a buffer and load it back.
-    val ser = (new spark.JavaSerializer).newInstance()
+    val ser = new JavaSerializer().newInstance()
     val copy = ser.deserialize[R](ser.serialize(initialValue))
     copy.clear()   // In case it contained stuff
     copy
@@ -184,7 +185,7 @@ class GrowableAccumulableParam[R <% Growable[T] with TraversableOnce[T] with Ser
 }
 
 /**
- * A simpler value of [[spark.Accumulable]] where the result type being accumulated is the same
+ * A simpler version of [[org.apache.spark.Accumulable]] where the result type being accumulated is the same
  * as the types of elements being merged.
  *
  * @param initialValue initial value of accumulator
@@ -195,7 +196,7 @@ class Accumulator[T](@transient initialValue: T, param: AccumulatorParam[T])
   extends Accumulable[T,T](initialValue, param)
 
 /**
- * A simpler version of [[spark.AccumulableParam]] where the only datatype you can add in is the same type
+ * A simpler version of [[org.apache.spark.AccumulableParam]] where the only datatype you can add in is the same type
  * as the accumulated value. An implicit AccumulatorParam object needs to be available when you create
  * Accumulators of a specific type.
  *
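
As the scaladoc above notes, accumulators are write-only from tasks and readable only on the driver. A minimal usage sketch under the new package name, assuming a local master:

    import org.apache.spark.SparkContext
    import org.apache.spark.SparkContext._   // brings IntAccumulatorParam into scope

    val sc = new SparkContext("local[2]", "accumulator-sketch")
    val lineCount = sc.accumulator(0)                     // Accumulator[Int]
    sc.textFile("README.md").foreach(_ => lineCount += 1) // tasks may only add
    println("lines seen: " + lineCount.value)             // only the driver reads .value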
diff --git a/core/src/main/scala/spark/Aggregator.scala b/core/src/main/scala/org/apache/spark/Aggregator.scala
similarity index 98%
rename from core/src/main/scala/spark/Aggregator.scala
rename to core/src/main/scala/org/apache/spark/Aggregator.scala
index 9af401986d350ec4b47495638d64ccc33531034a..3ef402926e8985e30744da8f0e67a30715b21c4e 100644
--- a/core/src/main/scala/spark/Aggregator.scala
+++ b/core/src/main/scala/org/apache/spark/Aggregator.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import java.util.{HashMap => JHashMap}
 
diff --git a/core/src/main/scala/spark/BlockStoreShuffleFetcher.scala b/core/src/main/scala/org/apache/spark/BlockStoreShuffleFetcher.scala
similarity index 93%
rename from core/src/main/scala/spark/BlockStoreShuffleFetcher.scala
rename to core/src/main/scala/org/apache/spark/BlockStoreShuffleFetcher.scala
index 1ec95ed9b88e049b72d30d56f06f0c386fbc6ebc..908ff56a6bbdb8bcd00a6ab671ee298ea8c67ed8 100644
--- a/core/src/main/scala/spark/BlockStoreShuffleFetcher.scala
+++ b/core/src/main/scala/org/apache/spark/BlockStoreShuffleFetcher.scala
@@ -15,15 +15,15 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import scala.collection.mutable.ArrayBuffer
 import scala.collection.mutable.HashMap
 
-import spark.executor.{ShuffleReadMetrics, TaskMetrics}
-import spark.serializer.Serializer
-import spark.storage.BlockManagerId
-import spark.util.CompletionIterator
+import org.apache.spark.executor.{ShuffleReadMetrics, TaskMetrics}
+import org.apache.spark.serializer.Serializer
+import org.apache.spark.storage.BlockManagerId
+import org.apache.spark.util.CompletionIterator
 
 
 private[spark] class BlockStoreShuffleFetcher extends ShuffleFetcher with Logging {
diff --git a/core/src/main/scala/spark/CacheManager.scala b/core/src/main/scala/org/apache/spark/CacheManager.scala
similarity index 96%
rename from core/src/main/scala/spark/CacheManager.scala
rename to core/src/main/scala/org/apache/spark/CacheManager.scala
index 81314805a93853af4855d429c883c2e216288034..e299a106ee6599cd610320c5dfe55864009590c6 100644
--- a/core/src/main/scala/spark/CacheManager.scala
+++ b/core/src/main/scala/org/apache/spark/CacheManager.scala
@@ -15,10 +15,11 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import scala.collection.mutable.{ArrayBuffer, HashSet}
-import spark.storage.{BlockManager, StorageLevel}
+import org.apache.spark.storage.{BlockManager, StorageLevel}
+import org.apache.spark.rdd.RDD
 
 
 /** Spark class responsible for passing RDDs' split contents to the BlockManager and making
diff --git a/core/src/main/scala/spark/Dependency.scala b/core/src/main/scala/org/apache/spark/Dependency.scala
similarity index 97%
rename from core/src/main/scala/spark/Dependency.scala
rename to core/src/main/scala/org/apache/spark/Dependency.scala
index d5a960657095634151ddb3e4513b934212a0e408..cc30105940d1acaf823f720b8f67cacbc94194b6 100644
--- a/core/src/main/scala/spark/Dependency.scala
+++ b/core/src/main/scala/org/apache/spark/Dependency.scala
@@ -15,7 +15,9 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
+
+import org.apache.spark.rdd.RDD
 
 /**
  * Base class for dependencies.
diff --git a/core/src/main/scala/spark/FetchFailedException.scala b/core/src/main/scala/org/apache/spark/FetchFailedException.scala
similarity index 95%
rename from core/src/main/scala/spark/FetchFailedException.scala
rename to core/src/main/scala/org/apache/spark/FetchFailedException.scala
index a2dae6cae9adee2370f297a6eef3f4da6e857865..d242047502fd3acb320264533ef88a4443c4fe3b 100644
--- a/core/src/main/scala/spark/FetchFailedException.scala
+++ b/core/src/main/scala/org/apache/spark/FetchFailedException.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
-import spark.storage.BlockManagerId
+import org.apache.spark.storage.BlockManagerId
 
 private[spark] class FetchFailedException(
     taskEndReason: TaskEndReason,
diff --git a/core/src/main/scala/spark/HttpFileServer.scala b/core/src/main/scala/org/apache/spark/HttpFileServer.scala
similarity index 96%
rename from core/src/main/scala/spark/HttpFileServer.scala
rename to core/src/main/scala/org/apache/spark/HttpFileServer.scala
index a13a7a28590bbc668099c9347e38c43847a9fa33..ad1ee20045f46e5591067364692a4f3e0de60b27 100644
--- a/core/src/main/scala/spark/HttpFileServer.scala
+++ b/core/src/main/scala/org/apache/spark/HttpFileServer.scala
@@ -15,10 +15,11 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import java.io.{File}
 import com.google.common.io.Files
+import org.apache.spark.util.Utils
 
 private[spark] class HttpFileServer extends Logging {
   
diff --git a/core/src/main/scala/spark/HttpServer.scala b/core/src/main/scala/org/apache/spark/HttpServer.scala
similarity index 98%
rename from core/src/main/scala/spark/HttpServer.scala
rename to core/src/main/scala/org/apache/spark/HttpServer.scala
index c9dffbc63110b2c5b780d2923b24ea11610f9d81..cdfc9dd54e06a82b01846066f686e70a858963a5 100644
--- a/core/src/main/scala/spark/HttpServer.scala
+++ b/core/src/main/scala/org/apache/spark/HttpServer.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import java.io.File
 import java.net.InetAddress
@@ -26,6 +26,7 @@ import org.eclipse.jetty.server.handler.DefaultHandler
 import org.eclipse.jetty.server.handler.HandlerList
 import org.eclipse.jetty.server.handler.ResourceHandler
 import org.eclipse.jetty.util.thread.QueuedThreadPool
+import org.apache.spark.util.Utils
 
 /**
  * Exception type thrown by HttpServer when it is in the wrong state for an operation.
diff --git a/core/src/main/scala/spark/Logging.scala b/core/src/main/scala/org/apache/spark/Logging.scala
similarity index 99%
rename from core/src/main/scala/spark/Logging.scala
rename to core/src/main/scala/org/apache/spark/Logging.scala
index 79b0362830d9d43550e81f67679774dcccda17be..6a973ea4951c3a389a202f6c5dfd77f355061648 100644
--- a/core/src/main/scala/spark/Logging.scala
+++ b/core/src/main/scala/org/apache/spark/Logging.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import org.slf4j.Logger
 import org.slf4j.LoggerFactory
diff --git a/core/src/main/scala/spark/MapOutputTracker.scala b/core/src/main/scala/org/apache/spark/MapOutputTracker.scala
similarity index 98%
rename from core/src/main/scala/spark/MapOutputTracker.scala
rename to core/src/main/scala/org/apache/spark/MapOutputTracker.scala
index 0cd0341a7242e18006e5dbfa0e0235472222a12a..ae7cf2a89309942a4394fdac3204311a25cec3b0 100644
--- a/core/src/main/scala/spark/MapOutputTracker.scala
+++ b/core/src/main/scala/org/apache/spark/MapOutputTracker.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import java.io._
 import java.util.zip.{GZIPInputStream, GZIPOutputStream}
@@ -30,9 +30,9 @@ import akka.remote._
 import akka.util.Duration
 
 
-import spark.scheduler.MapStatus
-import spark.storage.BlockManagerId
-import spark.util.{MetadataCleaner, TimeStampedHashMap}
+import org.apache.spark.scheduler.MapStatus
+import org.apache.spark.storage.BlockManagerId
+import org.apache.spark.util.{Utils, MetadataCleaner, TimeStampedHashMap}
 
 
 private[spark] sealed trait MapOutputTrackerMessage
diff --git a/core/src/main/scala/spark/Partition.scala b/core/src/main/scala/org/apache/spark/Partition.scala
similarity index 97%
rename from core/src/main/scala/spark/Partition.scala
rename to core/src/main/scala/org/apache/spark/Partition.scala
index 2a4edcec9841948ff0b3cca294b5a0f14aca786c..87914a061f5d7c0864b2a2f2e0abedc260be3831 100644
--- a/core/src/main/scala/spark/Partition.scala
+++ b/core/src/main/scala/org/apache/spark/Partition.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 /**
  * A partition of an RDD.
diff --git a/core/src/main/scala/spark/Partitioner.scala b/core/src/main/scala/org/apache/spark/Partitioner.scala
similarity index 93%
rename from core/src/main/scala/spark/Partitioner.scala
rename to core/src/main/scala/org/apache/spark/Partitioner.scala
index 65da8235d75085bdca8ce20f6c40a537990fd203..0e2c987a598ec3b5c4ad57ac16a7b232b49f65ad 100644
--- a/core/src/main/scala/spark/Partitioner.scala
+++ b/core/src/main/scala/org/apache/spark/Partitioner.scala
@@ -15,7 +15,10 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
+
+import org.apache.spark.util.Utils
+import org.apache.spark.rdd.RDD
 
 /**
  * An object that defines how the elements in a key-value pair RDD are partitioned by key.
@@ -56,7 +59,7 @@ object Partitioner {
 }
 
 /**
- * A [[spark.Partitioner]] that implements hash-based partitioning using Java's `Object.hashCode`.
+ * A [[org.apache.spark.Partitioner]] that implements hash-based partitioning using Java's `Object.hashCode`.
  *
  * Java arrays have hashCodes that are based on the arrays' identities rather than their contents,
  * so attempting to partition an RDD[Array[_]] or RDD[(Array[_], _)] using a HashPartitioner will
@@ -79,7 +82,7 @@ class HashPartitioner(partitions: Int) extends Partitioner {
 }
 
 /**
- * A [[spark.Partitioner]] that partitions sortable records by range into roughly equal ranges.
+ * A [[org.apache.spark.Partitioner]] that partitions sortable records by range into roughly equal ranges.
  * Determines the ranges by sampling the RDD passed in.
  */
 class RangePartitioner[K <% Ordered[K]: ClassManifest, V](
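
A short sketch of the HashPartitioner described above, assuming a local master; note the scaladoc's caveat that array keys hash by identity, so this only behaves as expected for value-like keys:

    import org.apache.spark.{HashPartitioner, SparkContext}
    import org.apache.spark.SparkContext._   // rddToPairRDDFunctions for partitionBy

    val sc = new SparkContext("local[2]", "partitioner-sketch")
    val pairs = sc.parallelize(1 to 100).map(i => (i, i * i))
    // Route each key to one of 4 partitions via Object.hashCode.
    val partitioned = pairs.partitionBy(new HashPartitioner(4))
    println(partitioned.partitions.size)   // 4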
diff --git a/core/src/main/scala/spark/SerializableWritable.scala b/core/src/main/scala/org/apache/spark/SerializableWritable.scala
similarity index 98%
rename from core/src/main/scala/spark/SerializableWritable.scala
rename to core/src/main/scala/org/apache/spark/SerializableWritable.scala
index 936d8e62412e7853ed823e66f53fd95a13811c6a..fdd4c24e2345f7bff20a1e565d9d50ee60c47e0f 100644
--- a/core/src/main/scala/spark/SerializableWritable.scala
+++ b/core/src/main/scala/org/apache/spark/SerializableWritable.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import java.io._
 
diff --git a/core/src/main/scala/spark/ShuffleFetcher.scala b/core/src/main/scala/org/apache/spark/ShuffleFetcher.scala
similarity index 91%
rename from core/src/main/scala/spark/ShuffleFetcher.scala
rename to core/src/main/scala/org/apache/spark/ShuffleFetcher.scala
index a6839cf7a40dc94b6cd49010acee7f8d259c5f5c..307c383a89c72d1af1670b4176551719b226ba2e 100644
--- a/core/src/main/scala/spark/ShuffleFetcher.scala
+++ b/core/src/main/scala/org/apache/spark/ShuffleFetcher.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
-import spark.executor.TaskMetrics
-import spark.serializer.Serializer
+import org.apache.spark.executor.TaskMetrics
+import org.apache.spark.serializer.Serializer
 
 
 private[spark] abstract class ShuffleFetcher {
diff --git a/core/src/main/scala/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
similarity index 97%
rename from core/src/main/scala/spark/SparkContext.scala
rename to core/src/main/scala/org/apache/spark/SparkContext.scala
index 7ce9505b9c277966a5eb73b41a28d585a4413445..faf0c2362a24b4fd13e7e8e81964e5c5d3f71778 100644
--- a/core/src/main/scala/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import java.io._
 import java.net.URI
@@ -52,22 +52,20 @@ import org.apache.hadoop.mapreduce.lib.input.{FileInputFormat => NewFileInputFor
 
 import org.apache.mesos.MesosNativeLibrary
 
-import spark.deploy.LocalSparkCluster
-import spark.partial.{ApproximateEvaluator, PartialResult}
-import spark.rdd.{CheckpointRDD, HadoopRDD, NewHadoopRDD, UnionRDD, ParallelCollectionRDD,
-  OrderedRDDFunctions}
-import spark.scheduler._
-import spark.scheduler.cluster.{StandaloneSchedulerBackend, SparkDeploySchedulerBackend,
+import org.apache.spark.deploy.LocalSparkCluster
+import org.apache.spark.partial.{ApproximateEvaluator, PartialResult}
+import org.apache.spark.rdd._
+import org.apache.spark.scheduler._
+import org.apache.spark.scheduler.cluster.{StandaloneSchedulerBackend, SparkDeploySchedulerBackend,
   ClusterScheduler, Schedulable, SchedulingMode}
-import spark.scheduler.local.LocalScheduler
-import spark.scheduler.mesos.{CoarseMesosSchedulerBackend, MesosSchedulerBackend}
-import spark.storage.{StorageStatus, StorageUtils, RDDInfo, BlockManagerSource}
-import spark.ui.SparkUI
-import spark.util.{MetadataCleaner, TimeStampedHashMap}
-import scala.Some
-import spark.scheduler.StageInfo
-import spark.storage.RDDInfo
-import spark.storage.StorageStatus
+import org.apache.spark.scheduler.local.LocalScheduler
+import org.apache.spark.scheduler.mesos.{CoarseMesosSchedulerBackend, MesosSchedulerBackend}
+import org.apache.spark.storage.{StorageUtils, BlockManagerSource}
+import org.apache.spark.ui.SparkUI
+import org.apache.spark.util.{ClosureCleaner, Utils, MetadataCleaner, TimeStampedHashMap}
+import org.apache.spark.scheduler.StageInfo
+import org.apache.spark.storage.RDDInfo
+import org.apache.spark.storage.StorageStatus
 
 /**
  * Main entry point for Spark functionality. A SparkContext represents the connection to a Spark
@@ -494,14 +492,14 @@ class SparkContext(
   // Methods for creating shared variables
 
   /**
-   * Create an [[spark.Accumulator]] variable of a given type, which tasks can "add" values
+   * Create an [[org.apache.spark.Accumulator]] variable of a given type, which tasks can "add" values
    * to using the `+=` method. Only the driver can access the accumulator's `value`.
    */
   def accumulator[T](initialValue: T)(implicit param: AccumulatorParam[T]) =
     new Accumulator(initialValue, param)
 
   /**
-   * Create an [[spark.Accumulable]] shared variable, to which tasks can add values with `+=`.
+   * Create an [[org.apache.spark.Accumulable]] shared variable, to which tasks can add values with `+=`.
    * Only the driver can access the accumulable's `value`.
    * @tparam T accumulator type
    * @tparam R type that can be added to the accumulator
@@ -521,7 +519,7 @@ class SparkContext(
   }
 
   /**
-   * Broadcast a read-only variable to the cluster, returning a [[spark.broadcast.Broadcast]] object for
+   * Broadcast a read-only variable to the cluster, returning a [[org.apache.spark.broadcast.Broadcast]] object for
    * reading it in distributed functions. The variable will be sent to each cluster only once.
    */
   def broadcast[T](value: T) = env.broadcastManager.newBroadcast[T](value, isLocal)
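
A brief sketch of the broadcast call above, assuming a local master; the broadcast value is shipped to each node once rather than with every closure:

    import org.apache.spark.SparkContext

    val sc = new SparkContext("local[2]", "broadcast-sketch")
    val lookup = sc.broadcast(Map(1 -> "one", 2 -> "two"))   // sent to each node once
    val named = sc.parallelize(Seq(1, 2, 1, 3)).map(n => lookup.value.getOrElse(n, "?"))
    println(named.collect().mkString(", "))   // one, two, one, ?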
diff --git a/core/src/main/scala/spark/SparkEnv.scala b/core/src/main/scala/org/apache/spark/SparkEnv.scala
similarity index 90%
rename from core/src/main/scala/spark/SparkEnv.scala
rename to core/src/main/scala/org/apache/spark/SparkEnv.scala
index 1f66e9cc7f3b36a59d054f7ab0aa453e16886544..478e5a0aaf2ac529c7dd733174cca69c511b5b2f 100644
--- a/core/src/main/scala/spark/SparkEnv.scala
+++ b/core/src/main/scala/org/apache/spark/SparkEnv.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import collection.mutable
 import serializer.Serializer
@@ -23,15 +23,14 @@ import serializer.Serializer
 import akka.actor.{Actor, ActorRef, Props, ActorSystemImpl, ActorSystem}
 import akka.remote.RemoteActorRefProvider
 
-import spark.broadcast.BroadcastManager
-import spark.metrics.MetricsSystem
-import spark.deploy.SparkHadoopUtil
-import spark.storage.BlockManager
-import spark.storage.BlockManagerMaster
-import spark.network.ConnectionManager
-import spark.serializer.{Serializer, SerializerManager}
-import spark.util.AkkaUtils
-import spark.api.python.PythonWorkerFactory
+import org.apache.spark.broadcast.BroadcastManager
+import org.apache.spark.metrics.MetricsSystem
+import org.apache.spark.deploy.SparkHadoopUtil
+import org.apache.spark.storage.{BlockManagerMasterActor, BlockManager, BlockManagerMaster}
+import org.apache.spark.network.ConnectionManager
+import org.apache.spark.serializer.{Serializer, SerializerManager}
+import org.apache.spark.util.{Utils, AkkaUtils}
+import org.apache.spark.api.python.PythonWorkerFactory
 
 
 /**
@@ -156,10 +155,10 @@ object SparkEnv extends Logging {
     val serializerManager = new SerializerManager
 
     val serializer = serializerManager.setDefault(
-      System.getProperty("spark.serializer", "spark.JavaSerializer"))
+      System.getProperty("spark.serializer", "org.apache.spark.serializer.JavaSerializer"))
 
     val closureSerializer = serializerManager.get(
-      System.getProperty("spark.closure.serializer", "spark.JavaSerializer"))
+      System.getProperty("spark.closure.serializer", "org.apache.spark.serializer.JavaSerializer"))
 
     def registerOrLookup(name: String, newActor: => Actor): ActorRef = {
       if (isDriver) {
@@ -177,7 +176,7 @@ object SparkEnv extends Logging {
 
     val blockManagerMaster = new BlockManagerMaster(registerOrLookup(
       "BlockManagerMaster",
-      new spark.storage.BlockManagerMasterActor(isLocal)))
+      new BlockManagerMasterActor(isLocal)))
     val blockManager = new BlockManager(executorId, actorSystem, blockManagerMaster, serializer)
 
     val connectionManager = blockManager.connectionManager
@@ -194,7 +193,7 @@ object SparkEnv extends Logging {
       new MapOutputTrackerActor(mapOutputTracker))
 
     val shuffleFetcher = instantiateClass[ShuffleFetcher](
-      "spark.shuffle.fetcher", "spark.BlockStoreShuffleFetcher")
+      "spark.shuffle.fetcher", "org.apache.spark.BlockStoreShuffleFetcher")
 
     val httpFileServer = new HttpFileServer()
     httpFileServer.initialize()
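
One practical consequence of this hunk: `spark.serializer` and `spark.closure.serializer` must now name fully-qualified classes under `org.apache.spark.serializer`. A sketch of opting into Kryo under the new naming, assuming KryoSerializer moved alongside JavaSerializer (not shown in this excerpt):

    // The old value would have been "spark.KryoSerializer"; the serializer classes
    // are assumed to live under org.apache.spark.serializer after this rename.
    System.setProperty("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    val sc = new org.apache.spark.SparkContext("local[2]", "serializer-sketch")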
diff --git a/core/src/main/scala/spark/SparkException.scala b/core/src/main/scala/org/apache/spark/SparkException.scala
similarity index 97%
rename from core/src/main/scala/spark/SparkException.scala
rename to core/src/main/scala/org/apache/spark/SparkException.scala
index b7045eea63e86467f58ec1cfc05a904fa08e805b..d34e47e8cac226ef63f82ae5dc1ad023b4be1bae 100644
--- a/core/src/main/scala/spark/SparkException.scala
+++ b/core/src/main/scala/org/apache/spark/SparkException.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 class SparkException(message: String, cause: Throwable)
   extends Exception(message, cause) {
diff --git a/core/src/main/scala/spark/SparkFiles.java b/core/src/main/scala/org/apache/spark/SparkFiles.java
similarity index 98%
rename from core/src/main/scala/spark/SparkFiles.java
rename to core/src/main/scala/org/apache/spark/SparkFiles.java
index f9b3f7965eaf1529bcee091aab8f430209eef8c4..af9cf85e372bf7b8f22508690552e7e04de8229d 100644
--- a/core/src/main/scala/spark/SparkFiles.java
+++ b/core/src/main/scala/org/apache/spark/SparkFiles.java
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark;
+package org.apache.spark;
 
 import java.io.File;
 
diff --git a/core/src/main/scala/spark/SparkHadoopWriter.scala b/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala
similarity index 98%
rename from core/src/main/scala/spark/SparkHadoopWriter.scala
rename to core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala
index 6b330ef5727be767df763dd61c42caa88012b353..2bab9d6e3d9280c2173fa2b6f2498c32d03bb6cc 100644
--- a/core/src/main/scala/spark/SparkHadoopWriter.scala
+++ b/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala
@@ -25,8 +25,8 @@ import java.text.NumberFormat
 import java.io.IOException
 import java.util.Date
 
-import spark.Logging
-import spark.SerializableWritable
+import org.apache.spark.Logging
+import org.apache.spark.SerializableWritable
 
 /**
  * Internal helper class that saves an RDD using a Hadoop OutputFormat. This is only public
diff --git a/core/src/main/scala/spark/TaskContext.scala b/core/src/main/scala/org/apache/spark/TaskContext.scala
similarity index 98%
rename from core/src/main/scala/spark/TaskContext.scala
rename to core/src/main/scala/org/apache/spark/TaskContext.scala
index b79f4ca81306837d01206d168405ddc22b84e495..b2dd668330a3acdc68a0f84160b09a1bcbbcd073 100644
--- a/core/src/main/scala/spark/TaskContext.scala
+++ b/core/src/main/scala/org/apache/spark/TaskContext.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import executor.TaskMetrics
 import scala.collection.mutable.ArrayBuffer
diff --git a/core/src/main/scala/spark/TaskEndReason.scala b/core/src/main/scala/org/apache/spark/TaskEndReason.scala
similarity index 93%
rename from core/src/main/scala/spark/TaskEndReason.scala
rename to core/src/main/scala/org/apache/spark/TaskEndReason.scala
index 3ad665da34481677a8c5fb22de1f7742d96975e1..03bf268863cf7a9942eb8b0b3c8777aa7a79fd5f 100644
--- a/core/src/main/scala/spark/TaskEndReason.scala
+++ b/core/src/main/scala/org/apache/spark/TaskEndReason.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
-import spark.executor.TaskMetrics
-import spark.storage.BlockManagerId
+import org.apache.spark.executor.TaskMetrics
+import org.apache.spark.storage.BlockManagerId
 
 /**
  * Various possible reasons why a task ended. The low-level TaskScheduler is supposed to retry
diff --git a/core/src/main/scala/spark/TaskState.scala b/core/src/main/scala/org/apache/spark/TaskState.scala
similarity index 98%
rename from core/src/main/scala/spark/TaskState.scala
rename to core/src/main/scala/org/apache/spark/TaskState.scala
index bf757530565db259fe40a4064c66ae7b4ed9288d..19ce8369d90c7e2bedad49625557fb08b17492c7 100644
--- a/core/src/main/scala/spark/TaskState.scala
+++ b/core/src/main/scala/org/apache/spark/TaskState.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import org.apache.mesos.Protos.{TaskState => MesosTaskState}
 
diff --git a/core/src/main/scala/spark/api/java/JavaDoubleRDD.scala b/core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala
similarity index 92%
rename from core/src/main/scala/spark/api/java/JavaDoubleRDD.scala
rename to core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala
index 8ce7df62133876f73b60976deea9eb7481e5fc02..5fd1fab5809da1ddc9cf6d2162c73a9532a53cf5 100644
--- a/core/src/main/scala/spark/api/java/JavaDoubleRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala
@@ -15,16 +15,16 @@
  * limitations under the License.
  */
 
-package spark.api.java
-
-import spark.RDD
-import spark.SparkContext.doubleRDDToDoubleRDDFunctions
-import spark.api.java.function.{Function => JFunction}
-import spark.util.StatCounter
-import spark.partial.{BoundedDouble, PartialResult}
-import spark.storage.StorageLevel
+package org.apache.spark.api.java
+
+import org.apache.spark.rdd.RDD
+import org.apache.spark.SparkContext.doubleRDDToDoubleRDDFunctions
+import org.apache.spark.api.java.function.{Function => JFunction}
+import org.apache.spark.util.StatCounter
+import org.apache.spark.partial.{BoundedDouble, PartialResult}
+import org.apache.spark.storage.StorageLevel
 import java.lang.Double
-import spark.Partitioner
+import org.apache.spark.Partitioner
 
 class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, JavaDoubleRDD] {
 
@@ -119,7 +119,7 @@ class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, Jav
   def sum(): Double = srdd.sum()
 
   /**
-   * Return a [[spark.util.StatCounter]] object that captures the mean, variance and count
+   * Return a [[org.apache.spark.util.StatCounter]] object that captures the mean, variance and count
    * of the RDD's elements in one operation.
    */
   def stats(): StatCounter = srdd.stats()
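
A quick sketch of the `stats()` call documented above, driven from the Scala API for brevity and assuming a local master:

    import org.apache.spark.SparkContext
    import org.apache.spark.SparkContext._   // doubleRDDToDoubleRDDFunctions

    val sc = new SparkContext("local[2]", "stats-sketch")
    val stats = sc.parallelize(Seq(1.0, 2.0, 3.0, 4.0)).stats()
    println("mean=" + stats.mean + " variance=" + stats.variance + " count=" + stats.count)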
diff --git a/core/src/main/scala/spark/api/java/JavaPairRDD.scala b/core/src/main/scala/org/apache/spark/api/java/JavaPairRDD.scala
similarity index 97%
rename from core/src/main/scala/spark/api/java/JavaPairRDD.scala
rename to core/src/main/scala/org/apache/spark/api/java/JavaPairRDD.scala
index effe6e5e0d6cd6d9626192f07e2d811e055412f9..a6518abf456d3ec9d3da730713c2d71733a6fc14 100644
--- a/core/src/main/scala/spark/api/java/JavaPairRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaPairRDD.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.api.java
+package org.apache.spark.api.java
 
 import java.util.{List => JList}
 import java.util.Comparator
@@ -30,17 +30,17 @@ import org.apache.hadoop.mapred.OutputFormat
 import org.apache.hadoop.mapreduce.{OutputFormat => NewOutputFormat}
 import org.apache.hadoop.conf.Configuration
 
-import spark.HashPartitioner
-import spark.Partitioner
-import spark.Partitioner._
-import spark.RDD
-import spark.SparkContext.rddToPairRDDFunctions
-import spark.api.java.function.{Function2 => JFunction2}
-import spark.api.java.function.{Function => JFunction}
-import spark.partial.BoundedDouble
-import spark.partial.PartialResult
-import spark.rdd.OrderedRDDFunctions
-import spark.storage.StorageLevel
+import org.apache.spark.HashPartitioner
+import org.apache.spark.Partitioner
+import org.apache.spark.Partitioner._
+import org.apache.spark.SparkContext.rddToPairRDDFunctions
+import org.apache.spark.api.java.function.{Function2 => JFunction2}
+import org.apache.spark.api.java.function.{Function => JFunction}
+import org.apache.spark.partial.BoundedDouble
+import org.apache.spark.partial.PartialResult
+import org.apache.spark.rdd.RDD
+import org.apache.spark.rdd.OrderedRDDFunctions
+import org.apache.spark.storage.StorageLevel
 
 
 class JavaPairRDD[K, V](val rdd: RDD[(K, V)])(implicit val kManifest: ClassManifest[K],
diff --git a/core/src/main/scala/spark/api/java/JavaRDD.scala b/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala
similarity index 94%
rename from core/src/main/scala/spark/api/java/JavaRDD.scala
rename to core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala
index c0bf2cf56867c717a04cacdf328193f6d336f098..eec58abdd608a462997b6101ac645d8c8be9cefe 100644
--- a/core/src/main/scala/spark/api/java/JavaRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala
@@ -15,11 +15,12 @@
  * limitations under the License.
  */
 
-package spark.api.java
+package org.apache.spark.api.java
 
-import spark._
-import spark.api.java.function.{Function => JFunction}
-import spark.storage.StorageLevel
+import org.apache.spark._
+import org.apache.spark.rdd.RDD
+import org.apache.spark.api.java.function.{Function => JFunction}
+import org.apache.spark.storage.StorageLevel
 
 class JavaRDD[T](val rdd: RDD[T])(implicit val classManifest: ClassManifest[T]) extends
 JavaRDDLike[T, JavaRDD[T]] {
diff --git a/core/src/main/scala/spark/api/java/JavaRDDLike.scala b/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala
similarity index 97%
rename from core/src/main/scala/spark/api/java/JavaRDDLike.scala
rename to core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala
index 2c2b138f16c2073dde18295cbe0d4473aaf0300d..7e6e691f11c9d1d57d9db3e0eac37d64f3de71ce 100644
--- a/core/src/main/scala/spark/api/java/JavaRDDLike.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala
@@ -15,19 +15,21 @@
  * limitations under the License.
  */
 
-package spark.api.java
+package org.apache.spark.api.java
 
 import java.util.{List => JList, Comparator}
 import scala.Tuple2
 import scala.collection.JavaConversions._
 
-import org.apache.hadoop.io.compress.CompressionCodec
-import spark.{SparkContext, Partition, RDD, TaskContext}
-import spark.api.java.JavaPairRDD._
-import spark.api.java.function.{Function2 => JFunction2, Function => JFunction, _}
-import spark.partial.{PartialResult, BoundedDouble}
-import spark.storage.StorageLevel
 import com.google.common.base.Optional
+import org.apache.hadoop.io.compress.CompressionCodec
+
+import org.apache.spark.{SparkContext, Partition, TaskContext}
+import org.apache.spark.rdd.RDD
+import org.apache.spark.api.java.JavaPairRDD._
+import org.apache.spark.api.java.function.{Function2 => JFunction2, Function => JFunction, _}
+import org.apache.spark.partial.{PartialResult, BoundedDouble}
+import org.apache.spark.storage.StorageLevel
 
 
 trait JavaRDDLike[T, This <: JavaRDDLike[T, This]] extends Serializable {
@@ -40,7 +42,7 @@ trait JavaRDDLike[T, This <: JavaRDDLike[T, This]] extends Serializable {
   /** Set of partitions in this RDD. */
   def splits: JList[Partition] = new java.util.ArrayList(rdd.partitions.toSeq)
 
-  /** The [[spark.SparkContext]] that this RDD was created on. */
+  /** The [[org.apache.spark.SparkContext]] that this RDD was created on. */
   def context: SparkContext = rdd.context
 
   /** A unique ID for this RDD (within its SparkContext). */
diff --git a/core/src/main/scala/spark/api/java/JavaSparkContext.scala b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
similarity index 93%
rename from core/src/main/scala/spark/api/java/JavaSparkContext.scala
rename to core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
index 29d57004b5643f17131a7003a97b8d8bc331acd8..8869e072bf1ce0efcce3d2cac31d944c4108afe6 100644
--- a/core/src/main/scala/spark/api/java/JavaSparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.api.java
+package org.apache.spark.api.java
 
 import java.util.{Map => JMap}
 
@@ -26,16 +26,16 @@ import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.mapred.InputFormat
 import org.apache.hadoop.mapred.JobConf
 import org.apache.hadoop.mapreduce.{InputFormat => NewInputFormat}
-
-import spark.{Accumulable, AccumulableParam, Accumulator, AccumulatorParam, RDD, SparkContext}
-import spark.SparkContext.IntAccumulatorParam
-import spark.SparkContext.DoubleAccumulatorParam
-import spark.broadcast.Broadcast
-
 import com.google.common.base.Optional
 
+import org.apache.spark.{Accumulable, AccumulableParam, Accumulator, AccumulatorParam, SparkContext}
+import org.apache.spark.SparkContext.IntAccumulatorParam
+import org.apache.spark.SparkContext.DoubleAccumulatorParam
+import org.apache.spark.broadcast.Broadcast
+import org.apache.spark.rdd.RDD
+
 /**
- * A Java-friendly version of [[spark.SparkContext]] that returns [[spark.api.java.JavaRDD]]s and
+ * A Java-friendly version of [[org.apache.spark.SparkContext]] that returns [[org.apache.spark.api.java.JavaRDD]]s and
  * works with Java collections instead of Scala ones.
  */
 class JavaSparkContext(val sc: SparkContext) extends JavaSparkContextVarargsWorkaround {
@@ -283,48 +283,48 @@ class JavaSparkContext(val sc: SparkContext) extends JavaSparkContextVarargsWork
   }
 
   /**
-   * Create an [[spark.Accumulator]] integer variable, which tasks can "add" values
+   * Create an [[org.apache.spark.Accumulator]] integer variable, which tasks can "add" values
    * to using the `add` method. Only the master can access the accumulator's `value`.
    */
   def intAccumulator(initialValue: Int): Accumulator[java.lang.Integer] =
     sc.accumulator(initialValue)(IntAccumulatorParam).asInstanceOf[Accumulator[java.lang.Integer]]
 
   /**
-   * Create an [[spark.Accumulator]] double variable, which tasks can "add" values
+   * Create an [[org.apache.spark.Accumulator]] double variable, which tasks can "add" values
    * to using the `add` method. Only the master can access the accumulator's `value`.
    */
   def doubleAccumulator(initialValue: Double): Accumulator[java.lang.Double] =
     sc.accumulator(initialValue)(DoubleAccumulatorParam).asInstanceOf[Accumulator[java.lang.Double]]
 
   /**
-   * Create an [[spark.Accumulator]] integer variable, which tasks can "add" values
+   * Create an [[org.apache.spark.Accumulator]] integer variable, which tasks can "add" values
    * to using the `add` method. Only the master can access the accumulator's `value`.
    */
   def accumulator(initialValue: Int): Accumulator[java.lang.Integer] = intAccumulator(initialValue)
 
   /**
-   * Create an [[spark.Accumulator]] double variable, which tasks can "add" values
+   * Create an [[org.apache.spark.Accumulator]] double variable, which tasks can "add" values
    * to using the `add` method. Only the master can access the accumulator's `value`.
    */
   def accumulator(initialValue: Double): Accumulator[java.lang.Double] =
     doubleAccumulator(initialValue)
 
   /**
-   * Create an [[spark.Accumulator]] variable of a given type, which tasks can "add" values
+   * Create an [[org.apache.spark.Accumulator]] variable of a given type, which tasks can "add" values
    * to using the `add` method. Only the master can access the accumulator's `value`.
    */
   def accumulator[T](initialValue: T, accumulatorParam: AccumulatorParam[T]): Accumulator[T] =
     sc.accumulator(initialValue)(accumulatorParam)
 
   /**
-   * Create an [[spark.Accumulable]] shared variable of the given type, to which tasks can
+   * Create an [[org.apache.spark.Accumulable]] shared variable of the given type, to which tasks can
    * "add" values with `add`. Only the master can access the accumuable's `value`.
    */
   def accumulable[T, R](initialValue: T, param: AccumulableParam[T, R]): Accumulable[T, R] =
     sc.accumulable(initialValue)(param)
 
   /**
-   * Broadcast a read-only variable to the cluster, returning a [[spark.Broadcast]] object for
+   * Broadcast a read-only variable to the cluster, returning a [[org.apache.spark.broadcast.Broadcast]] object for
    * reading it in distributed functions. The variable will be sent to each cluster only once.
    */
   def broadcast[T](value: T): Broadcast[T] = sc.broadcast(value)
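
The scaladoc rewritten above spells out the shared-variable contract: tasks may only `add` to an accumulator, the master alone reads its `value`, and a broadcast value is shipped to each node once. A short sketch of that contract against the Scala `SparkContext` these Java wrappers delegate to (data and names are illustrative):

```scala
import org.apache.spark.SparkContext

val sc = new SparkContext("local", "SharedVariablesSketch")

// Tasks can only add to the accumulator; its value is read on the master.
val badRecords = sc.accumulator(0)
sc.parallelize(Seq("ok", "bad", "ok")).foreach { rec =>
  if (rec == "bad") badRecords += 1
}
println(badRecords.value)  // 1, readable on the master only

// A broadcast variable is sent to each node once and read via .value.
val lookup = sc.broadcast(Map("a" -> 1, "b" -> 2))
val mapped = sc.parallelize(Seq("a", "b")).map(w => lookup.value(w)).collect()
println(mapped.mkString(","))  // 1,2
sc.stop()
```
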
diff --git a/core/src/main/scala/spark/api/java/JavaSparkContextVarargsWorkaround.java b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContextVarargsWorkaround.java
similarity index 98%
rename from core/src/main/scala/spark/api/java/JavaSparkContextVarargsWorkaround.java
rename to core/src/main/scala/org/apache/spark/api/java/JavaSparkContextVarargsWorkaround.java
index 42b1de01b1032114e345a014e38ee65b8244e4b0..c9cbce5624afc3813bbe1e86ef9c8d374d577cc8 100644
--- a/core/src/main/scala/spark/api/java/JavaSparkContextVarargsWorkaround.java
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContextVarargsWorkaround.java
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.api.java;
+package org.apache.spark.api.java;
 
 import java.util.Arrays;
 import java.util.ArrayList;
diff --git a/core/src/main/scala/spark/api/java/JavaUtils.scala b/core/src/main/scala/org/apache/spark/api/java/JavaUtils.scala
similarity index 96%
rename from core/src/main/scala/spark/api/java/JavaUtils.scala
rename to core/src/main/scala/org/apache/spark/api/java/JavaUtils.scala
index ffc131ac83c1ef27ff12b19707ea4eeb8bd742e8..ecbf18849ad4848f2f93f88c8e6e83d7d6d9d477 100644
--- a/core/src/main/scala/spark/api/java/JavaUtils.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaUtils.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.api.java
+package org.apache.spark.api.java
 
 import com.google.common.base.Optional
 
diff --git a/core/src/main/scala/spark/api/java/StorageLevels.java b/core/src/main/scala/org/apache/spark/api/java/StorageLevels.java
similarity index 96%
rename from core/src/main/scala/spark/api/java/StorageLevels.java
rename to core/src/main/scala/org/apache/spark/api/java/StorageLevels.java
index f385636e832f75262d51413761329ce33915e118..0744269773f8d7e8bc47d55bac2bc26b44795079 100644
--- a/core/src/main/scala/spark/api/java/StorageLevels.java
+++ b/core/src/main/scala/org/apache/spark/api/java/StorageLevels.java
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.api.java;
+package org.apache.spark.api.java;
 
-import spark.storage.StorageLevel;
+import org.apache.spark.storage.StorageLevel;
 
 /**
  * Expose some commonly useful storage level constants.
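
`StorageLevels` simply re-exposes constants from the Scala-side `StorageLevel` object that the corrected import now points to. A sketch of choosing an explicit level when persisting (the dataset is illustrative):

```scala
import org.apache.spark.SparkContext
import org.apache.spark.storage.StorageLevel

val sc = new SparkContext("local", "PersistSketch")
val data = sc.parallelize(1 to 1000000)
// Keep partitions in memory, spilling to disk if they do not fit.
data.persist(StorageLevel.MEMORY_AND_DISK)
println(data.count())
sc.stop()
```
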
diff --git a/core/src/main/scala/spark/api/java/function/DoubleFlatMapFunction.java b/core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.java
similarity index 96%
rename from core/src/main/scala/spark/api/java/function/DoubleFlatMapFunction.java
rename to core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.java
index 8bc88d757fd68b4f42da6fd377ee83fb483e988f..4830067f7a0db2a7b483c5c962483a876b0788ff 100644
--- a/core/src/main/scala/spark/api/java/function/DoubleFlatMapFunction.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.java
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.api.java.function;
+package org.apache.spark.api.java.function;
 
 
 import scala.runtime.AbstractFunction1;
diff --git a/core/src/main/scala/spark/api/java/function/DoubleFunction.java b/core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.java
similarity index 96%
rename from core/src/main/scala/spark/api/java/function/DoubleFunction.java
rename to core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.java
index 1aa1e5dae03439be00c8439d4837d8ebd621b6c7..db34cd190ad3a583c73b0a1825f4118970855d03 100644
--- a/core/src/main/scala/spark/api/java/function/DoubleFunction.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.java
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.api.java.function;
+package org.apache.spark.api.java.function;
 
 
 import scala.runtime.AbstractFunction1;
diff --git a/core/src/main/scala/spark/api/java/function/FlatMapFunction.scala b/core/src/main/scala/org/apache/spark/api/java/function/FlatMapFunction.scala
similarity index 96%
rename from core/src/main/scala/spark/api/java/function/FlatMapFunction.scala
rename to core/src/main/scala/org/apache/spark/api/java/function/FlatMapFunction.scala
index 9eb0cfe3f9a275711f094c3e339d2a4ad8acebe5..158539a8461f94aa85ea6440786d50d461436825 100644
--- a/core/src/main/scala/spark/api/java/function/FlatMapFunction.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/function/FlatMapFunction.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.api.java.function
+package org.apache.spark.api.java.function
 
 /**
  * A function that returns zero or more output records from each input record.
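
The comment's contract, that each input record may yield zero, one, or many outputs, is what separates `flatMap` from `map`. A small sketch (inputs are illustrative):

```scala
import org.apache.spark.SparkContext

val sc = new SparkContext("local", "FlatMapSketch")
val lines = sc.parallelize(Seq("to be", "", "or not to be"))
// The empty line contributes zero records; the others one per word.
val words = lines.flatMap(_.split(" ").filter(_.nonEmpty))
println(words.count())  // 6
sc.stop()
```
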
diff --git a/core/src/main/scala/spark/api/java/function/FlatMapFunction2.scala b/core/src/main/scala/org/apache/spark/api/java/function/FlatMapFunction2.scala
similarity index 96%
rename from core/src/main/scala/spark/api/java/function/FlatMapFunction2.scala
rename to core/src/main/scala/org/apache/spark/api/java/function/FlatMapFunction2.scala
index dda98710c2d065d3d94e54ce2662f1f6d2ec9270..5ef6a814f5a4f2aa347dc1f7487b7011308bcfce 100644
--- a/core/src/main/scala/spark/api/java/function/FlatMapFunction2.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/function/FlatMapFunction2.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.api.java.function
+package org.apache.spark.api.java.function
 
 /**
  * A function that takes two inputs and returns zero or more output records.
diff --git a/core/src/main/scala/spark/api/java/function/Function.java b/core/src/main/scala/org/apache/spark/api/java/function/Function.java
similarity index 97%
rename from core/src/main/scala/spark/api/java/function/Function.java
rename to core/src/main/scala/org/apache/spark/api/java/function/Function.java
index 2a2ea0aacf03ccf15d9b01c101a9707a67d7c1ee..b9070cfd837eb68743042ae79500ced87815eb4c 100644
--- a/core/src/main/scala/spark/api/java/function/Function.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/Function.java
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.api.java.function;
+package org.apache.spark.api.java.function;
 
 import scala.reflect.ClassManifest;
 import scala.reflect.ClassManifest$;
diff --git a/core/src/main/scala/spark/api/java/function/Function2.java b/core/src/main/scala/org/apache/spark/api/java/function/Function2.java
similarity index 96%
rename from core/src/main/scala/spark/api/java/function/Function2.java
rename to core/src/main/scala/org/apache/spark/api/java/function/Function2.java
index 952d31ece49167d2521883a6dc6d4077c7623b59..d4c9154869fe9ac14a4432951ca8cb4a490b5eb2 100644
--- a/core/src/main/scala/spark/api/java/function/Function2.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/Function2.java
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.api.java.function;
+package org.apache.spark.api.java.function;
 
 import scala.reflect.ClassManifest;
 import scala.reflect.ClassManifest$;
diff --git a/core/src/main/scala/spark/api/java/function/PairFlatMapFunction.java b/core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.java
similarity index 97%
rename from core/src/main/scala/spark/api/java/function/PairFlatMapFunction.java
rename to core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.java
index 4aad602da3a1c2dcbccea46ed6257edbe7550f73..c0e5544b7dff0d66576243d1e422bf4e57ee15dc 100644
--- a/core/src/main/scala/spark/api/java/function/PairFlatMapFunction.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.java
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.api.java.function;
+package org.apache.spark.api.java.function;
 
 import scala.Tuple2;
 import scala.reflect.ClassManifest;
diff --git a/core/src/main/scala/spark/api/java/function/PairFunction.java b/core/src/main/scala/org/apache/spark/api/java/function/PairFunction.java
similarity index 97%
rename from core/src/main/scala/spark/api/java/function/PairFunction.java
rename to core/src/main/scala/org/apache/spark/api/java/function/PairFunction.java
index ccfe64ecf142f572517ec48952c12771c64073f2..40480fe8e816055494be1b8138a9bb487b357326 100644
--- a/core/src/main/scala/spark/api/java/function/PairFunction.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/PairFunction.java
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.api.java.function;
+package org.apache.spark.api.java.function;
 
 import scala.Tuple2;
 import scala.reflect.ClassManifest;
diff --git a/core/src/main/scala/spark/api/java/function/VoidFunction.scala b/core/src/main/scala/org/apache/spark/api/java/function/VoidFunction.scala
similarity index 96%
rename from core/src/main/scala/spark/api/java/function/VoidFunction.scala
rename to core/src/main/scala/org/apache/spark/api/java/function/VoidFunction.scala
index f6fc0b0f7dbc318903556ddc64b6ea25e9cacd08..ea94313a4ab59d113e69660030c78ba5ddfb45e6 100644
--- a/core/src/main/scala/spark/api/java/function/VoidFunction.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/function/VoidFunction.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.api.java.function
+package org.apache.spark.api.java.function
 
 /**
  * A function with no return value.
diff --git a/core/src/main/scala/spark/api/java/function/WrappedFunction1.scala b/core/src/main/scala/org/apache/spark/api/java/function/WrappedFunction1.scala
similarity index 96%
rename from core/src/main/scala/spark/api/java/function/WrappedFunction1.scala
rename to core/src/main/scala/org/apache/spark/api/java/function/WrappedFunction1.scala
index 1758a38c4e21cb5147048a28d7c3cdb92408dd82..cfe694f65d558a16ea5cb16732904c260ef95de7 100644
--- a/core/src/main/scala/spark/api/java/function/WrappedFunction1.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/function/WrappedFunction1.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.api.java.function
+package org.apache.spark.api.java.function
 
 import scala.runtime.AbstractFunction1
 
diff --git a/core/src/main/scala/spark/api/java/function/WrappedFunction2.scala b/core/src/main/scala/org/apache/spark/api/java/function/WrappedFunction2.scala
similarity index 96%
rename from core/src/main/scala/spark/api/java/function/WrappedFunction2.scala
rename to core/src/main/scala/org/apache/spark/api/java/function/WrappedFunction2.scala
index b093567d2c1bdf19e8e925ab97fa90cc58eb084c..eb9277c6fb4cba62577e8751b89387960c6236b2 100644
--- a/core/src/main/scala/spark/api/java/function/WrappedFunction2.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/function/WrappedFunction2.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.api.java.function
+package org.apache.spark.api.java.function
 
 import scala.runtime.AbstractFunction2
 
diff --git a/core/src/main/scala/spark/api/python/PythonPartitioner.scala b/core/src/main/scala/org/apache/spark/api/python/PythonPartitioner.scala
similarity index 89%
rename from core/src/main/scala/spark/api/python/PythonPartitioner.scala
rename to core/src/main/scala/org/apache/spark/api/python/PythonPartitioner.scala
index ac112b8c2c773078983d4acab3c894e53a5b9021..b090c6edf3c07adad0e796000cbc81194ebae6be 100644
--- a/core/src/main/scala/spark/api/python/PythonPartitioner.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonPartitioner.scala
@@ -15,14 +15,14 @@
  * limitations under the License.
  */
 
-package spark.api.python
+package org.apache.spark.api.python
 
-import spark.Partitioner
-import spark.Utils
+import org.apache.spark.Partitioner
 import java.util.Arrays
+import org.apache.spark.util.Utils
 
 /**
- * A [[spark.Partitioner]] that performs handling of byte arrays, for use by the Python API.
+ * A [[org.apache.spark.Partitioner]] that handles byte arrays, for use by the Python API.
  *
  * Stores the unique id() of the Python-side partitioning function so that it is incorporated into
  * equality comparisons.  Correctness requires that the id is a unique identifier for the
diff --git a/core/src/main/scala/spark/api/python/PythonRDD.scala b/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
similarity index 96%
rename from core/src/main/scala/spark/api/python/PythonRDD.scala
rename to core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
index 49671437d041b329c910de6ff1cc5360c45f66a3..ccd383396460254d5ba433d0c654004f8a5d5a23 100644
--- a/core/src/main/scala/spark/api/python/PythonRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.api.python
+package org.apache.spark.api.python
 
 import java.io._
 import java.net._
@@ -23,10 +23,12 @@ import java.util.{List => JList, ArrayList => JArrayList, Map => JMap, Collectio
 
 import scala.collection.JavaConversions._
 
-import spark.api.java.{JavaSparkContext, JavaPairRDD, JavaRDD}
-import spark.broadcast.Broadcast
-import spark._
-import spark.rdd.PipedRDD
+import org.apache.spark.api.java.{JavaSparkContext, JavaPairRDD, JavaRDD}
+import org.apache.spark.broadcast.Broadcast
+import org.apache.spark._
+import org.apache.spark.rdd.RDD
+import org.apache.spark.rdd.PipedRDD
+import org.apache.spark.util.Utils
 
 
 private[spark] class PythonRDD[T: ClassManifest](
@@ -298,7 +300,7 @@ private object Pickle {
   val APPENDS: Byte = 'e'
 }
 
-private class BytesToString extends spark.api.java.function.Function[Array[Byte], String] {
+private class BytesToString extends org.apache.spark.api.java.function.Function[Array[Byte], String] {
   override def call(arr: Array[Byte]) : String = new String(arr, "UTF-8")
 }
 
diff --git a/core/src/main/scala/spark/api/python/PythonWorkerFactory.scala b/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
similarity index 98%
rename from core/src/main/scala/spark/api/python/PythonWorkerFactory.scala
rename to core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
index 14f8320678217e3e79ed1c42fc0109bd7ea4422f..08e3f670f57f6ea63feeb59b2b2a8a397acda394 100644
--- a/core/src/main/scala/spark/api/python/PythonWorkerFactory.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
@@ -15,14 +15,14 @@
  * limitations under the License.
  */
 
-package spark.api.python
+package org.apache.spark.api.python
 
 import java.io.{File, DataInputStream, IOException}
 import java.net.{Socket, SocketException, InetAddress}
 
 import scala.collection.JavaConversions._
 
-import spark._
+import org.apache.spark._
 
 private[spark] class PythonWorkerFactory(pythonExec: String, envVars: Map[String, String])
     extends Logging {
diff --git a/core/src/main/scala/spark/broadcast/BitTorrentBroadcast.scala b/core/src/main/scala/org/apache/spark/broadcast/BitTorrentBroadcast.scala
similarity index 99%
rename from core/src/main/scala/spark/broadcast/BitTorrentBroadcast.scala
rename to core/src/main/scala/org/apache/spark/broadcast/BitTorrentBroadcast.scala
index 6f7d385379880ae0a5b45b213f1b08ee8dd4164a..93e7815ab515a29846f5fd939f02560417d0cb82 100644
--- a/core/src/main/scala/spark/broadcast/BitTorrentBroadcast.scala
+++ b/core/src/main/scala/org/apache/spark/broadcast/BitTorrentBroadcast.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.broadcast
+package org.apache.spark.broadcast
 
 import java.io._
 import java.net._
@@ -25,8 +25,9 @@ import java.util.concurrent.atomic.AtomicInteger
 import scala.collection.mutable.{ListBuffer, Map, Set}
 import scala.math
 
-import spark._
-import spark.storage.StorageLevel
+import org.apache.spark._
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.util.Utils
 
 private[spark] class BitTorrentBroadcast[T](@transient var value_ : T, isLocal: Boolean, id: Long)
   extends Broadcast[T](id)
diff --git a/core/src/main/scala/spark/broadcast/Broadcast.scala b/core/src/main/scala/org/apache/spark/broadcast/Broadcast.scala
similarity index 91%
rename from core/src/main/scala/spark/broadcast/Broadcast.scala
rename to core/src/main/scala/org/apache/spark/broadcast/Broadcast.scala
index aba56a60cade240cb8cc749168054383d7023f09..43c18294c552b69a77e404ca3511634b60a94612 100644
--- a/core/src/main/scala/spark/broadcast/Broadcast.scala
+++ b/core/src/main/scala/org/apache/spark/broadcast/Broadcast.scala
@@ -15,12 +15,12 @@
  * limitations under the License.
  */
 
-package spark.broadcast
+package org.apache.spark.broadcast
 
 import java.io._
 import java.util.concurrent.atomic.AtomicLong
 
-import spark._
+import org.apache.spark._
 
 abstract class Broadcast[T](private[spark] val id: Long) extends Serializable {
   def value: T
@@ -28,7 +28,7 @@ abstract class Broadcast[T](private[spark] val id: Long) extends Serializable {
   // We cannot have an abstract readObject here due to some weird issues with
   // readObject having to be 'private' in sub-classes.
 
-  override def toString = "spark.Broadcast(" + id + ")"
+  override def toString = "Broadcast(" + id + ")"
 }
 
 private[spark] 
@@ -44,7 +44,7 @@ class BroadcastManager(val _isDriver: Boolean) extends Logging with Serializable
     synchronized {
       if (!initialized) {
         val broadcastFactoryClass = System.getProperty(
-          "spark.broadcast.factory", "spark.broadcast.HttpBroadcastFactory")
+          "spark.broadcast.factory", "org.apache.spark.broadcast.HttpBroadcastFactory")
 
         broadcastFactory =
           Class.forName(broadcastFactoryClass).newInstance.asInstanceOf[BroadcastFactory]
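
Note the asymmetry in this hunk: the configuration key keeps its historical `spark.broadcast.factory` name, while its default value has to follow the class into `org.apache.spark.broadcast`. A sketch of overriding that default from a driver program; `BitTorrentBroadcastFactory` is assumed here to be the alternative factory defined alongside `BitTorrentBroadcast` in this patch:

```scala
import org.apache.spark.SparkContext

// Must be set before the SparkContext (and thus the BroadcastManager) starts.
System.setProperty("spark.broadcast.factory",
  "org.apache.spark.broadcast.BitTorrentBroadcastFactory")

val sc = new SparkContext("local", "BroadcastFactorySketch")
val big = sc.broadcast(Array.fill(1 << 20)(1.0))  // a value worth broadcasting
println(big.value.length)
sc.stop()
```
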
diff --git a/core/src/main/scala/spark/broadcast/BroadcastFactory.scala b/core/src/main/scala/org/apache/spark/broadcast/BroadcastFactory.scala
similarity index 97%
rename from core/src/main/scala/spark/broadcast/BroadcastFactory.scala
rename to core/src/main/scala/org/apache/spark/broadcast/BroadcastFactory.scala
index d33d95c7d95e8031bbe147626265bd7bf66653e7..68bff75b908c73fe11dea8a46d5ccff51258a6a3 100644
--- a/core/src/main/scala/spark/broadcast/BroadcastFactory.scala
+++ b/core/src/main/scala/org/apache/spark/broadcast/BroadcastFactory.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.broadcast
+package org.apache.spark.broadcast
 
 /**
  * An interface for all the broadcast implementations in Spark (to allow 
diff --git a/core/src/main/scala/spark/broadcast/HttpBroadcast.scala b/core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala
similarity index 95%
rename from core/src/main/scala/spark/broadcast/HttpBroadcast.scala
rename to core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala
index 138a8c21bcfeb09bed73fc9cd77a435218f527f6..9db26ae6de681d902248a13ebb9a8e2247ad33cf 100644
--- a/core/src/main/scala/spark/broadcast/HttpBroadcast.scala
+++ b/core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.broadcast
+package org.apache.spark.broadcast
 
 import java.io.{File, FileOutputStream, ObjectInputStream, OutputStream}
 import java.net.URL
@@ -23,10 +23,10 @@ import java.net.URL
 import it.unimi.dsi.fastutil.io.FastBufferedInputStream
 import it.unimi.dsi.fastutil.io.FastBufferedOutputStream
 
-import spark.{HttpServer, Logging, SparkEnv, Utils}
-import spark.io.CompressionCodec
-import spark.storage.StorageLevel
-import spark.util.{MetadataCleaner, TimeStampedHashSet}
+import org.apache.spark.{HttpServer, Logging, SparkEnv}
+import org.apache.spark.io.CompressionCodec
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.util.{Utils, MetadataCleaner, TimeStampedHashSet}
 
 
 private[spark] class HttpBroadcast[T](@transient var value_ : T, isLocal: Boolean, id: Long)
diff --git a/core/src/main/scala/spark/broadcast/MultiTracker.scala b/core/src/main/scala/org/apache/spark/broadcast/MultiTracker.scala
similarity index 99%
rename from core/src/main/scala/spark/broadcast/MultiTracker.scala
rename to core/src/main/scala/org/apache/spark/broadcast/MultiTracker.scala
index 7855d44e9b0a603f49944896ed0ef5386a375568..21ec94659e6011e63b8164b2ec53b91d6effb83c 100644
--- a/core/src/main/scala/spark/broadcast/MultiTracker.scala
+++ b/core/src/main/scala/org/apache/spark/broadcast/MultiTracker.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.broadcast
+package org.apache.spark.broadcast
 
 import java.io._
 import java.net._
@@ -23,7 +23,8 @@ import java.util.Random
 
 import scala.collection.mutable.Map
 
-import spark._
+import org.apache.spark._
+import org.apache.spark.util.Utils
 
 private object MultiTracker
 extends Logging {
diff --git a/core/src/main/scala/spark/broadcast/SourceInfo.scala b/core/src/main/scala/org/apache/spark/broadcast/SourceInfo.scala
similarity index 96%
rename from core/src/main/scala/spark/broadcast/SourceInfo.scala
rename to core/src/main/scala/org/apache/spark/broadcast/SourceInfo.scala
index b17ae63b5c43b8ad83bfb093a2cc02967cb5a133..baa1fd6da46e820ed1d706198d4ab742bb4c19b6 100644
--- a/core/src/main/scala/spark/broadcast/SourceInfo.scala
+++ b/core/src/main/scala/org/apache/spark/broadcast/SourceInfo.scala
@@ -15,11 +15,11 @@
  * limitations under the License.
  */
 
-package spark.broadcast
+package org.apache.spark.broadcast
 
 import java.util.BitSet
 
-import spark._
+import org.apache.spark._
 
 /**
  * Used to keep and pass around information of peers involved in a broadcast
diff --git a/core/src/main/scala/spark/broadcast/TreeBroadcast.scala b/core/src/main/scala/org/apache/spark/broadcast/TreeBroadcast.scala
similarity index 99%
rename from core/src/main/scala/spark/broadcast/TreeBroadcast.scala
rename to core/src/main/scala/org/apache/spark/broadcast/TreeBroadcast.scala
index ea1e9a12c1bfdaacf408986e8d744e761437368b..80c97ca073ecc6c944f4861fb34156a5fab199dd 100644
--- a/core/src/main/scala/spark/broadcast/TreeBroadcast.scala
+++ b/core/src/main/scala/org/apache/spark/broadcast/TreeBroadcast.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.broadcast
+package org.apache.spark.broadcast
 
 import java.io._
 import java.net._
@@ -24,8 +24,9 @@ import java.util.{Comparator, Random, UUID}
 import scala.collection.mutable.{ListBuffer, Map, Set}
 import scala.math
 
-import spark._
-import spark.storage.StorageLevel
+import org.apache.spark._
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.util.Utils
 
 private[spark] class TreeBroadcast[T](@transient var value_ : T, isLocal: Boolean, id: Long)
 extends Broadcast[T](id) with Logging with Serializable {
diff --git a/core/src/main/scala/spark/deploy/ApplicationDescription.scala b/core/src/main/scala/org/apache/spark/deploy/ApplicationDescription.scala
similarity index 97%
rename from core/src/main/scala/spark/deploy/ApplicationDescription.scala
rename to core/src/main/scala/org/apache/spark/deploy/ApplicationDescription.scala
index a8b22fbef877771b306fcf6cde1332d7488c4514..19d393a0dbce7ac92f58a91be04334fd47fe02a4 100644
--- a/core/src/main/scala/spark/deploy/ApplicationDescription.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/ApplicationDescription.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.deploy
+package org.apache.spark.deploy
 
 private[spark] class ApplicationDescription(
     val name: String,
diff --git a/core/src/main/scala/spark/deploy/Command.scala b/core/src/main/scala/org/apache/spark/deploy/Command.scala
similarity index 96%
rename from core/src/main/scala/spark/deploy/Command.scala
rename to core/src/main/scala/org/apache/spark/deploy/Command.scala
index bad629e96529c946482e011e8024c4a23531af67..fa8af9a6467509a35f9c58d7b98782abf358aec9 100644
--- a/core/src/main/scala/spark/deploy/Command.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/Command.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.deploy
+package org.apache.spark.deploy
 
 import scala.collection.Map
 
diff --git a/core/src/main/scala/spark/deploy/DeployMessage.scala b/core/src/main/scala/org/apache/spark/deploy/DeployMessage.scala
similarity index 93%
rename from core/src/main/scala/spark/deploy/DeployMessage.scala
rename to core/src/main/scala/org/apache/spark/deploy/DeployMessage.scala
index 0db13ffc98723ff59183e02212a1e13a5575a5b9..c31619db279ddc7c97f3d51c8f7c446b7ed7ba59 100644
--- a/core/src/main/scala/spark/deploy/DeployMessage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/DeployMessage.scala
@@ -15,14 +15,14 @@
  * limitations under the License.
  */
 
-package spark.deploy
+package org.apache.spark.deploy
 
 import scala.collection.immutable.List
 
-import spark.Utils
-import spark.deploy.ExecutorState.ExecutorState
-import spark.deploy.master.{WorkerInfo, ApplicationInfo}
-import spark.deploy.worker.ExecutorRunner
+import org.apache.spark.deploy.ExecutorState.ExecutorState
+import org.apache.spark.deploy.master.{WorkerInfo, ApplicationInfo}
+import org.apache.spark.deploy.worker.ExecutorRunner
+import org.apache.spark.util.Utils
 
 
 private[deploy] sealed trait DeployMessage extends Serializable
diff --git a/core/src/main/scala/spark/deploy/ExecutorState.scala b/core/src/main/scala/org/apache/spark/deploy/ExecutorState.scala
similarity index 97%
rename from core/src/main/scala/spark/deploy/ExecutorState.scala
rename to core/src/main/scala/org/apache/spark/deploy/ExecutorState.scala
index 08c9a3b7252b53a087d84f987d99aeac9c217027..fcfea96ad60b88f5863d0323d9019f91052082ee 100644
--- a/core/src/main/scala/spark/deploy/ExecutorState.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/ExecutorState.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.deploy
+package org.apache.spark.deploy
 
 private[spark] object ExecutorState
   extends Enumeration("LAUNCHING", "LOADING", "RUNNING", "KILLED", "FAILED", "LOST") {
diff --git a/core/src/main/scala/spark/deploy/JsonProtocol.scala b/core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala
similarity index 92%
rename from core/src/main/scala/spark/deploy/JsonProtocol.scala
rename to core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala
index f8dcf025b44ecc4e0c65bb367199939cce5cbc4e..a6be8efef18f6e44ae681b58864ec5dd121d0e69 100644
--- a/core/src/main/scala/spark/deploy/JsonProtocol.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala
@@ -15,13 +15,13 @@
  * limitations under the License.
  */
 
-package spark.deploy
+package org.apache.spark.deploy
 
 import net.liftweb.json.JsonDSL._
 
-import spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse}
-import spark.deploy.master.{ApplicationInfo, WorkerInfo}
-import spark.deploy.worker.ExecutorRunner
+import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse}
+import org.apache.spark.deploy.master.{ApplicationInfo, WorkerInfo}
+import org.apache.spark.deploy.worker.ExecutorRunner
 
 
 private[spark] object JsonProtocol {
diff --git a/core/src/main/scala/spark/deploy/LocalSparkCluster.scala b/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala
similarity index 92%
rename from core/src/main/scala/spark/deploy/LocalSparkCluster.scala
rename to core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala
index 6b8e9f27af7c8d779a5467ac66f54631d3675030..78e3747ad81669a4ff3b6f5080067d7070860f94 100644
--- a/core/src/main/scala/spark/deploy/LocalSparkCluster.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala
@@ -15,14 +15,14 @@
  * limitations under the License.
  */
 
-package spark.deploy
+package org.apache.spark.deploy
 
 import akka.actor.{ActorRef, Props, Actor, ActorSystem, Terminated}
 
-import spark.deploy.worker.Worker
-import spark.deploy.master.Master
-import spark.util.AkkaUtils
-import spark.{Logging, Utils}
+import org.apache.spark.deploy.worker.Worker
+import org.apache.spark.deploy.master.Master
+import org.apache.spark.util.{Utils, AkkaUtils}
+import org.apache.spark.Logging
 
 import scala.collection.mutable.ArrayBuffer
 
diff --git a/core/src/main/scala/spark/deploy/SparkHadoopUtil.scala b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
similarity index 97%
rename from core/src/main/scala/spark/deploy/SparkHadoopUtil.scala
rename to core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
index 882161e66997005b61ea3120ffee0023f58bd010..0a5f4c368f3d1ef12374f5842f4bd7b0d0354817 100644
--- a/core/src/main/scala/spark/deploy/SparkHadoopUtil.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.deploy
+package org.apache.spark.deploy
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.mapred.JobConf
 
diff --git a/core/src/main/scala/spark/deploy/WebUI.scala b/core/src/main/scala/org/apache/spark/deploy/WebUI.scala
similarity index 98%
rename from core/src/main/scala/spark/deploy/WebUI.scala
rename to core/src/main/scala/org/apache/spark/deploy/WebUI.scala
index 8ea7792ef4ecef106796d39a15881ce8c822cfb2..ae258b58b9cc5554ecc883ade04f9aa87fc98c5e 100644
--- a/core/src/main/scala/spark/deploy/WebUI.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/WebUI.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.deploy
+package org.apache.spark.deploy
 
 import java.text.SimpleDateFormat
 import java.util.Date
diff --git a/core/src/main/scala/spark/deploy/client/Client.scala b/core/src/main/scala/org/apache/spark/deploy/client/Client.scala
similarity index 95%
rename from core/src/main/scala/spark/deploy/client/Client.scala
rename to core/src/main/scala/org/apache/spark/deploy/client/Client.scala
index 9d5ba8a796013a0ffb5ceeab0eb3fe1461811560..a342dd724a8ed74bbc1b80bc5d2eb309ded1e7c6 100644
--- a/core/src/main/scala/spark/deploy/client/Client.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/client/Client.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.deploy.client
+package org.apache.spark.deploy.client
 
 import java.util.concurrent.TimeoutException
 
@@ -28,10 +28,10 @@ import akka.remote.RemoteClientLifeCycleEvent
 import akka.remote.RemoteClientShutdown
 import akka.dispatch.Await
 
-import spark.Logging
-import spark.deploy.{ApplicationDescription, ExecutorState}
-import spark.deploy.DeployMessages._
-import spark.deploy.master.Master
+import org.apache.spark.Logging
+import org.apache.spark.deploy.{ApplicationDescription, ExecutorState}
+import org.apache.spark.deploy.DeployMessages._
+import org.apache.spark.deploy.master.Master
 
 
 /**
diff --git a/core/src/main/scala/spark/deploy/client/ClientListener.scala b/core/src/main/scala/org/apache/spark/deploy/client/ClientListener.scala
similarity index 97%
rename from core/src/main/scala/spark/deploy/client/ClientListener.scala
rename to core/src/main/scala/org/apache/spark/deploy/client/ClientListener.scala
index 064024455ee068ce8a43c52777b5f12c56d280bc..4605368c1177f56639356639f3aae9951aaee06e 100644
--- a/core/src/main/scala/spark/deploy/client/ClientListener.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/client/ClientListener.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.deploy.client
+package org.apache.spark.deploy.client
 
 /**
  * Callbacks invoked by deploy client when various events happen. There are currently four events:
diff --git a/core/src/main/scala/spark/deploy/client/TestClient.scala b/core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala
similarity index 90%
rename from core/src/main/scala/spark/deploy/client/TestClient.scala
rename to core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala
index 4f4daa141a4f248652ff51fe949c32b0938bcfd8..d5e9a0e09575844dc9138f1dd43dc507275529ad 100644
--- a/core/src/main/scala/spark/deploy/client/TestClient.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala
@@ -15,11 +15,11 @@
  * limitations under the License.
  */
 
-package spark.deploy.client
+package org.apache.spark.deploy.client
 
-import spark.util.AkkaUtils
-import spark.{Logging, Utils}
-import spark.deploy.{Command, ApplicationDescription}
+import org.apache.spark.util.{Utils, AkkaUtils}
+import org.apache.spark.Logging
+import org.apache.spark.deploy.{Command, ApplicationDescription}
 
 private[spark] object TestClient {
 
diff --git a/core/src/main/scala/spark/deploy/client/TestExecutor.scala b/core/src/main/scala/org/apache/spark/deploy/client/TestExecutor.scala
similarity index 96%
rename from core/src/main/scala/spark/deploy/client/TestExecutor.scala
rename to core/src/main/scala/org/apache/spark/deploy/client/TestExecutor.scala
index 8a22b6b89fa8e6af278ac5bd8ae78f844ef8833f..c5ac45c6730d3f45f41b200d88ee62f96d16c0c1 100644
--- a/core/src/main/scala/spark/deploy/client/TestExecutor.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/client/TestExecutor.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.deploy.client
+package org.apache.spark.deploy.client
 
 private[spark] object TestExecutor {
   def main(args: Array[String]) {
diff --git a/core/src/main/scala/spark/deploy/master/ApplicationInfo.scala b/core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala
similarity index 96%
rename from core/src/main/scala/spark/deploy/master/ApplicationInfo.scala
rename to core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala
index 6dd2f06126527dd5c3dae462b07f9978fc23f502..bd5327627a832fc3b28202920f889bbd421d7ec5 100644
--- a/core/src/main/scala/spark/deploy/master/ApplicationInfo.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.deploy.master
+package org.apache.spark.deploy.master
 
-import spark.deploy.ApplicationDescription
+import org.apache.spark.deploy.ApplicationDescription
 import java.util.Date
 import akka.actor.ActorRef
 import scala.collection.mutable
diff --git a/core/src/main/scala/spark/deploy/master/ApplicationSource.scala b/core/src/main/scala/org/apache/spark/deploy/master/ApplicationSource.scala
similarity index 89%
rename from core/src/main/scala/spark/deploy/master/ApplicationSource.scala
rename to core/src/main/scala/org/apache/spark/deploy/master/ApplicationSource.scala
index 4df2b6bfddd7503ef4b5b59b8e9b3c660ddfd3ec..2d75ad5a2c9af56689a726d9632d6ebbff12bcce 100644
--- a/core/src/main/scala/spark/deploy/master/ApplicationSource.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ApplicationSource.scala
@@ -1,8 +1,8 @@
-package spark.deploy.master
+package org.apache.spark.deploy.master
 
 import com.codahale.metrics.{Gauge, MetricRegistry}
 
-import spark.metrics.source.Source
+import org.apache.spark.metrics.source.Source
 
 class ApplicationSource(val application: ApplicationInfo) extends Source {
   val metricRegistry = new MetricRegistry()
diff --git a/core/src/main/scala/spark/deploy/master/ApplicationState.scala b/core/src/main/scala/org/apache/spark/deploy/master/ApplicationState.scala
similarity index 96%
rename from core/src/main/scala/spark/deploy/master/ApplicationState.scala
rename to core/src/main/scala/org/apache/spark/deploy/master/ApplicationState.scala
index 94f0ad8baec476000a17c31360530872ca9035a3..7e804223cf48a6459a7c60caa50cff33e7675d89 100644
--- a/core/src/main/scala/spark/deploy/master/ApplicationState.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ApplicationState.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.deploy.master
+package org.apache.spark.deploy.master
 
 private[spark] object ApplicationState
   extends Enumeration("WAITING", "RUNNING", "FINISHED", "FAILED") {
diff --git a/core/src/main/scala/spark/deploy/master/ExecutorInfo.scala b/core/src/main/scala/org/apache/spark/deploy/master/ExecutorInfo.scala
similarity index 92%
rename from core/src/main/scala/spark/deploy/master/ExecutorInfo.scala
rename to core/src/main/scala/org/apache/spark/deploy/master/ExecutorInfo.scala
index 99b60f7d092a42c72ea0649e9667d111c34394fd..cf384a985e90ede61afa0eb7267d636af0eada5e 100644
--- a/core/src/main/scala/spark/deploy/master/ExecutorInfo.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ExecutorInfo.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.deploy.master
+package org.apache.spark.deploy.master
 
-import spark.deploy.ExecutorState
+import org.apache.spark.deploy.ExecutorState
 
 private[spark] class ExecutorInfo(
     val id: Int,
diff --git a/core/src/main/scala/spark/deploy/master/Master.scala b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
similarity index 97%
rename from core/src/main/scala/spark/deploy/master/Master.scala
rename to core/src/main/scala/org/apache/spark/deploy/master/Master.scala
index 04af5e149c62d3431c27095ac66ff3152c431e91..7cf0a7754f78cdf05e5c50252fe5cd98f64322e3 100644
--- a/core/src/main/scala/spark/deploy/master/Master.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.deploy.master
+package org.apache.spark.deploy.master
 
 import java.text.SimpleDateFormat
 import java.util.Date
@@ -27,12 +27,12 @@ import akka.actor.Terminated
 import akka.remote.{RemoteClientLifeCycleEvent, RemoteClientDisconnected, RemoteClientShutdown}
 import akka.util.duration._
 
-import spark.{Logging, SparkException, Utils}
-import spark.deploy.{ApplicationDescription, ExecutorState}
-import spark.deploy.DeployMessages._
-import spark.deploy.master.ui.MasterWebUI
-import spark.metrics.MetricsSystem
-import spark.util.AkkaUtils
+import org.apache.spark.{Logging, SparkException}
+import org.apache.spark.deploy.{ApplicationDescription, ExecutorState}
+import org.apache.spark.deploy.DeployMessages._
+import org.apache.spark.deploy.master.ui.MasterWebUI
+import org.apache.spark.metrics.MetricsSystem
+import org.apache.spark.util.{Utils, AkkaUtils}
 
 
 private[spark] class Master(host: String, port: Int, webUiPort: Int) extends Actor with Logging {
diff --git a/core/src/main/scala/spark/deploy/master/MasterArguments.scala b/core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala
similarity index 96%
rename from core/src/main/scala/spark/deploy/master/MasterArguments.scala
rename to core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala
index 0ae01607673797263aa2844771d3b42d86a2a1ae..9d89b455fb961e6ddd1d9bc0883b408b3516ab38 100644
--- a/core/src/main/scala/spark/deploy/master/MasterArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala
@@ -15,10 +15,9 @@
  * limitations under the License.
  */
 
-package spark.deploy.master
+package org.apache.spark.deploy.master
 
-import spark.util.IntParam
-import spark.Utils
+import org.apache.spark.util.{Utils, IntParam}
 
 /**
  * Command-line parser for the master.
diff --git a/core/src/main/scala/spark/deploy/master/MasterSource.scala b/core/src/main/scala/org/apache/spark/deploy/master/MasterSource.scala
similarity index 90%
rename from core/src/main/scala/spark/deploy/master/MasterSource.scala
rename to core/src/main/scala/org/apache/spark/deploy/master/MasterSource.scala
index b8cfa6a7736841cd582ae5a8f2aa2fe71506f578..8dd0a42f717c7a2ddc278baa1e9c60ba3101f21a 100644
--- a/core/src/main/scala/spark/deploy/master/MasterSource.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/MasterSource.scala
@@ -1,8 +1,8 @@
-package spark.deploy.master
+package org.apache.spark.deploy.master
 
 import com.codahale.metrics.{Gauge, MetricRegistry}
 
-import spark.metrics.source.Source
+import org.apache.spark.metrics.source.Source
 
 private[spark] class MasterSource(val master: Master) extends Source {
   val metricRegistry = new MetricRegistry()
diff --git a/core/src/main/scala/spark/deploy/master/WorkerInfo.scala b/core/src/main/scala/org/apache/spark/deploy/master/WorkerInfo.scala
similarity index 96%
rename from core/src/main/scala/spark/deploy/master/WorkerInfo.scala
rename to core/src/main/scala/org/apache/spark/deploy/master/WorkerInfo.scala
index 4135cfeb2843f8d9a385b9830422cac428f0ce35..6219f11f2a2b2c3b7b02bc7a56b7643ca8b1020f 100644
--- a/core/src/main/scala/spark/deploy/master/WorkerInfo.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/WorkerInfo.scala
@@ -15,11 +15,11 @@
  * limitations under the License.
  */
 
-package spark.deploy.master
+package org.apache.spark.deploy.master
 
 import akka.actor.ActorRef
 import scala.collection.mutable
-import spark.Utils
+import org.apache.spark.util.Utils
 
 private[spark] class WorkerInfo(
   val id: String,
diff --git a/core/src/main/scala/spark/deploy/master/WorkerState.scala b/core/src/main/scala/org/apache/spark/deploy/master/WorkerState.scala
similarity index 96%
rename from core/src/main/scala/spark/deploy/master/WorkerState.scala
rename to core/src/main/scala/org/apache/spark/deploy/master/WorkerState.scala
index 3e50b7748d6683c69fb714cb45f611397431c139..b5ee6dca79fab36aeace009d436d7fd1ea69e481 100644
--- a/core/src/main/scala/spark/deploy/master/WorkerState.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/WorkerState.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.deploy.master
+package org.apache.spark.deploy.master
 
 private[spark] object WorkerState extends Enumeration("ALIVE", "DEAD", "DECOMMISSIONED") {
   type WorkerState = Value
diff --git a/core/src/main/scala/spark/deploy/master/ui/ApplicationPage.scala b/core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala
similarity index 93%
rename from core/src/main/scala/spark/deploy/master/ui/ApplicationPage.scala
rename to core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala
index 2ad98f759cb8a9afca50ce5130f5430765b33cd3..f4e574d15dbc38a6db72477c2d459ca00927c6b9 100644
--- a/core/src/main/scala/spark/deploy/master/ui/ApplicationPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.deploy.master.ui
+package org.apache.spark.deploy.master.ui
 
 import scala.xml.Node
 
@@ -27,11 +27,11 @@ import javax.servlet.http.HttpServletRequest
 
 import net.liftweb.json.JsonAST.JValue
 
-import spark.deploy.DeployMessages.{MasterStateResponse, RequestMasterState}
-import spark.deploy.JsonProtocol
-import spark.deploy.master.ExecutorInfo
-import spark.ui.UIUtils
-import spark.Utils
+import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, RequestMasterState}
+import org.apache.spark.deploy.JsonProtocol
+import org.apache.spark.deploy.master.ExecutorInfo
+import org.apache.spark.ui.UIUtils
+import org.apache.spark.util.Utils
 
 private[spark] class ApplicationPage(parent: MasterWebUI) {
   val master = parent.masterActorRef
diff --git a/core/src/main/scala/spark/deploy/master/ui/IndexPage.scala b/core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala
similarity index 92%
rename from core/src/main/scala/spark/deploy/master/ui/IndexPage.scala
rename to core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala
index 093e523e2392eb52e16900f551b240e68c221b02..d7a57229b00633292f5ad0b0e64d19d343a39b22 100644
--- a/core/src/main/scala/spark/deploy/master/ui/IndexPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.deploy.master.ui
+package org.apache.spark.deploy.master.ui
 
 import javax.servlet.http.HttpServletRequest
 
@@ -27,12 +27,12 @@ import akka.util.duration._
 
 import net.liftweb.json.JsonAST.JValue
 
-import spark.Utils
-import spark.deploy.DeployWebUI
-import spark.deploy.DeployMessages.{MasterStateResponse, RequestMasterState}
-import spark.deploy.JsonProtocol
-import spark.deploy.master.{ApplicationInfo, WorkerInfo}
-import spark.ui.UIUtils
+import org.apache.spark.deploy.DeployWebUI
+import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, RequestMasterState}
+import org.apache.spark.deploy.JsonProtocol
+import org.apache.spark.deploy.master.{ApplicationInfo, WorkerInfo}
+import org.apache.spark.ui.UIUtils
+import org.apache.spark.util.Utils
 
 private[spark] class IndexPage(parent: MasterWebUI) {
   val master = parent.masterActorRef
diff --git a/core/src/main/scala/spark/deploy/master/ui/MasterWebUI.scala b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
similarity index 89%
rename from core/src/main/scala/spark/deploy/master/ui/MasterWebUI.scala
rename to core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
index c91e1db9f24e67ecc59dbea91a7458d8d3a8c4a0..f4df729e87137d5b743c5f1e13bd3be109c9aa73 100644
--- a/core/src/main/scala/spark/deploy/master/ui/MasterWebUI.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.deploy.master.ui
+package org.apache.spark.deploy.master.ui
 
 import akka.util.Duration
 
@@ -23,10 +23,11 @@ import javax.servlet.http.HttpServletRequest
 
 import org.eclipse.jetty.server.{Handler, Server}
 
-import spark.{Logging, Utils}
-import spark.deploy.master.Master
-import spark.ui.JettyUtils
-import spark.ui.JettyUtils._
+import org.apache.spark.Logging
+import org.apache.spark.deploy.master.Master
+import org.apache.spark.ui.JettyUtils
+import org.apache.spark.ui.JettyUtils._
+import org.apache.spark.util.Utils
 
 /**
  * Web UI server for the standalone master.
@@ -76,5 +77,5 @@ class MasterWebUI(val master: Master, requestedPort: Int) extends Logging {
 }
 
 private[spark] object MasterWebUI {
-  val STATIC_RESOURCE_DIR = "spark/ui/static"
+  val STATIC_RESOURCE_DIR = "org/apache/spark/ui/static"
 }
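
`STATIC_RESOURCE_DIR` is a classloader path, not a filesystem one, so it has to track the physical move of the UI resources to `org/apache/spark/ui/static`. A sketch of the underlying lookup; the stylesheet name is hypothetical, purely to illustrate the mechanism:

```scala
// Resolve a UI asset the way a classpath-backed resource handler does.
val staticDir = "org/apache/spark/ui/static"
val res = getClass.getClassLoader.getResource(staticDir + "/example.css")
println(if (res == null) "not on classpath (expected outside a Spark build)"
        else res.toString)
```
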
diff --git a/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala b/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
similarity index 96%
rename from core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala
rename to core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
index 34665ce451a4a1c1e891e08b7493456a44c3272e..e3dc30eefc56f951e69aa186982c0ddfe03573d2 100644
--- a/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.deploy.worker
+package org.apache.spark.deploy.worker
 
 import java.io._
 import java.lang.System.getenv
@@ -25,9 +25,10 @@ import akka.actor.ActorRef
 import com.google.common.base.Charsets
 import com.google.common.io.Files
 
-import spark.{Utils, Logging}
-import spark.deploy.{ExecutorState, ApplicationDescription}
-import spark.deploy.DeployMessages.ExecutorStateChanged
+import org.apache.spark.Logging
+import org.apache.spark.deploy.{ExecutorState, ApplicationDescription}
+import org.apache.spark.deploy.DeployMessages.ExecutorStateChanged
+import org.apache.spark.util.Utils
 
 /**
  * Manages the execution of one executor process.
diff --git a/core/src/main/scala/spark/deploy/worker/Worker.scala b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
similarity index 95%
rename from core/src/main/scala/spark/deploy/worker/Worker.scala
rename to core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
index 053ac5522607b0d9bd57df2d4501163adae0806c..09530beb3bec05ca7e37b1f006ae7d14d1748257 100644
--- a/core/src/main/scala/spark/deploy/worker/Worker.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.deploy.worker
+package org.apache.spark.deploy.worker
 
 import java.text.SimpleDateFormat
 import java.util.Date
@@ -27,13 +27,13 @@ import akka.actor.{ActorRef, Props, Actor, ActorSystem, Terminated}
 import akka.remote.{RemoteClientLifeCycleEvent, RemoteClientShutdown, RemoteClientDisconnected}
 import akka.util.duration._
 
-import spark.{Logging, Utils}
-import spark.deploy.ExecutorState
-import spark.deploy.DeployMessages._
-import spark.deploy.master.Master
-import spark.deploy.worker.ui.WorkerWebUI
-import spark.metrics.MetricsSystem
-import spark.util.AkkaUtils
+import org.apache.spark.Logging
+import org.apache.spark.deploy.ExecutorState
+import org.apache.spark.deploy.DeployMessages._
+import org.apache.spark.deploy.master.Master
+import org.apache.spark.deploy.worker.ui.WorkerWebUI
+import org.apache.spark.metrics.MetricsSystem
+import org.apache.spark.util.{Utils, AkkaUtils}
 
 
 private[spark] class Worker(
diff --git a/core/src/main/scala/spark/deploy/worker/WorkerArguments.scala b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
similarity index 97%
rename from core/src/main/scala/spark/deploy/worker/WorkerArguments.scala
rename to core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
index 9fcd3260ca97529019d41a9049700e093bd59abd..0ae89a864fd57a48694137e2bd6554c0a16f16fc 100644
--- a/core/src/main/scala/spark/deploy/worker/WorkerArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
@@ -15,11 +15,9 @@
  * limitations under the License.
  */
 
-package spark.deploy.worker
+package org.apache.spark.deploy.worker
 
-import spark.util.IntParam
-import spark.util.MemoryParam
-import spark.Utils
+import org.apache.spark.util.{Utils, IntParam, MemoryParam}
 import java.lang.management.ManagementFactory
 
 /**
diff --git a/core/src/main/scala/spark/deploy/worker/WorkerSource.scala b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerSource.scala
similarity index 92%
rename from core/src/main/scala/spark/deploy/worker/WorkerSource.scala
rename to core/src/main/scala/org/apache/spark/deploy/worker/WorkerSource.scala
index 39cb8e56901a1f36178f2e1e3ba4d6fb490d74e0..6427c0178fa008f2db4d454edbcb7f012d0cb5f1 100644
--- a/core/src/main/scala/spark/deploy/worker/WorkerSource.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerSource.scala
@@ -1,8 +1,8 @@
-package spark.deploy.worker
+package org.apache.spark.deploy.worker
 
 import com.codahale.metrics.{Gauge, MetricRegistry}
 
-import spark.metrics.source.Source
+import org.apache.spark.metrics.source.Source
 
 private[spark] class WorkerSource(val worker: Worker) extends Source {
   val sourceName = "worker"
diff --git a/core/src/main/scala/spark/deploy/worker/ui/IndexPage.scala b/core/src/main/scala/org/apache/spark/deploy/worker/ui/IndexPage.scala
similarity index 93%
rename from core/src/main/scala/spark/deploy/worker/ui/IndexPage.scala
rename to core/src/main/scala/org/apache/spark/deploy/worker/ui/IndexPage.scala
index 243e0765cb7d0619750f6ec94ce84eddef96e157..d2d36174985923e6c09a9d607f2b7ea5d3625d7b 100644
--- a/core/src/main/scala/spark/deploy/worker/ui/IndexPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/ui/IndexPage.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.deploy.worker.ui
+package org.apache.spark.deploy.worker.ui
 
 import javax.servlet.http.HttpServletRequest
 
@@ -27,11 +27,11 @@ import akka.util.duration._
 
 import net.liftweb.json.JsonAST.JValue
 
-import spark.Utils
-import spark.deploy.JsonProtocol
-import spark.deploy.DeployMessages.{RequestWorkerState, WorkerStateResponse}
-import spark.deploy.worker.ExecutorRunner
-import spark.ui.UIUtils
+import org.apache.spark.deploy.JsonProtocol
+import org.apache.spark.deploy.DeployMessages.{RequestWorkerState, WorkerStateResponse}
+import org.apache.spark.deploy.worker.ExecutorRunner
+import org.apache.spark.ui.UIUtils
+import org.apache.spark.util.Utils
 
 
 private[spark] class IndexPage(parent: WorkerWebUI) {
diff --git a/core/src/main/scala/spark/deploy/worker/ui/WorkerWebUI.scala b/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala
similarity index 95%
rename from core/src/main/scala/spark/deploy/worker/ui/WorkerWebUI.scala
rename to core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala
index 0a75ad8cf4a0ab05051dc99a356631b17c4e8bff..95d6007f3b35e1be4384c605927f4f3c87c32ce4 100644
--- a/core/src/main/scala/spark/deploy/worker/ui/WorkerWebUI.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.deploy.worker.ui
+package org.apache.spark.deploy.worker.ui
 
 import akka.util.{Duration, Timeout}
 
@@ -25,11 +25,12 @@ import javax.servlet.http.HttpServletRequest
 
 import org.eclipse.jetty.server.{Handler, Server}
 
-import spark.deploy.worker.Worker
-import spark.{Utils, Logging}
-import spark.ui.JettyUtils
-import spark.ui.JettyUtils._
-import spark.ui.UIUtils
+import org.apache.spark.deploy.worker.Worker
+import org.apache.spark.Logging
+import org.apache.spark.ui.JettyUtils
+import org.apache.spark.ui.JettyUtils._
+import org.apache.spark.ui.UIUtils
+import org.apache.spark.util.Utils
 
 /**
  * Web UI server for the standalone worker.
@@ -185,6 +186,6 @@ class WorkerWebUI(val worker: Worker, val workDir: File, requestedPort: Option[I
 }
 
 private[spark] object WorkerWebUI {
-  val STATIC_RESOURCE_DIR = "spark/ui/static"
+  val STATIC_RESOURCE_DIR = "org/apache/spark/ui/static"
   val DEFAULT_PORT="8081"
 }
diff --git a/core/src/main/scala/spark/executor/Executor.scala b/core/src/main/scala/org/apache/spark/executor/Executor.scala
similarity index 97%
rename from core/src/main/scala/spark/executor/Executor.scala
rename to core/src/main/scala/org/apache/spark/executor/Executor.scala
index fa82d2b3245dade349329cf8120c62ae1433fc11..d3658049945c2d252914aecbae314bbc7d840f66 100644
--- a/core/src/main/scala/spark/executor/Executor.scala
+++ b/core/src/main/scala/org/apache/spark/executor/Executor.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.executor
+package org.apache.spark.executor
 
 import java.io.{File}
 import java.lang.management.ManagementFactory
@@ -25,8 +25,9 @@ import java.util.concurrent._
 import scala.collection.JavaConversions._
 import scala.collection.mutable.HashMap
 
-import spark.scheduler._
-import spark._
+import org.apache.spark.scheduler._
+import org.apache.spark._
+import org.apache.spark.util.Utils
 
 
 /**
@@ -225,13 +226,13 @@ private[spark] class Executor(
     if (classUri != null) {
       logInfo("Using REPL class URI: " + classUri)
       try {
-        val klass = Class.forName("spark.repl.ExecutorClassLoader")
+        val klass = Class.forName("org.apache.spark.repl.ExecutorClassLoader")
           .asInstanceOf[Class[_ <: ClassLoader]]
         val constructor = klass.getConstructor(classOf[String], classOf[ClassLoader])
         return constructor.newInstance(classUri, parent)
       } catch {
         case _: ClassNotFoundException =>
-          logError("Could not find spark.repl.ExecutorClassLoader on classpath!")
+          logError("Could not find org.apache.spark.repl.ExecutorClassLoader on classpath!")
           System.exit(1)
           null
       }
diff --git a/core/src/main/scala/spark/executor/ExecutorBackend.scala b/core/src/main/scala/org/apache/spark/executor/ExecutorBackend.scala
similarity index 93%
rename from core/src/main/scala/spark/executor/ExecutorBackend.scala
rename to core/src/main/scala/org/apache/spark/executor/ExecutorBackend.scala
index 33a6f8a824e709a504a239ae6559841d326ca652..ad7dd34c769404bea090af0faa56ed95b9d2b323 100644
--- a/core/src/main/scala/spark/executor/ExecutorBackend.scala
+++ b/core/src/main/scala/org/apache/spark/executor/ExecutorBackend.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.executor
+package org.apache.spark.executor
 
 import java.nio.ByteBuffer
-import spark.TaskState.TaskState
+import org.apache.spark.TaskState.TaskState
 
 /**
  * A pluggable interface used by the Executor to send updates to the cluster scheduler.
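This hunk only touches the package and the TaskState import; the trait body itself stays outside the hunk. As a rough sketch of how a backend plugs in, assuming `statusUpdate(taskId, state, data)` is the trait's single abstract member (the `LoggingBackend` name and everything in its body are illustrative, not part of this commit):

```scala
package org.apache.spark.executor

import java.nio.ByteBuffer

import org.apache.spark.TaskState.TaskState

// Hypothetical backend that logs state transitions instead of sending them to
// a scheduler; assumes statusUpdate is the trait's only abstract member.
private[spark] class LoggingBackend extends ExecutorBackend {
  override def statusUpdate(taskId: Long, state: TaskState, data: ByteBuffer) {
    println("task " + taskId + " -> " + state +
      " (" + data.remaining() + " bytes of task data)")
  }
}
```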
diff --git a/core/src/main/scala/spark/executor/ExecutorExitCode.scala b/core/src/main/scala/org/apache/spark/executor/ExecutorExitCode.scala
similarity index 98%
rename from core/src/main/scala/spark/executor/ExecutorExitCode.scala
rename to core/src/main/scala/org/apache/spark/executor/ExecutorExitCode.scala
index 64b9fb88f898297642a1a9ca4b7bff11ff33e04d..e5c9bbbe2874e66b8fff8a16126bd1623138d8f2 100644
--- a/core/src/main/scala/spark/executor/ExecutorExitCode.scala
+++ b/core/src/main/scala/org/apache/spark/executor/ExecutorExitCode.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.executor
+package org.apache.spark.executor
 
 /**
  * These are exit codes that executors should use to provide the master with information about
diff --git a/core/src/main/scala/spark/executor/ExecutorSource.scala b/core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala
similarity index 96%
rename from core/src/main/scala/spark/executor/ExecutorSource.scala
rename to core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala
index d491a3c0c9599140f27f9fb1508e5453520ac77d..17653cd5608fbacc3cbb4d89ac1818fa300d90cc 100644
--- a/core/src/main/scala/spark/executor/ExecutorSource.scala
+++ b/core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala
@@ -1,4 +1,4 @@
-package spark.executor
+package org.apache.spark.executor
 
 import com.codahale.metrics.{Gauge, MetricRegistry}
 
@@ -8,7 +8,7 @@ import org.apache.hadoop.fs.LocalFileSystem
 
 import scala.collection.JavaConversions._
 
-import spark.metrics.source.Source
+import org.apache.spark.metrics.source.Source
 
 class ExecutorSource(val executor: Executor) extends Source {
   private def fileStats(scheme: String) : Option[FileSystem.Statistics] =
diff --git a/core/src/main/scala/spark/executor/ExecutorURLClassLoader.scala b/core/src/main/scala/org/apache/spark/executor/ExecutorURLClassLoader.scala
similarity index 97%
rename from core/src/main/scala/spark/executor/ExecutorURLClassLoader.scala
rename to core/src/main/scala/org/apache/spark/executor/ExecutorURLClassLoader.scala
index 09d12fb65b9a6e94f94c8aa65a8a7ff29a1e389d..f9bfe8ed2f5baa2a60c94c3b6d765bcefa3bd557 100644
--- a/core/src/main/scala/spark/executor/ExecutorURLClassLoader.scala
+++ b/core/src/main/scala/org/apache/spark/executor/ExecutorURLClassLoader.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.executor
+package org.apache.spark.executor
 
 import java.net.{URLClassLoader, URL}
 
diff --git a/core/src/main/scala/spark/executor/MesosExecutorBackend.scala b/core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala
similarity index 94%
rename from core/src/main/scala/spark/executor/MesosExecutorBackend.scala
rename to core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala
index 4961c42faddbcd3dd69a59cb4f509e99ba43ef0b..da620919801beb5d3f4773defd513d6dda94a362 100644
--- a/core/src/main/scala/spark/executor/MesosExecutorBackend.scala
+++ b/core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala
@@ -15,15 +15,16 @@
  * limitations under the License.
  */
 
-package spark.executor
+package org.apache.spark.executor
 
 import java.nio.ByteBuffer
 import org.apache.mesos.{Executor => MesosExecutor, MesosExecutorDriver, MesosNativeLibrary, ExecutorDriver}
 import org.apache.mesos.Protos.{TaskState => MesosTaskState, TaskStatus => MesosTaskStatus, _}
-import spark.TaskState.TaskState
+import org.apache.spark.TaskState.TaskState
 import com.google.protobuf.ByteString
-import spark.{Utils, Logging}
-import spark.TaskState
+import org.apache.spark.Logging
+import org.apache.spark.TaskState
+import org.apache.spark.util.Utils
 
 private[spark] class MesosExecutorBackend
   extends MesosExecutor
diff --git a/core/src/main/scala/spark/executor/StandaloneExecutorBackend.scala b/core/src/main/scala/org/apache/spark/executor/StandaloneExecutorBackend.scala
similarity index 94%
rename from core/src/main/scala/spark/executor/StandaloneExecutorBackend.scala
rename to core/src/main/scala/org/apache/spark/executor/StandaloneExecutorBackend.scala
index b5fb6dbe29ec0da147e7354276722bc0ac287c0e..78390238681d3bfc8284f26255c7e41c406de70d 100644
--- a/core/src/main/scala/spark/executor/StandaloneExecutorBackend.scala
+++ b/core/src/main/scala/org/apache/spark/executor/StandaloneExecutorBackend.scala
@@ -15,17 +15,17 @@
  * limitations under the License.
  */
 
-package spark.executor
+package org.apache.spark.executor
 
 import java.nio.ByteBuffer
 
 import akka.actor.{ActorRef, Actor, Props, Terminated}
 import akka.remote.{RemoteClientLifeCycleEvent, RemoteClientShutdown, RemoteClientDisconnected}
 
-import spark.{Logging, Utils, SparkEnv}
-import spark.TaskState.TaskState
-import spark.scheduler.cluster.StandaloneClusterMessages._
-import spark.util.AkkaUtils
+import org.apache.spark.{Logging, SparkEnv}
+import org.apache.spark.TaskState.TaskState
+import org.apache.spark.scheduler.cluster.StandaloneClusterMessages._
+import org.apache.spark.util.{Utils, AkkaUtils}
 
 
 private[spark] class StandaloneExecutorBackend(
diff --git a/core/src/main/scala/spark/executor/TaskMetrics.scala b/core/src/main/scala/org/apache/spark/executor/TaskMetrics.scala
similarity index 98%
rename from core/src/main/scala/spark/executor/TaskMetrics.scala
rename to core/src/main/scala/org/apache/spark/executor/TaskMetrics.scala
index 47b8890bee96dc1ef0a2d1fdb7c6e1418e20960f..f311141148cb0c03fd6049c5f7df8c56cb566818 100644
--- a/core/src/main/scala/spark/executor/TaskMetrics.scala
+++ b/core/src/main/scala/org/apache/spark/executor/TaskMetrics.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.executor
+package org.apache.spark.executor
 
 class TaskMetrics extends Serializable {
   /**
diff --git a/core/src/main/scala/spark/io/CompressionCodec.scala b/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala
similarity index 94%
rename from core/src/main/scala/spark/io/CompressionCodec.scala
rename to core/src/main/scala/org/apache/spark/io/CompressionCodec.scala
index 0adebecadb3e5bde51799ea6816ae77e8e51b987..90a0420cafb8f4b11400fdd643bd3b4c7f3860ed 100644
--- a/core/src/main/scala/spark/io/CompressionCodec.scala
+++ b/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.io
+package org.apache.spark.io
 
 import java.io.{InputStream, OutputStream}
 
@@ -55,7 +55,7 @@ private[spark] object CompressionCodec {
 
 
 /**
- * LZF implementation of [[spark.io.CompressionCodec]].
+ * LZF implementation of [[org.apache.spark.io.CompressionCodec]].
  */
 class LZFCompressionCodec extends CompressionCodec {
 
@@ -68,7 +68,7 @@ class LZFCompressionCodec extends CompressionCodec {
 
 
 /**
- * Snappy implementation of [[spark.io.CompressionCodec]].
+ * Snappy implementation of [[org.apache.spark.io.CompressionCodec]].
  * Block size can be configured by spark.io.compression.snappy.block.size.
  */
 class SnappyCompressionCodec extends CompressionCodec {
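Both codecs are used the same way: wrap a raw stream. A minimal round trip through the LZF codec, instantiated directly here (in practice selection goes through the `CompressionCodec` companion object shown partially above; treating `spark.io.compression.codec` as the selection property is an assumption consistent with the Snappy block-size property named in this hunk):

```scala
import java.io.{ByteArrayInputStream, ByteArrayOutputStream}

import org.apache.spark.io.LZFCompressionCodec

// Compress a small payload and read it back. compressedOutputStream and
// compressedInputStream are the two operations the CompressionCodec trait offers.
val codec = new LZFCompressionCodec()

val bytesOut = new ByteArrayOutputStream()
val out = codec.compressedOutputStream(bytesOut)
out.write("hello, spark".getBytes("UTF-8"))
out.close()

val in = codec.compressedInputStream(new ByteArrayInputStream(bytesOut.toByteArray))
val buf = new Array[Byte](64)
val n = in.read(buf)                      // payload is tiny, so one read suffices
println(new String(buf, 0, n, "UTF-8"))   // prints "hello, spark"
```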
diff --git a/core/src/main/scala/spark/metrics/MetricsConfig.scala b/core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala
similarity index 95%
rename from core/src/main/scala/spark/metrics/MetricsConfig.scala
rename to core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala
index d7fb5378a495aa1488970a035f3a9653c75321f6..0f9c4e00b1fb9ca4d77bad33e4ec317cd6e2fd6f 100644
--- a/core/src/main/scala/spark/metrics/MetricsConfig.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.metrics
+package org.apache.spark.metrics
 
 import java.util.Properties
 import java.io.{File, FileInputStream, InputStream, IOException}
@@ -23,7 +23,7 @@ import java.io.{File, FileInputStream, InputStream, IOException}
 import scala.collection.mutable
 import scala.util.matching.Regex
 
-import spark.Logging
+import org.apache.spark.Logging
 
 private[spark] class MetricsConfig(val configFile: Option[String]) extends Logging {
   initLogging()
@@ -36,7 +36,7 @@ private[spark] class MetricsConfig(val configFile: Option[String]) extends Loggi
   var propertyCategories: mutable.HashMap[String, Properties] = null
 
   private def setDefaultProperties(prop: Properties) {
-    prop.setProperty("*.sink.servlet.class", "spark.metrics.sink.MetricsServlet")
+    prop.setProperty("*.sink.servlet.class", "org.apache.spark.metrics.sink.MetricsServlet")
     prop.setProperty("*.sink.servlet.uri", "/metrics/json")
     prop.setProperty("*.sink.servlet.sample", "false")
     prop.setProperty("master.sink.servlet.uri", "/metrics/master/json")
diff --git a/core/src/main/scala/spark/metrics/MetricsSystem.scala b/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
similarity index 97%
rename from core/src/main/scala/spark/metrics/MetricsSystem.scala
rename to core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
index 4e6c6b26c86be236819f3f73945dc0cbf97f5ac6..bec0c83be8bea24c4c69bc14ad07c72cbf685329 100644
--- a/core/src/main/scala/spark/metrics/MetricsSystem.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.metrics
+package org.apache.spark.metrics
 
 import com.codahale.metrics.{Metric, MetricFilter, MetricRegistry}
 
@@ -24,9 +24,9 @@ import java.util.concurrent.TimeUnit
 
 import scala.collection.mutable
 
-import spark.Logging
-import spark.metrics.sink.{MetricsServlet, Sink}
-import spark.metrics.source.Source
+import org.apache.spark.Logging
+import org.apache.spark.metrics.sink.{MetricsServlet, Sink}
+import org.apache.spark.metrics.source.Source
 
 /**
  * Spark Metrics System, created by specific "instance", combined by source,
diff --git a/core/src/main/scala/spark/metrics/sink/ConsoleSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala
similarity index 95%
rename from core/src/main/scala/spark/metrics/sink/ConsoleSink.scala
rename to core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala
index 966ba37c20e8f618e77350cb2be8b024f0b12366..bce257d6e6f47bbd4e582baedb21410aa639b412 100644
--- a/core/src/main/scala/spark/metrics/sink/ConsoleSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala
@@ -15,14 +15,14 @@
  * limitations under the License.
  */
 
-package spark.metrics.sink
+package org.apache.spark.metrics.sink
 
 import com.codahale.metrics.{ConsoleReporter, MetricRegistry}
 
 import java.util.Properties
 import java.util.concurrent.TimeUnit
 
-import spark.metrics.MetricsSystem
+import org.apache.spark.metrics.MetricsSystem
 
 class ConsoleSink(val property: Properties, val registry: MetricRegistry) extends Sink {
   val CONSOLE_DEFAULT_PERIOD = 10
diff --git a/core/src/main/scala/spark/metrics/sink/CsvSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala
similarity index 96%
rename from core/src/main/scala/spark/metrics/sink/CsvSink.scala
rename to core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala
index cb990afdefc4c4b3261bcd8386d19baa57221269..3d1a06a395a724fe5f6f6496c30fd4fe9e47669f 100644
--- a/core/src/main/scala/spark/metrics/sink/CsvSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.metrics.sink
+package org.apache.spark.metrics.sink
 
 import com.codahale.metrics.{CsvReporter, MetricRegistry}
 
@@ -23,7 +23,7 @@ import java.io.File
 import java.util.{Locale, Properties}
 import java.util.concurrent.TimeUnit
 
-import spark.metrics.MetricsSystem
+import org.apache.spark.metrics.MetricsSystem
 
 class CsvSink(val property: Properties, val registry: MetricRegistry) extends Sink {
   val CSV_KEY_PERIOD = "period"
diff --git a/core/src/main/scala/spark/metrics/sink/JmxSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/JmxSink.scala
similarity index 96%
rename from core/src/main/scala/spark/metrics/sink/JmxSink.scala
rename to core/src/main/scala/org/apache/spark/metrics/sink/JmxSink.scala
index ee04544c0e9ed8fcf9eade3347d30a937b0c93b4..621d086d415ccb920ab85dffe4023916246ba5cb 100644
--- a/core/src/main/scala/spark/metrics/sink/JmxSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/JmxSink.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.metrics.sink
+package org.apache.spark.metrics.sink
 
 import com.codahale.metrics.{JmxReporter, MetricRegistry}
 
diff --git a/core/src/main/scala/spark/metrics/sink/MetricsServlet.scala b/core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala
similarity index 96%
rename from core/src/main/scala/spark/metrics/sink/MetricsServlet.scala
rename to core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala
index 17432b1ed1a56e6c7d58351ad33e406357374067..4e90dd4323951a2d151b4c4ccd84983a940f8cb0 100644
--- a/core/src/main/scala/spark/metrics/sink/MetricsServlet.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.metrics.sink
+package org.apache.spark.metrics.sink
 
 import com.codahale.metrics.MetricRegistry
 import com.codahale.metrics.json.MetricsModule
@@ -28,7 +28,7 @@ import javax.servlet.http.HttpServletRequest
 
 import org.eclipse.jetty.server.Handler
 
-import spark.ui.JettyUtils
+import org.apache.spark.ui.JettyUtils
 
 class MetricsServlet(val property: Properties, val registry: MetricRegistry) extends Sink {
   val SERVLET_KEY_URI = "uri"
diff --git a/core/src/main/scala/spark/metrics/sink/Sink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/Sink.scala
similarity index 95%
rename from core/src/main/scala/spark/metrics/sink/Sink.scala
rename to core/src/main/scala/org/apache/spark/metrics/sink/Sink.scala
index dad1a7f0fe7022e1e021cecf020df1628c13649e..3a739aa563eaee13844be80d774854472b11712f 100644
--- a/core/src/main/scala/spark/metrics/sink/Sink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/Sink.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.metrics.sink
+package org.apache.spark.metrics.sink
 
 trait Sink {
   def start: Unit
   def stop: Unit
-}
\ No newline at end of file
+}
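Since the whole contract is just start/stop, a custom sink stays small. A sketch (the `StdoutSink` name is made up; the two-argument constructor shape mirrors ConsoleSink and CsvSink above):

```scala
package org.apache.spark.metrics.sink

import java.util.Properties
import java.util.concurrent.TimeUnit

import com.codahale.metrics.{ConsoleReporter, MetricRegistry}

// Minimal sink: forwards the shared registry to a Codahale ConsoleReporter.
class StdoutSink(val property: Properties, val registry: MetricRegistry) extends Sink {
  val reporter = ConsoleReporter.forRegistry(registry).build()

  override def start { reporter.start(10, TimeUnit.SECONDS) }
  override def stop { reporter.stop() }
}
```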
diff --git a/core/src/main/scala/spark/metrics/source/JvmSource.scala b/core/src/main/scala/org/apache/spark/metrics/source/JvmSource.scala
similarity index 96%
rename from core/src/main/scala/spark/metrics/source/JvmSource.scala
rename to core/src/main/scala/org/apache/spark/metrics/source/JvmSource.scala
index e7710085577b5cd586de3306a979c1e5b65d3fff..75cb2b8973aa1590ec061219cdde158952e5615e 100644
--- a/core/src/main/scala/spark/metrics/source/JvmSource.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/source/JvmSource.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.metrics.source
+package org.apache.spark.metrics.source
 
 import com.codahale.metrics.MetricRegistry
 import com.codahale.metrics.jvm.{GarbageCollectorMetricSet, MemoryUsageGaugeSet}
diff --git a/core/src/main/scala/spark/metrics/source/Source.scala b/core/src/main/scala/org/apache/spark/metrics/source/Source.scala
similarity index 95%
rename from core/src/main/scala/spark/metrics/source/Source.scala
rename to core/src/main/scala/org/apache/spark/metrics/source/Source.scala
index 76199a004b560e708aa5d4cb0dd5bda79b90b51e..3fee55cc6dcd5478194d597b32f473ac02645b96 100644
--- a/core/src/main/scala/spark/metrics/source/Source.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/source/Source.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.metrics.source
+package org.apache.spark.metrics.source
 
 import com.codahale.metrics.MetricRegistry
 
diff --git a/core/src/main/scala/spark/network/BufferMessage.scala b/core/src/main/scala/org/apache/spark/network/BufferMessage.scala
similarity index 97%
rename from core/src/main/scala/spark/network/BufferMessage.scala
rename to core/src/main/scala/org/apache/spark/network/BufferMessage.scala
index e566aeac13779c76726857f73fe4ac02adbfd0f4..f736bb3713061f5258fddde2331a973dbd77ed76 100644
--- a/core/src/main/scala/spark/network/BufferMessage.scala
+++ b/core/src/main/scala/org/apache/spark/network/BufferMessage.scala
@@ -15,13 +15,13 @@
  * limitations under the License.
  */
 
-package spark.network
+package org.apache.spark.network
 
 import java.nio.ByteBuffer
 
 import scala.collection.mutable.ArrayBuffer
 
-import spark.storage.BlockManager
+import org.apache.spark.storage.BlockManager
 
 
 private[spark]
diff --git a/core/src/main/scala/spark/network/Connection.scala b/core/src/main/scala/org/apache/spark/network/Connection.scala
similarity index 99%
rename from core/src/main/scala/spark/network/Connection.scala
rename to core/src/main/scala/org/apache/spark/network/Connection.scala
index 1e571d39ae5763e756bc49509c1dfbaeb104030e..95cb0206acd62e67a80edcb057743d29615e4500 100644
--- a/core/src/main/scala/spark/network/Connection.scala
+++ b/core/src/main/scala/org/apache/spark/network/Connection.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.network
+package org.apache.spark.network
 
-import spark._
+import org.apache.spark._
 
 import scala.collection.mutable.{HashMap, Queue, ArrayBuffer}
 
diff --git a/core/src/main/scala/spark/network/ConnectionManager.scala b/core/src/main/scala/org/apache/spark/network/ConnectionManager.scala
similarity index 99%
rename from core/src/main/scala/spark/network/ConnectionManager.scala
rename to core/src/main/scala/org/apache/spark/network/ConnectionManager.scala
index 8b9f3ae18c5c90a395f33e800d2861dc29ba31be..e15a839c4e7a79ca14214fc66ca951726ad6253b 100644
--- a/core/src/main/scala/spark/network/ConnectionManager.scala
+++ b/core/src/main/scala/org/apache/spark/network/ConnectionManager.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.network
+package org.apache.spark.network
 
-import spark._
+import org.apache.spark._
 
 import java.nio._
 import java.nio.channels._
@@ -34,6 +34,7 @@ import scala.collection.mutable.ArrayBuffer
 import akka.dispatch.{Await, Promise, ExecutionContext, Future}
 import akka.util.Duration
 import akka.util.duration._
+import org.apache.spark.util.Utils
 
 
 private[spark] class ConnectionManager(port: Int) extends Logging {
diff --git a/core/src/main/scala/spark/network/ConnectionManagerId.scala b/core/src/main/scala/org/apache/spark/network/ConnectionManagerId.scala
similarity index 94%
rename from core/src/main/scala/spark/network/ConnectionManagerId.scala
rename to core/src/main/scala/org/apache/spark/network/ConnectionManagerId.scala
index 9d5c518293d67b60d9275a2f47a10d5d658d7044..50dd9bc2d101fe721cd07d474a4dd9f3445580b3 100644
--- a/core/src/main/scala/spark/network/ConnectionManagerId.scala
+++ b/core/src/main/scala/org/apache/spark/network/ConnectionManagerId.scala
@@ -15,11 +15,11 @@
  * limitations under the License.
  */
 
-package spark.network
+package org.apache.spark.network
 
 import java.net.InetSocketAddress
 
-import spark.Utils
+import org.apache.spark.util.Utils
 
 
 private[spark] case class ConnectionManagerId(host: String, port: Int) {
diff --git a/core/src/main/scala/spark/network/ConnectionManagerTest.scala b/core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala
similarity index 97%
rename from core/src/main/scala/spark/network/ConnectionManagerTest.scala
rename to core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala
index 9e3827aaf56105d5a73415ce5be423a1e598ca03..8d9ad9604d645ffec3232c96150a89f622498237 100644
--- a/core/src/main/scala/spark/network/ConnectionManagerTest.scala
+++ b/core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.network
+package org.apache.spark.network
 
-import spark._
-import spark.SparkContext._
+import org.apache.spark._
+import org.apache.spark.SparkContext._
 
 import scala.io.Source
 
diff --git a/core/src/main/scala/spark/network/Message.scala b/core/src/main/scala/org/apache/spark/network/Message.scala
similarity index 98%
rename from core/src/main/scala/spark/network/Message.scala
rename to core/src/main/scala/org/apache/spark/network/Message.scala
index a25457ea35ff76a52f5f5247caf36f5b4c413987..f2ecc6d439aaad826578cb4f5c86d4ada559ee07 100644
--- a/core/src/main/scala/spark/network/Message.scala
+++ b/core/src/main/scala/org/apache/spark/network/Message.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.network
+package org.apache.spark.network
 
 import java.nio.ByteBuffer
 import java.net.InetSocketAddress
diff --git a/core/src/main/scala/spark/network/MessageChunk.scala b/core/src/main/scala/org/apache/spark/network/MessageChunk.scala
similarity index 97%
rename from core/src/main/scala/spark/network/MessageChunk.scala
rename to core/src/main/scala/org/apache/spark/network/MessageChunk.scala
index 784db5ab62dce219e8f8fc99c628616954ad2b32..e0fe57b80d5cd693080f9e7e79fb5a8a4f01f569 100644
--- a/core/src/main/scala/spark/network/MessageChunk.scala
+++ b/core/src/main/scala/org/apache/spark/network/MessageChunk.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.network
+package org.apache.spark.network
 
 import java.nio.ByteBuffer
 
diff --git a/core/src/main/scala/spark/network/MessageChunkHeader.scala b/core/src/main/scala/org/apache/spark/network/MessageChunkHeader.scala
similarity index 98%
rename from core/src/main/scala/spark/network/MessageChunkHeader.scala
rename to core/src/main/scala/org/apache/spark/network/MessageChunkHeader.scala
index 18d0cbcc14642eb65ef1a0fcd518207299728349..235fbc39b3bd254e5457652378abf3f1c956377a 100644
--- a/core/src/main/scala/spark/network/MessageChunkHeader.scala
+++ b/core/src/main/scala/org/apache/spark/network/MessageChunkHeader.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.network
+package org.apache.spark.network
 
 import java.net.InetAddress
 import java.net.InetSocketAddress
diff --git a/core/src/main/scala/spark/network/ReceiverTest.scala b/core/src/main/scala/org/apache/spark/network/ReceiverTest.scala
similarity index 97%
rename from core/src/main/scala/spark/network/ReceiverTest.scala
rename to core/src/main/scala/org/apache/spark/network/ReceiverTest.scala
index 2bbc736f4055ab0abdc5a8815009092a785de0d3..781715108be6361197eed6e946d3a6664c5161b7 100644
--- a/core/src/main/scala/spark/network/ReceiverTest.scala
+++ b/core/src/main/scala/org/apache/spark/network/ReceiverTest.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.network
+package org.apache.spark.network
 
 import java.nio.ByteBuffer
 import java.net.InetAddress
diff --git a/core/src/main/scala/spark/network/SenderTest.scala b/core/src/main/scala/org/apache/spark/network/SenderTest.scala
similarity index 98%
rename from core/src/main/scala/spark/network/SenderTest.scala
rename to core/src/main/scala/org/apache/spark/network/SenderTest.scala
index 542c54c36b0e7dd0ef1a81abf27da9462a5a41d8..777574980fbc63a82ebc51b60ab02fa7a37c5c6e 100644
--- a/core/src/main/scala/spark/network/SenderTest.scala
+++ b/core/src/main/scala/org/apache/spark/network/SenderTest.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.network
+package org.apache.spark.network
 
 import java.nio.ByteBuffer
 import java.net.InetAddress
diff --git a/core/src/main/scala/spark/network/netty/FileHeader.scala b/core/src/main/scala/org/apache/spark/network/netty/FileHeader.scala
similarity index 96%
rename from core/src/main/scala/spark/network/netty/FileHeader.scala
rename to core/src/main/scala/org/apache/spark/network/netty/FileHeader.scala
index bf46d32aa3a473c46b1d8fc41ab209eb57c89678..3c29700920a341cd69fed842f12322098092989b 100644
--- a/core/src/main/scala/spark/network/netty/FileHeader.scala
+++ b/core/src/main/scala/org/apache/spark/network/netty/FileHeader.scala
@@ -15,11 +15,11 @@
  * limitations under the License.
  */
 
-package spark.network.netty
+package org.apache.spark.network.netty
 
 import io.netty.buffer._
 
-import spark.Logging
+import org.apache.spark.Logging
 
 private[spark] class FileHeader (
   val fileLen: Int,
diff --git a/core/src/main/scala/spark/network/netty/ShuffleCopier.scala b/core/src/main/scala/org/apache/spark/network/netty/ShuffleCopier.scala
similarity index 96%
rename from core/src/main/scala/spark/network/netty/ShuffleCopier.scala
rename to core/src/main/scala/org/apache/spark/network/netty/ShuffleCopier.scala
index b01f6369f659c7ce87ccd04ff4760d4c8d0fd1c5..9493ccffd99e52b230326bb29d5d24b8139f29fb 100644
--- a/core/src/main/scala/spark/network/netty/ShuffleCopier.scala
+++ b/core/src/main/scala/org/apache/spark/network/netty/ShuffleCopier.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.network.netty
+package org.apache.spark.network.netty
 
 import java.util.concurrent.Executors
 
@@ -23,8 +23,8 @@ import io.netty.buffer.ByteBuf
 import io.netty.channel.ChannelHandlerContext
 import io.netty.util.CharsetUtil
 
-import spark.Logging
-import spark.network.ConnectionManagerId
+import org.apache.spark.Logging
+import org.apache.spark.network.ConnectionManagerId
 
 import scala.collection.JavaConverters._
 
diff --git a/core/src/main/scala/spark/network/netty/ShuffleSender.scala b/core/src/main/scala/org/apache/spark/network/netty/ShuffleSender.scala
similarity index 96%
rename from core/src/main/scala/spark/network/netty/ShuffleSender.scala
rename to core/src/main/scala/org/apache/spark/network/netty/ShuffleSender.scala
index cdf88b03a0840ce884d65543ccf01cecd35b4ef7..537f225469a8df772886ea315365e05e810726f2 100644
--- a/core/src/main/scala/spark/network/netty/ShuffleSender.scala
+++ b/core/src/main/scala/org/apache/spark/network/netty/ShuffleSender.scala
@@ -15,11 +15,11 @@
  * limitations under the License.
  */
 
-package spark.network.netty
+package org.apache.spark.network.netty
 
 import java.io.File
 
-import spark.Logging
+import org.apache.spark.Logging
 
 
 private[spark] class ShuffleSender(portIn: Int, val pResolver: PathResolver) extends Logging {
diff --git a/core/src/main/scala/org/apache/spark/package.scala b/core/src/main/scala/org/apache/spark/package.scala
new file mode 100644
index 0000000000000000000000000000000000000000..c0ec527339b0d8a18ecbdfeac01052aa6faa5193
--- /dev/null
+++ b/core/src/main/scala/org/apache/spark/package.scala
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache
+
+import org.apache.spark.rdd.{SequenceFileRDDFunctions, DoubleRDDFunctions, PairRDDFunctions}
+
+/**
+ * Core Spark functionality. [[org.apache.spark.SparkContext]] serves as the main entry point to
+ * Spark, while [[org.apache.spark.rdd.RDD]] is the data type representing a distributed collection,
+ * and provides most parallel operations.
+ *
+ * In addition, [[org.apache.spark.rdd.PairRDDFunctions]] contains operations available only on RDDs
+ * of key-value pairs, such as `groupByKey` and `join`; [[org.apache.spark.rdd.DoubleRDDFunctions]]
+ * contains operations available only on RDDs of Doubles; and
+ * [[org.apache.spark.rdd.SequenceFileRDDFunctions]] contains operations available on RDDs that can
+ * be saved as SequenceFiles. These operations are automatically available on any RDD of the right
+ * type (e.g. RDD[(Int, Int)]) through implicit conversions when you
+ * `import org.apache.spark.SparkContext._`.
+ */
+package object spark {
+  // For package docs only
+}
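The implicit conversions this package doc refers to are what makes the renamed import line matter in user code. A small self-contained example (local mode; the names are illustrative):

```scala
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._   // brings the implicit RDD conversions into scope

object ImplicitsDemo {
  def main(args: Array[String]) {
    val sc = new SparkContext("local", "ImplicitsDemo")
    val pairs = sc.parallelize(Seq(("a", 1), ("b", 2), ("a", 3)))
    // reduceByKey lives in PairRDDFunctions and is reached via the implicit conversion.
    println(pairs.reduceByKey(_ + _).collect().toSeq)
    sc.stop()
  }
}
```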
diff --git a/core/src/main/scala/spark/partial/ApproximateActionListener.scala b/core/src/main/scala/org/apache/spark/partial/ApproximateActionListener.scala
similarity index 95%
rename from core/src/main/scala/spark/partial/ApproximateActionListener.scala
rename to core/src/main/scala/org/apache/spark/partial/ApproximateActionListener.scala
index 691d939150a4d14354cf140575138d6928e11d1b..d71069444a73f498eac40995755362f9afb4f9d9 100644
--- a/core/src/main/scala/spark/partial/ApproximateActionListener.scala
+++ b/core/src/main/scala/org/apache/spark/partial/ApproximateActionListener.scala
@@ -15,10 +15,11 @@
  * limitations under the License.
  */
 
-package spark.partial
+package org.apache.spark.partial
 
-import spark._
-import spark.scheduler.JobListener
+import org.apache.spark._
+import org.apache.spark.scheduler.JobListener
+import org.apache.spark.rdd.RDD
 
 /**
  * A JobListener for an approximate single-result action, such as count() or non-parallel reduce().
diff --git a/core/src/main/scala/spark/partial/ApproximateEvaluator.scala b/core/src/main/scala/org/apache/spark/partial/ApproximateEvaluator.scala
similarity index 97%
rename from core/src/main/scala/spark/partial/ApproximateEvaluator.scala
rename to core/src/main/scala/org/apache/spark/partial/ApproximateEvaluator.scala
index 5eae144dfbccc575a9f973c4547099057dfda18c..9c2859c8b9850295aae46f32387e9f6b81128369 100644
--- a/core/src/main/scala/spark/partial/ApproximateEvaluator.scala
+++ b/core/src/main/scala/org/apache/spark/partial/ApproximateEvaluator.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.partial
+package org.apache.spark.partial
 
 /**
  * An object that computes a function incrementally by merging in results of type U from multiple
diff --git a/core/src/main/scala/spark/partial/BoundedDouble.scala b/core/src/main/scala/org/apache/spark/partial/BoundedDouble.scala
similarity index 96%
rename from core/src/main/scala/spark/partial/BoundedDouble.scala
rename to core/src/main/scala/org/apache/spark/partial/BoundedDouble.scala
index 8bdbe6c0123e712ab1a36796b25ffcb9938c799e..5f4450859cc9b2c6810484bfc995cd7aa32af710 100644
--- a/core/src/main/scala/spark/partial/BoundedDouble.scala
+++ b/core/src/main/scala/org/apache/spark/partial/BoundedDouble.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.partial
+package org.apache.spark.partial
 
 /**
  * A Double with error bars on it.
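BoundedDouble is what the approximate actions hand back through a PartialResult. A sketch of reading one, assuming `countApprox`'s (timeout, confidence) signature and PartialResult's `initialValue` accessor:

```scala
import org.apache.spark.SparkContext
import org.apache.spark.partial.{BoundedDouble, PartialResult}

val sc = new SparkContext("local", "ApproxDemo")
// Ask for a count but give up after 500 ms, settling for an estimate.
val result: PartialResult[BoundedDouble] =
  sc.parallelize(1 to 1000000).countApprox(500, 0.95)
val estimate = result.initialValue
println(estimate.mean + " in [" + estimate.low + ", " + estimate.high + "]")
```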
diff --git a/core/src/main/scala/spark/partial/CountEvaluator.scala b/core/src/main/scala/org/apache/spark/partial/CountEvaluator.scala
similarity index 98%
rename from core/src/main/scala/spark/partial/CountEvaluator.scala
rename to core/src/main/scala/org/apache/spark/partial/CountEvaluator.scala
index 6aa92094eb4a0f71911f2cfab5eea744e25876d6..3155dfe165664ff9e006a058585dfef6d20e1417 100644
--- a/core/src/main/scala/spark/partial/CountEvaluator.scala
+++ b/core/src/main/scala/org/apache/spark/partial/CountEvaluator.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.partial
+package org.apache.spark.partial
 
 import cern.jet.stat.Probability
 
diff --git a/core/src/main/scala/spark/partial/GroupedCountEvaluator.scala b/core/src/main/scala/org/apache/spark/partial/GroupedCountEvaluator.scala
similarity index 98%
rename from core/src/main/scala/spark/partial/GroupedCountEvaluator.scala
rename to core/src/main/scala/org/apache/spark/partial/GroupedCountEvaluator.scala
index ebe2e5a1e3aff9d80b29809e5bffe8147c89229a..e519e3a54846e4b609f319cdb9c508db81542551 100644
--- a/core/src/main/scala/spark/partial/GroupedCountEvaluator.scala
+++ b/core/src/main/scala/org/apache/spark/partial/GroupedCountEvaluator.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.partial
+package org.apache.spark.partial
 
 import java.util.{HashMap => JHashMap}
 import java.util.{Map => JMap}
diff --git a/core/src/main/scala/spark/partial/GroupedMeanEvaluator.scala b/core/src/main/scala/org/apache/spark/partial/GroupedMeanEvaluator.scala
similarity index 97%
rename from core/src/main/scala/spark/partial/GroupedMeanEvaluator.scala
rename to core/src/main/scala/org/apache/spark/partial/GroupedMeanEvaluator.scala
index 2dadbbd5fb49628d16f4614525006cb2cc59eebf..cf8a5680b663d0663f306d3a88eae72e25c5c968 100644
--- a/core/src/main/scala/spark/partial/GroupedMeanEvaluator.scala
+++ b/core/src/main/scala/org/apache/spark/partial/GroupedMeanEvaluator.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.partial
+package org.apache.spark.partial
 
 import java.util.{HashMap => JHashMap}
 import java.util.{Map => JMap}
@@ -24,7 +24,7 @@ import scala.collection.mutable.HashMap
 import scala.collection.Map
 import scala.collection.JavaConversions.mapAsScalaMap
 
-import spark.util.StatCounter
+import org.apache.spark.util.StatCounter
 
 /**
  * An ApproximateEvaluator for means by key. Returns a map of key to confidence interval.
diff --git a/core/src/main/scala/spark/partial/GroupedSumEvaluator.scala b/core/src/main/scala/org/apache/spark/partial/GroupedSumEvaluator.scala
similarity index 97%
rename from core/src/main/scala/spark/partial/GroupedSumEvaluator.scala
rename to core/src/main/scala/org/apache/spark/partial/GroupedSumEvaluator.scala
index ae2b63f7cb7dea855481e6454d0e2e729ceb84a2..8225a5d933ce50261db5edb4bb8c372d1915068f 100644
--- a/core/src/main/scala/spark/partial/GroupedSumEvaluator.scala
+++ b/core/src/main/scala/org/apache/spark/partial/GroupedSumEvaluator.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.partial
+package org.apache.spark.partial
 
 import java.util.{HashMap => JHashMap}
 import java.util.{Map => JMap}
@@ -24,7 +24,7 @@ import scala.collection.mutable.HashMap
 import scala.collection.Map
 import scala.collection.JavaConversions.mapAsScalaMap
 
-import spark.util.StatCounter
+import org.apache.spark.util.StatCounter
 
 /**
  * An ApproximateEvaluator for sums by key. Returns a map of key to confidence interval.
diff --git a/core/src/main/scala/spark/partial/MeanEvaluator.scala b/core/src/main/scala/org/apache/spark/partial/MeanEvaluator.scala
similarity index 96%
rename from core/src/main/scala/spark/partial/MeanEvaluator.scala
rename to core/src/main/scala/org/apache/spark/partial/MeanEvaluator.scala
index 5ddcad70759eb048d7157f103da2b81cf639757e..d24959cba8727016c23eb87d7444d44e79f00ce9 100644
--- a/core/src/main/scala/spark/partial/MeanEvaluator.scala
+++ b/core/src/main/scala/org/apache/spark/partial/MeanEvaluator.scala
@@ -15,11 +15,11 @@
  * limitations under the License.
  */
 
-package spark.partial
+package org.apache.spark.partial
 
 import cern.jet.stat.Probability
 
-import spark.util.StatCounter
+import org.apache.spark.util.StatCounter
 
 /**
  * An ApproximateEvaluator for means.
diff --git a/core/src/main/scala/spark/partial/PartialResult.scala b/core/src/main/scala/org/apache/spark/partial/PartialResult.scala
similarity index 99%
rename from core/src/main/scala/spark/partial/PartialResult.scala
rename to core/src/main/scala/org/apache/spark/partial/PartialResult.scala
index 922a9f9bc65a0c7eaabd273b1e2c54ac9b9c53b3..5ce49b8100ee6ad42afa29b4add097c0bb3f3742 100644
--- a/core/src/main/scala/spark/partial/PartialResult.scala
+++ b/core/src/main/scala/org/apache/spark/partial/PartialResult.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.partial
+package org.apache.spark.partial
 
 class PartialResult[R](initialVal: R, isFinal: Boolean) {
   private var finalValue: Option[R] = if (isFinal) Some(initialVal) else None
diff --git a/core/src/main/scala/spark/partial/StudentTCacher.scala b/core/src/main/scala/org/apache/spark/partial/StudentTCacher.scala
similarity index 98%
rename from core/src/main/scala/spark/partial/StudentTCacher.scala
rename to core/src/main/scala/org/apache/spark/partial/StudentTCacher.scala
index f3bb987d46c3320c261488e0166cfc466d24008f..92915ee66d29f3a2100a6aa3ffb12fa435dffd74 100644
--- a/core/src/main/scala/spark/partial/StudentTCacher.scala
+++ b/core/src/main/scala/org/apache/spark/partial/StudentTCacher.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.partial
+package org.apache.spark.partial
 
 import cern.jet.stat.Probability
 
diff --git a/core/src/main/scala/spark/partial/SumEvaluator.scala b/core/src/main/scala/org/apache/spark/partial/SumEvaluator.scala
similarity index 97%
rename from core/src/main/scala/spark/partial/SumEvaluator.scala
rename to core/src/main/scala/org/apache/spark/partial/SumEvaluator.scala
index 4083abef03d25482ae18fd7e1dcdcfe873855505..a74f80094434c02cbb00a5e25cc7db3b6558e620 100644
--- a/core/src/main/scala/spark/partial/SumEvaluator.scala
+++ b/core/src/main/scala/org/apache/spark/partial/SumEvaluator.scala
@@ -15,11 +15,11 @@
  * limitations under the License.
  */
 
-package spark.partial
+package org.apache.spark.partial
 
 import cern.jet.stat.Probability
 
-import spark.util.StatCounter
+import org.apache.spark.util.StatCounter
 
 /**
 * An ApproximateEvaluator for sums. It estimates the mean and the count and multiplies them
diff --git a/core/src/main/scala/spark/rdd/BlockRDD.scala b/core/src/main/scala/org/apache/spark/rdd/BlockRDD.scala
similarity index 92%
rename from core/src/main/scala/spark/rdd/BlockRDD.scala
rename to core/src/main/scala/org/apache/spark/rdd/BlockRDD.scala
index 03800584ae0a012edea90042904d86380fd6f020..bca6956a182a2c3dbd049e3cc857d6d412e464c4 100644
--- a/core/src/main/scala/spark/rdd/BlockRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/BlockRDD.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.rdd
+package org.apache.spark.rdd
 
-import spark.{RDD, SparkContext, SparkEnv, Partition, TaskContext}
-import spark.storage.BlockManager
+import org.apache.spark.{SparkContext, SparkEnv, Partition, TaskContext}
+import org.apache.spark.storage.BlockManager
 
 private[spark] class BlockRDDPartition(val blockId: String, idx: Int) extends Partition {
   val index = idx
diff --git a/core/src/main/scala/spark/rdd/CartesianRDD.scala b/core/src/main/scala/org/apache/spark/rdd/CartesianRDD.scala
similarity index 98%
rename from core/src/main/scala/spark/rdd/CartesianRDD.scala
rename to core/src/main/scala/org/apache/spark/rdd/CartesianRDD.scala
index 91b3e69d6fb3e759916c917be1d4e8b004466b53..9b0c882481cfc8357e3758dc89c7197ae2dd648c 100644
--- a/core/src/main/scala/spark/rdd/CartesianRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/CartesianRDD.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.rdd
+package org.apache.spark.rdd
 
 import java.io.{ObjectOutputStream, IOException}
-import spark._
+import org.apache.spark._
 
 
 private[spark]
diff --git a/core/src/main/scala/spark/rdd/CheckpointRDD.scala b/core/src/main/scala/org/apache/spark/rdd/CheckpointRDD.scala
similarity index 98%
rename from core/src/main/scala/spark/rdd/CheckpointRDD.scala
rename to core/src/main/scala/org/apache/spark/rdd/CheckpointRDD.scala
index 1ad5fe6539017fb0e2a9a1dd7b598758da03faf3..3311757189aeebe874c048345ce880eae23bd316 100644
--- a/core/src/main/scala/spark/rdd/CheckpointRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/CheckpointRDD.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.rdd
+package org.apache.spark.rdd
 
-import spark._
+import org.apache.spark._
 import org.apache.hadoop.mapred.{FileInputFormat, SequenceFileInputFormat, JobConf, Reporter}
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.io.{NullWritable, BytesWritable}
@@ -138,7 +138,7 @@ private[spark] object CheckpointRDD extends Logging {
   // each split file having multiple blocks. This needs to be run on a
   // cluster (mesos or standalone) using HDFS.
   def main(args: Array[String]) {
-    import spark._
+    import org.apache.spark._
 
     val Array(cluster, hdfsPath) = args
     val env = SparkEnv.get
diff --git a/core/src/main/scala/spark/rdd/CoGroupedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/CoGroupedRDD.scala
similarity index 96%
rename from core/src/main/scala/spark/rdd/CoGroupedRDD.scala
rename to core/src/main/scala/org/apache/spark/rdd/CoGroupedRDD.scala
index 01b6c23dcc202a5b701dbbf34d97aa529a0eaeec..0187256a8e3268ebed4b44a783cb939e39801506 100644
--- a/core/src/main/scala/spark/rdd/CoGroupedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/CoGroupedRDD.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.rdd
+package org.apache.spark.rdd
 
 import java.io.{ObjectOutputStream, IOException}
 import java.util.{HashMap => JHashMap}
@@ -23,8 +23,8 @@ import java.util.{HashMap => JHashMap}
 import scala.collection.JavaConversions
 import scala.collection.mutable.ArrayBuffer
 
-import spark.{Partition, Partitioner, RDD, SparkEnv, TaskContext}
-import spark.{Dependency, OneToOneDependency, ShuffleDependency}
+import org.apache.spark.{Partition, Partitioner, SparkEnv, TaskContext}
+import org.apache.spark.{Dependency, OneToOneDependency, ShuffleDependency}
 
 
 private[spark] sealed trait CoGroupSplitDep extends Serializable
diff --git a/core/src/main/scala/spark/rdd/CoalescedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/CoalescedRDD.scala
similarity index 99%
rename from core/src/main/scala/spark/rdd/CoalescedRDD.scala
rename to core/src/main/scala/org/apache/spark/rdd/CoalescedRDD.scala
index e612d026b221a1a7ccc8e9013639c93ffbee4a4b..c5de6362a9aa7c75298b3e38029e5ab8a4e43388 100644
--- a/core/src/main/scala/spark/rdd/CoalescedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/CoalescedRDD.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.rdd
+package org.apache.spark.rdd
 
-import spark._
+import org.apache.spark._
 import java.io.{ObjectOutputStream, IOException}
 import scala.collection.mutable
 import scala.Some
diff --git a/core/src/main/scala/spark/DoubleRDDFunctions.scala b/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala
similarity index 85%
rename from core/src/main/scala/spark/DoubleRDDFunctions.scala
rename to core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala
index 104168e61ccdfbf70e5d504b18481decfc2d41b6..a4bec417529fc9022b9da540993bc235cae67b81 100644
--- a/core/src/main/scala/spark/DoubleRDDFunctions.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala
@@ -15,17 +15,18 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark.rdd
 
-import spark.partial.BoundedDouble
-import spark.partial.MeanEvaluator
-import spark.partial.PartialResult
-import spark.partial.SumEvaluator
-import spark.util.StatCounter
+import org.apache.spark.partial.BoundedDouble
+import org.apache.spark.partial.MeanEvaluator
+import org.apache.spark.partial.PartialResult
+import org.apache.spark.partial.SumEvaluator
+import org.apache.spark.util.StatCounter
+import org.apache.spark.{TaskContext, Logging}
 
 /**
  * Extra functions available on RDDs of Doubles through an implicit conversion.
- * Import `spark.SparkContext._` at the top of your program to use these functions.
+ * Import `org.apache.spark.SparkContext._` at the top of your program to use these functions.
  */
 class DoubleRDDFunctions(self: RDD[Double]) extends Logging with Serializable {
   /** Add up the elements in this RDD. */
@@ -34,7 +35,7 @@ class DoubleRDDFunctions(self: RDD[Double]) extends Logging with Serializable {
   }
 
   /**
-   * Return a [[spark.util.StatCounter]] object that captures the mean, variance and count
+   * Return a [[org.apache.spark.util.StatCounter]] object that captures the mean, variance and count
    * of the RDD's elements in one operation.
    */
   def stats(): StatCounter = {
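These double-specific operations ride the same implicit-conversion mechanism, so for users only the import line changes. For instance (local mode, illustrative values):

```scala
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._   // RDD[Double] picks up DoubleRDDFunctions

val sc = new SparkContext("local", "DoubleDemo")
val xs = sc.parallelize(Seq(1.0, 2.0, 3.0, 4.0))

println(xs.sum())              // 10.0
val stats = xs.stats()         // count, mean, variance in one pass
println(stats.mean + " +/- " + stats.stdev)
```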
diff --git a/core/src/main/scala/spark/rdd/EmptyRDD.scala b/core/src/main/scala/org/apache/spark/rdd/EmptyRDD.scala
similarity index 91%
rename from core/src/main/scala/spark/rdd/EmptyRDD.scala
rename to core/src/main/scala/org/apache/spark/rdd/EmptyRDD.scala
index d7d4db5d3093642fc1258b55621a1f71f032d77e..c8900d1a9346d45eaa51bf5127c6f0f755148dde 100644
--- a/core/src/main/scala/spark/rdd/EmptyRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/EmptyRDD.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.rdd
+package org.apache.spark.rdd
 
-import spark.{RDD, SparkContext, SparkEnv, Partition, TaskContext}
+import org.apache.spark.{SparkContext, SparkEnv, Partition, TaskContext}
 
 
 /**
diff --git a/core/src/main/scala/spark/rdd/FilteredRDD.scala b/core/src/main/scala/org/apache/spark/rdd/FilteredRDD.scala
similarity index 92%
rename from core/src/main/scala/spark/rdd/FilteredRDD.scala
rename to core/src/main/scala/org/apache/spark/rdd/FilteredRDD.scala
index 783508cfd173ca6441760c6cfc93c7f3d0562817..5312dc0b593882e5868f9633e3c11c7ceb2efd7e 100644
--- a/core/src/main/scala/spark/rdd/FilteredRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/FilteredRDD.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.rdd
+package org.apache.spark.rdd
 
-import spark.{OneToOneDependency, RDD, Partition, TaskContext}
+import org.apache.spark.{OneToOneDependency, Partition, TaskContext}
 
 private[spark] class FilteredRDD[T: ClassManifest](
     prev: RDD[T],
diff --git a/core/src/main/scala/spark/rdd/FlatMappedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/FlatMappedRDD.scala
similarity index 93%
rename from core/src/main/scala/spark/rdd/FlatMappedRDD.scala
rename to core/src/main/scala/org/apache/spark/rdd/FlatMappedRDD.scala
index ed75eac3ff6edba36181a789167bca2c2484db70..cbdf6d84c07062654f2bc1e42c04616433b00376 100644
--- a/core/src/main/scala/spark/rdd/FlatMappedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/FlatMappedRDD.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.rdd
+package org.apache.spark.rdd
 
-import spark.{RDD, Partition, TaskContext}
+import org.apache.spark.{Partition, TaskContext}
 
 
 private[spark]
diff --git a/core/src/main/scala/spark/rdd/FlatMappedValuesRDD.scala b/core/src/main/scala/org/apache/spark/rdd/FlatMappedValuesRDD.scala
similarity index 94%
rename from core/src/main/scala/spark/rdd/FlatMappedValuesRDD.scala
rename to core/src/main/scala/org/apache/spark/rdd/FlatMappedValuesRDD.scala
index a6bdce89d8a4ce317107e31da8d11ba8b14cfb2d..82000bac092dac7986950e0d73bc6b4da78296ef 100644
--- a/core/src/main/scala/spark/rdd/FlatMappedValuesRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/FlatMappedValuesRDD.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.rdd
+package org.apache.spark.rdd
 
-import spark.{TaskContext, Partition, RDD}
+import org.apache.spark.{TaskContext, Partition}
 
 
 private[spark]
diff --git a/core/src/main/scala/spark/rdd/GlommedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/GlommedRDD.scala
similarity index 93%
rename from core/src/main/scala/spark/rdd/GlommedRDD.scala
rename to core/src/main/scala/org/apache/spark/rdd/GlommedRDD.scala
index 1573f8a289a697499004d8b13d4f0ae3a629a335..829545d7b0aff389f48ffac2e067b53625879ffd 100644
--- a/core/src/main/scala/spark/rdd/GlommedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/GlommedRDD.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.rdd
+package org.apache.spark.rdd
 
-import spark.{RDD, Partition, TaskContext}
+import org.apache.spark.{Partition, TaskContext}
 
 private[spark] class GlommedRDD[T: ClassManifest](prev: RDD[T])
   extends RDD[Array[T]](prev) {
diff --git a/core/src/main/scala/spark/rdd/HadoopRDD.scala b/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala
similarity index 91%
rename from core/src/main/scala/spark/rdd/HadoopRDD.scala
rename to core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala
index e512423fd63ab464de4ab3711e300d1a5726b3a4..2cb6734e4199c2f50dcef278dbb23691c4cd3621 100644
--- a/core/src/main/scala/spark/rdd/HadoopRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala
@@ -15,25 +15,19 @@
  * limitations under the License.
  */
 
-package spark.rdd
+package org.apache.spark.rdd
 
 import java.io.EOFException
-import java.util.NoSuchElementException
 
-import org.apache.hadoop.io.LongWritable
-import org.apache.hadoop.io.NullWritable
-import org.apache.hadoop.io.Text
-import org.apache.hadoop.mapred.FileInputFormat
 import org.apache.hadoop.mapred.InputFormat
 import org.apache.hadoop.mapred.InputSplit
 import org.apache.hadoop.mapred.JobConf
-import org.apache.hadoop.mapred.TextInputFormat
 import org.apache.hadoop.mapred.RecordReader
 import org.apache.hadoop.mapred.Reporter
 import org.apache.hadoop.util.ReflectionUtils
 
-import spark.{Dependency, Logging, Partition, RDD, SerializableWritable, SparkContext, SparkEnv, TaskContext}
-import spark.util.NextIterator
+import org.apache.spark.{Logging, Partition, SerializableWritable, SparkContext, SparkEnv, TaskContext}
+import org.apache.spark.util.NextIterator
 import org.apache.hadoop.conf.{Configuration, Configurable}
 
 
diff --git a/core/src/main/scala/spark/rdd/JdbcRDD.scala b/core/src/main/scala/org/apache/spark/rdd/JdbcRDD.scala
similarity index 96%
rename from core/src/main/scala/spark/rdd/JdbcRDD.scala
rename to core/src/main/scala/org/apache/spark/rdd/JdbcRDD.scala
index 59132437d20cf817343c326e9a65ab1aeaeba79f..aca01468842c44477614beca2787ddfbfcc41a31 100644
--- a/core/src/main/scala/spark/rdd/JdbcRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/JdbcRDD.scala
@@ -15,12 +15,12 @@
  * limitations under the License.
  */
 
-package spark.rdd
+package org.apache.spark.rdd
 
 import java.sql.{Connection, ResultSet}
 
-import spark.{Logging, Partition, RDD, SparkContext, TaskContext}
-import spark.util.NextIterator
+import org.apache.spark.{Logging, Partition, SparkContext, TaskContext}
+import org.apache.spark.util.NextIterator
 
 private[spark] class JdbcPartition(idx: Int, val lower: Long, val upper: Long) extends Partition {
   override def index = idx
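JdbcRDD's partitioning is driven by the lower/upper bounds stored in JdbcPartition above: the bound range is split into numPartitions slices and bound to the query's two '?' placeholders. A usage sketch (the JDBC URL and table are illustrative only):

```scala
import java.sql.{DriverManager, ResultSet}

import org.apache.spark.SparkContext
import org.apache.spark.rdd.JdbcRDD

val sc = new SparkContext("local", "JdbcDemo")
// The query must carry two '?' placeholders for each partition's id range.
val rows = new JdbcRDD(
  sc,
  () => DriverManager.getConnection("jdbc:h2:mem:testdb"),
  "SELECT id, name FROM users WHERE id >= ? AND id <= ?",
  1, 1000, 3,   // lowerBound, upperBound, numPartitions
  (rs: ResultSet) => (rs.getLong(1), rs.getString(2)))
println(rows.count())
```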
diff --git a/core/src/main/scala/spark/rdd/MapPartitionsRDD.scala b/core/src/main/scala/org/apache/spark/rdd/MapPartitionsRDD.scala
similarity index 94%
rename from core/src/main/scala/spark/rdd/MapPartitionsRDD.scala
rename to core/src/main/scala/org/apache/spark/rdd/MapPartitionsRDD.scala
index af8f0a112f2acd184af1211faa04119adeab3e1e..203179c4ea823efb7d1b48cbf3e612f289729ae9 100644
--- a/core/src/main/scala/spark/rdd/MapPartitionsRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/MapPartitionsRDD.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.rdd
+package org.apache.spark.rdd
 
-import spark.{RDD, Partition, TaskContext}
+import org.apache.spark.{Partition, TaskContext}
 
 
 private[spark]
diff --git a/core/src/main/scala/spark/rdd/MapPartitionsWithIndexRDD.scala b/core/src/main/scala/org/apache/spark/rdd/MapPartitionsWithIndexRDD.scala
similarity index 95%
rename from core/src/main/scala/spark/rdd/MapPartitionsWithIndexRDD.scala
rename to core/src/main/scala/org/apache/spark/rdd/MapPartitionsWithIndexRDD.scala
index 3b4e9518fd0acc2706cffa20e1771ea72bc746ba..3ed833901073a442709cac851793c5f8557447cc 100644
--- a/core/src/main/scala/spark/rdd/MapPartitionsWithIndexRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/MapPartitionsWithIndexRDD.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.rdd
+package org.apache.spark.rdd
 
-import spark.{RDD, Partition, TaskContext}
+import org.apache.spark.{Partition, TaskContext}
 
 
 /**
diff --git a/core/src/main/scala/spark/rdd/MappedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/MappedRDD.scala
similarity index 93%
rename from core/src/main/scala/spark/rdd/MappedRDD.scala
rename to core/src/main/scala/org/apache/spark/rdd/MappedRDD.scala
index 8b411dd85d9159465893b97067b7919d54f46cdb..e8be1c4816e455f0529cc8c5afcdfbdb117f9815 100644
--- a/core/src/main/scala/spark/rdd/MappedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/MappedRDD.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.rdd
+package org.apache.spark.rdd
 
-import spark.{RDD, Partition, TaskContext}
+import org.apache.spark.{Partition, TaskContext}
 
 private[spark]
 class MappedRDD[U: ClassManifest, T: ClassManifest](prev: RDD[T], f: T => U)
diff --git a/core/src/main/scala/spark/rdd/MappedValuesRDD.scala b/core/src/main/scala/org/apache/spark/rdd/MappedValuesRDD.scala
similarity index 94%
rename from core/src/main/scala/spark/rdd/MappedValuesRDD.scala
rename to core/src/main/scala/org/apache/spark/rdd/MappedValuesRDD.scala
index 8334e3b557dd22f8deb597b8e11c775a0d4dee63..d33c1af58154c3b85fc7a5fa1a4f2bcd433ebe61 100644
--- a/core/src/main/scala/spark/rdd/MappedValuesRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/MappedValuesRDD.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.rdd
+package org.apache.spark.rdd
 
 
-import spark.{TaskContext, Partition, RDD}
+import org.apache.spark.{TaskContext, Partition}
 
 private[spark]
 class MappedValuesRDD[K, V, U](prev: RDD[_ <: Product2[K, V]], f: V => U)
diff --git a/core/src/main/scala/spark/rdd/NewHadoopRDD.scala b/core/src/main/scala/org/apache/spark/rdd/NewHadoopRDD.scala
similarity index 96%
rename from core/src/main/scala/spark/rdd/NewHadoopRDD.scala
rename to core/src/main/scala/org/apache/spark/rdd/NewHadoopRDD.scala
index b1877dc06e5103ca6b57099a35d546b973a3cd30..7b3a89f7e006a56a8f4e7b3bc7183f5d87379d82 100644
--- a/core/src/main/scala/spark/rdd/NewHadoopRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/NewHadoopRDD.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.rdd
+package org.apache.spark.rdd
 
 import java.text.SimpleDateFormat
 import java.util.Date
@@ -24,7 +24,7 @@ import org.apache.hadoop.conf.{Configurable, Configuration}
 import org.apache.hadoop.io.Writable
 import org.apache.hadoop.mapreduce._
 
-import spark.{Dependency, Logging, Partition, RDD, SerializableWritable, SparkContext, TaskContext}
+import org.apache.spark.{Dependency, Logging, Partition, SerializableWritable, SparkContext, TaskContext}
 
 
 private[spark]
diff --git a/core/src/main/scala/spark/rdd/OrderedRDDFunctions.scala b/core/src/main/scala/org/apache/spark/rdd/OrderedRDDFunctions.scala
similarity index 87%
rename from core/src/main/scala/spark/rdd/OrderedRDDFunctions.scala
rename to core/src/main/scala/org/apache/spark/rdd/OrderedRDDFunctions.scala
index 9154b760359aa1836d214219f216a136dfc25b09..697be8b997bbdfa2d93efb92477839b382f9b5b8 100644
--- a/core/src/main/scala/spark/rdd/OrderedRDDFunctions.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/OrderedRDDFunctions.scala
@@ -15,14 +15,15 @@
  * limitations under the License.
  */
 
-package spark.rdd
+package org.apache.spark.rdd
 
-import spark.{RangePartitioner, Logging, RDD}
+import org.apache.spark.{RangePartitioner, Logging}
 
 /**
  * Extra functions available on RDDs of (key, value) pairs where the key is sortable through
- * an implicit conversion. Import `spark.SparkContext._` at the top of your program to use these
- * functions. They will work with any key type that has a `scala.math.Ordered` implementation.
+ * an implicit conversion. Import `org.apache.spark.SparkContext._` at the top of your program to
+ * use these functions. They will work with any key type that has a `scala.math.Ordered`
+ * implementation.
  */
 class OrderedRDDFunctions[K <% Ordered[K]: ClassManifest,
                           V: ClassManifest,
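
The doc comment above describes the implicit-conversion pattern; a minimal usage sketch under the new package name (the object name and `local[2]` master are illustrative):

```scala
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._  // brings the implicit conversions into scope

object SortByKeyExample {
  def main(args: Array[String]) {
    val sc = new SparkContext("local[2]", "SortByKeyExample")
    // String keys have a scala.math.Ordered view, so sortByKey is available
    // through the implicit conversion to OrderedRDDFunctions
    val pairs = sc.parallelize(Seq(("b", 2), ("a", 1), ("c", 3)))
    println(pairs.sortByKey().collect().mkString(", "))
    sc.stop()
  }
}
```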
diff --git a/core/src/main/scala/spark/PairRDDFunctions.scala b/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
similarity index 97%
rename from core/src/main/scala/spark/PairRDDFunctions.scala
rename to core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
index cc1285dd95f4765d9cbcd994db021cb1830031d7..a47c5122759dbd72bb2cbdcec1ad72da6c6ff8db 100644
--- a/core/src/main/scala/spark/PairRDDFunctions.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
@@ -15,40 +15,40 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark.rdd
 
 import java.nio.ByteBuffer
-import java.util.{Date, HashMap => JHashMap}
+import java.util.Date
 import java.text.SimpleDateFormat
+import java.util.{HashMap => JHashMap}
 
 import scala.collection.{mutable, Map}
 import scala.collection.mutable.ArrayBuffer
 import scala.collection.JavaConversions._
 
+import org.apache.hadoop.mapred._
+import org.apache.hadoop.io.compress.CompressionCodec
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.Path
-import org.apache.hadoop.io.compress.CompressionCodec
 import org.apache.hadoop.io.SequenceFile.CompressionType
-import org.apache.hadoop.mapred.FileOutputCommitter
 import org.apache.hadoop.mapred.FileOutputFormat
-import org.apache.hadoop.mapred.SparkHadoopWriter
-import org.apache.hadoop.mapred.JobConf
 import org.apache.hadoop.mapred.OutputFormat
-
+import org.apache.hadoop.mapreduce.{OutputFormat => NewOutputFormat}
 import org.apache.hadoop.mapreduce.lib.output.{FileOutputFormat => NewFileOutputFormat}
-import org.apache.hadoop.mapreduce.{OutputFormat => NewOutputFormat,
-    RecordWriter => NewRecordWriter, Job => NewAPIHadoopJob, SparkHadoopMapReduceUtil}
-import org.apache.hadoop.security.UserGroupInformation
+import org.apache.hadoop.mapreduce.SparkHadoopMapReduceUtil
+import org.apache.hadoop.mapreduce.{Job => NewAPIHadoopJob}
+import org.apache.hadoop.mapreduce.{RecordWriter => NewRecordWriter}
 
-import spark.partial.BoundedDouble
-import spark.partial.PartialResult
-import spark.rdd._
-import spark.SparkContext._
-import spark.Partitioner._
+import org.apache.spark._
+import org.apache.spark.SparkContext._
+import org.apache.spark.partial.{BoundedDouble, PartialResult}
+import org.apache.spark.Aggregator
+import org.apache.spark.Partitioner
+import org.apache.spark.Partitioner.defaultPartitioner
 
 /**
  * Extra functions available on RDDs of (key, value) pairs through an implicit conversion.
- * Import `spark.SparkContext._` at the top of your program to use these functions.
+ * Import `org.apache.spark.SparkContext._` at the top of your program to use these functions.
  */
 class PairRDDFunctions[K: ClassManifest, V: ClassManifest](self: RDD[(K, V)])
   extends Logging
@@ -559,7 +559,7 @@ class PairRDDFunctions[K: ClassManifest, V: ClassManifest](self: RDD[(K, V)])
     val formatter = new SimpleDateFormat("yyyyMMddHHmm")
     val jobtrackerID = formatter.format(new Date())
     val stageId = self.id
-    def writeShard(context: spark.TaskContext, iter: Iterator[(K,V)]): Int = {
+    def writeShard(context: TaskContext, iter: Iterator[(K,V)]): Int = {
       // Hadoop wants a 32-bit task attempt ID, so if ours is bigger than Int.MaxValue, roll it
       // around by taking a mod. We expect that no task will be attempted 2 billion times.
       val attemptNumber = (context.attemptId % Int.MaxValue).toInt
@@ -571,7 +571,7 @@ class PairRDDFunctions[K: ClassManifest, V: ClassManifest](self: RDD[(K, V)])
       committer.setupTask(hadoopContext)
       val writer = format.getRecordWriter(hadoopContext).asInstanceOf[NewRecordWriter[K,V]]
       while (iter.hasNext) {
-        val (k, v) = iter.next
+        val (k, v) = iter.next()
         writer.write(k, v)
       }
       writer.close(hadoopContext)
@@ -697,7 +697,6 @@ class PairRDDFunctions[K: ClassManifest, V: ClassManifest](self: RDD[(K, V)])
   private[spark] def getValueClass() = implicitly[ClassManifest[V]].erasure
 }
 
-
 private[spark] object Manifests {
   val seqSeqManifest = classManifest[Seq[Seq[_]]]
 }
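
`PairRDDFunctions` moves from the top-level `spark` package into `org.apache.spark.rdd`, but the usage pattern from its doc comment is unchanged; a minimal sketch (names illustrative):

```scala
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._  // implicit conversion to PairRDDFunctions

val sc = new SparkContext("local", "WordCountExample")
val words = sc.parallelize(Seq("a", "b", "a", "c", "b", "a"))
// map to (word, 1) pairs, then aggregate with reduceByKey from PairRDDFunctions
val counts = words.map(w => (w, 1)).reduceByKey(_ + _)
counts.collect().foreach(println)
sc.stop()
```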
diff --git a/core/src/main/scala/spark/rdd/ParallelCollectionRDD.scala b/core/src/main/scala/org/apache/spark/rdd/ParallelCollectionRDD.scala
similarity index 97%
rename from core/src/main/scala/spark/rdd/ParallelCollectionRDD.scala
rename to core/src/main/scala/org/apache/spark/rdd/ParallelCollectionRDD.scala
index 33079cd53937d99062c2fd6ddc0e04a68476a007..6dbd4309aae6af943b78a15bc1209146b5c76699 100644
--- a/core/src/main/scala/spark/rdd/ParallelCollectionRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/ParallelCollectionRDD.scala
@@ -15,14 +15,16 @@
  * limitations under the License.
  */
 
-package spark.rdd
+package org.apache.spark.rdd
 
 import scala.collection.immutable.NumericRange
 import scala.collection.mutable.ArrayBuffer
 import scala.collection.Map
-import spark._
+import org.apache.spark._
 import java.io._
 import scala.Serializable
+import org.apache.spark.serializer.JavaSerializer
+import org.apache.spark.util.Utils
 
 private[spark] class ParallelCollectionPartition[T: ClassManifest](
     var rddId: Long,
diff --git a/core/src/main/scala/spark/rdd/PartitionPruningRDD.scala b/core/src/main/scala/org/apache/spark/rdd/PartitionPruningRDD.scala
similarity index 96%
rename from core/src/main/scala/spark/rdd/PartitionPruningRDD.scala
rename to core/src/main/scala/org/apache/spark/rdd/PartitionPruningRDD.scala
index d8700becb0e870ff5242c77219b53c777771c210..165cd412fcfb85f23089719205e53d49c4af12bc 100644
--- a/core/src/main/scala/spark/rdd/PartitionPruningRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/PartitionPruningRDD.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.rdd
+package org.apache.spark.rdd
 
-import spark.{NarrowDependency, RDD, SparkEnv, Partition, TaskContext}
+import org.apache.spark.{NarrowDependency, SparkEnv, Partition, TaskContext}
 
 
 class PartitionPruningRDDPartition(idx: Int, val parentSplit: Partition) extends Partition {
diff --git a/core/src/main/scala/spark/rdd/PipedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala
similarity index 96%
rename from core/src/main/scala/spark/rdd/PipedRDD.scala
rename to core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala
index 2cefdc78b01b27da5f051c6d75ae59bc685dfac7..d5304ab0aed67088406fedf50da1faf549699004 100644
--- a/core/src/main/scala/spark/rdd/PipedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.rdd
+package org.apache.spark.rdd
 
 import java.io.PrintWriter
 import java.util.StringTokenizer
@@ -25,8 +25,8 @@ import scala.collection.JavaConversions._
 import scala.collection.mutable.ArrayBuffer
 import scala.io.Source
 
-import spark.{RDD, SparkEnv, Partition, TaskContext}
-import spark.broadcast.Broadcast
+import org.apache.spark.{SparkEnv, Partition, TaskContext}
+import org.apache.spark.broadcast.Broadcast
 
 
 /**
diff --git a/core/src/main/scala/spark/RDD.scala b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
similarity index 95%
rename from core/src/main/scala/spark/RDD.scala
rename to core/src/main/scala/org/apache/spark/rdd/RDD.scala
index 25a6951732b055d87123be8b6044ca0eb548bd98..e143ecd0961cd42b5d0f464b70ac737f73a6bc88 100644
--- a/core/src/main/scala/spark/RDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark.rdd
 
 import java.util.Random
 
@@ -31,43 +31,28 @@ import org.apache.hadoop.mapred.TextOutputFormat
 
 import it.unimi.dsi.fastutil.objects.{Object2LongOpenHashMap => OLMap}
 
-import spark.Partitioner._
-import spark.api.java.JavaRDD
-import spark.partial.BoundedDouble
-import spark.partial.CountEvaluator
-import spark.partial.GroupedCountEvaluator
-import spark.partial.PartialResult
-import spark.rdd.CoalescedRDD
-import spark.rdd.CartesianRDD
-import spark.rdd.FilteredRDD
-import spark.rdd.FlatMappedRDD
-import spark.rdd.GlommedRDD
-import spark.rdd.MappedRDD
-import spark.rdd.MapPartitionsRDD
-import spark.rdd.MapPartitionsWithIndexRDD
-import spark.rdd.PipedRDD
-import spark.rdd.SampledRDD
-import spark.rdd.ShuffledRDD
-import spark.rdd.UnionRDD
-import spark.rdd.ZippedRDD
-import spark.rdd.ZippedPartitionsRDD2
-import spark.rdd.ZippedPartitionsRDD3
-import spark.rdd.ZippedPartitionsRDD4
-import spark.storage.StorageLevel
-import spark.util.BoundedPriorityQueue
-
-import SparkContext._
+import org.apache.spark.Partitioner._
+import org.apache.spark.api.java.JavaRDD
+import org.apache.spark.partial.BoundedDouble
+import org.apache.spark.partial.CountEvaluator
+import org.apache.spark.partial.GroupedCountEvaluator
+import org.apache.spark.partial.PartialResult
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.util.{Utils, BoundedPriorityQueue}
+
+import org.apache.spark.SparkContext._
+import org.apache.spark._
 
 /**
  * A Resilient Distributed Dataset (RDD), the basic abstraction in Spark. Represents an immutable,
  * partitioned collection of elements that can be operated on in parallel. This class contains the
  * basic operations available on all RDDs, such as `map`, `filter`, and `persist`. In addition,
- * [[spark.PairRDDFunctions]] contains operations available only on RDDs of key-value pairs, such
- * as `groupByKey` and `join`; [[spark.DoubleRDDFunctions]] contains operations available only on
- * RDDs of Doubles; and [[spark.SequenceFileRDDFunctions]] contains operations available on RDDs
- * that can be saved as SequenceFiles. These operations are automatically available on any RDD of
- * the right type (e.g. RDD[(Int, Int)] through implicit conversions when you
- * `import spark.SparkContext._`.
+ * [[org.apache.spark.rdd.PairRDDFunctions]] contains operations available only on RDDs of key-value
+ * pairs, such as `groupByKey` and `join`; [[org.apache.spark.rdd.DoubleRDDFunctions]] contains
+ * operations available only on RDDs of Doubles; and [[org.apache.spark.rdd.SequenceFileRDDFunctions]]
+ * contains operations available on RDDs that can be saved as SequenceFiles. These operations are
+ * automatically available on any RDD of the right type (e.g. RDD[(Int, Int)]) through implicit
+ * conversions when you `import org.apache.spark.SparkContext._`.
  *
  * Internally, each RDD is characterized by five main properties:
  *
@@ -893,7 +878,7 @@ abstract class RDD[T: ClassManifest](
     dependencies.head.rdd.asInstanceOf[RDD[U]]
   }
 
-  /** The [[spark.SparkContext]] that this RDD was created on. */
+  /** The [[org.apache.spark.SparkContext]] that this RDD was created on. */
   def context = sc
 
   // Avoid handling doCheckpoint multiple times to prevent excessive recursion
@@ -929,7 +914,7 @@ abstract class RDD[T: ClassManifest](
    * Clears the dependencies of this RDD. This method must ensure that all references
    * to the original parent RDDs are removed to enable the parent RDDs to be garbage
    * collected. Subclasses of RDD may override this method for implementing their own cleaning
-   * logic. See [[spark.rdd.UnionRDD]] for an example.
+   * logic. See [[org.apache.spark.rdd.UnionRDD]] for an example.
    */
   protected def clearDependencies() {
     dependencies_ = null
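
As the rewritten scaladoc notes, user code only needs the new import path to pick up the extra operations; a minimal sketch of the basic operations the comment names (`map`, `filter`, `persist`):

```scala
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.storage.StorageLevel

val sc = new SparkContext("local", "RDDBasicsExample")
val nums = sc.parallelize(1 to 100)
val evens = nums.filter(_ % 2 == 0).map(_ * 10)
evens.persist(StorageLevel.MEMORY_ONLY)  // reuse the result across the two actions below
println(evens.count())
println(evens.first())
sc.stop()
```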
diff --git a/core/src/main/scala/spark/RDDCheckpointData.scala b/core/src/main/scala/org/apache/spark/rdd/RDDCheckpointData.scala
similarity index 96%
rename from core/src/main/scala/spark/RDDCheckpointData.scala
rename to core/src/main/scala/org/apache/spark/rdd/RDDCheckpointData.scala
index b615f820eb47ac7dd3a8e7d45a5c112c71cbd389..6009a41570ebd32c52613aa065dd8ef0d0434f51 100644
--- a/core/src/main/scala/spark/RDDCheckpointData.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/RDDCheckpointData.scala
@@ -15,12 +15,13 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark.rdd
 
 import org.apache.hadoop.fs.Path
 import org.apache.hadoop.conf.Configuration
-import rdd.{CheckpointRDD, CoalescedRDD}
-import scheduler.{ResultTask, ShuffleMapTask}
+
+import org.apache.spark.{Partition, SparkException, Logging}
+import org.apache.spark.scheduler.{ResultTask, ShuffleMapTask}
 
 /**
  * Enumeration to manage state transitions of an RDD through checkpointing
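
`RDDCheckpointData` tracks an RDD's progress through the checkpointing states; from the user's side the flow looks roughly like this (the local checkpoint directory is illustrative; production use would point at a fault-tolerant filesystem such as HDFS):

```scala
import org.apache.spark.SparkContext

val sc = new SparkContext("local", "CheckpointExample")
sc.setCheckpointDir("/tmp/spark-checkpoints")

val data = sc.parallelize(1 to 1000).map(_ * 2)
data.checkpoint()  // marks the RDD; RDDCheckpointData leaves the Initialized state
data.count()       // the first job after marking triggers the actual checkpoint write
sc.stop()
```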
diff --git a/core/src/main/scala/spark/rdd/SampledRDD.scala b/core/src/main/scala/org/apache/spark/rdd/SampledRDD.scala
similarity index 96%
rename from core/src/main/scala/spark/rdd/SampledRDD.scala
rename to core/src/main/scala/org/apache/spark/rdd/SampledRDD.scala
index 574c9b141de3f869b1817799679a87f5e279a08a..2c5253ae301c39d127733b7f7eb6a1641a49d8f3 100644
--- a/core/src/main/scala/spark/rdd/SampledRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/SampledRDD.scala
@@ -15,14 +15,14 @@
  * limitations under the License.
  */
 
-package spark.rdd
+package org.apache.spark.rdd
 
 import java.util.Random
 
 import cern.jet.random.Poisson
 import cern.jet.random.engine.DRand
 
-import spark.{RDD, Partition, TaskContext}
+import org.apache.spark.{Partition, TaskContext}
 
 private[spark]
 class SampledRDDPartition(val prev: Partition, val seed: Int) extends Partition with Serializable {
diff --git a/core/src/main/scala/spark/SequenceFileRDDFunctions.scala b/core/src/main/scala/org/apache/spark/rdd/SequenceFileRDDFunctions.scala
similarity index 83%
rename from core/src/main/scala/spark/SequenceFileRDDFunctions.scala
rename to core/src/main/scala/org/apache/spark/rdd/SequenceFileRDDFunctions.scala
index 9f30b7f22f2b8e72110bdbded9b92df17240f79d..5fe4676029d971f6d58cde827b135e6247c17073 100644
--- a/core/src/main/scala/spark/SequenceFileRDDFunctions.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/SequenceFileRDDFunctions.scala
@@ -15,40 +15,22 @@
  * limitations under the License.
  */
 
-package spark
-
-import java.io.EOFException
-import java.net.URL
-import java.io.ObjectInputStream
-import java.util.concurrent.atomic.AtomicLong
-import java.util.HashSet
-import java.util.Random
-import java.util.Date
-
-import scala.collection.mutable.ArrayBuffer
-import scala.collection.mutable.Map
-import scala.collection.mutable.HashMap
+package org.apache.spark.rdd
 
 import org.apache.hadoop.mapred.JobConf
-import org.apache.hadoop.mapred.OutputFormat
-import org.apache.hadoop.mapred.TextOutputFormat
 import org.apache.hadoop.mapred.SequenceFileOutputFormat
-import org.apache.hadoop.mapred.OutputCommitter
-import org.apache.hadoop.mapred.FileOutputCommitter
 import org.apache.hadoop.io.compress.CompressionCodec
 import org.apache.hadoop.io.Writable
-import org.apache.hadoop.io.NullWritable
-import org.apache.hadoop.io.BytesWritable
-import org.apache.hadoop.io.Text
 
-import spark.SparkContext._
+import org.apache.spark.SparkContext._
+import org.apache.spark.Logging
 
 /**
  * Extra functions available on RDDs of (key, value) pairs to create a Hadoop SequenceFile,
  * through an implicit conversion. Note that this can't be part of PairRDDFunctions because
  * we need more implicit parameters to convert our keys and values to Writable.
  *
- * Users should import `spark.SparkContext._` at the top of their program to use these functions.
+ * Import `org.apache.spark.SparkContext._` at the top of your program to use these functions.
  */
 class SequenceFileRDDFunctions[K <% Writable: ClassManifest, V <% Writable : ClassManifest](
     self: RDD[(K, V)])
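
A minimal sketch of the conversion this file provides, saving a pair RDD as a SequenceFile under the new package (the output path is illustrative):

```scala
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._  // implicit conversion to SequenceFileRDDFunctions

val sc = new SparkContext("local", "SequenceFileExample")
// Int and String are viewable as IntWritable and Text, satisfying the
// K <% Writable and V <% Writable bounds on SequenceFileRDDFunctions
val pairs = sc.parallelize(Seq((1, "one"), (2, "two")))
pairs.saveAsSequenceFile("/tmp/pairs-seqfile")
sc.stop()
```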
diff --git a/core/src/main/scala/spark/rdd/ShuffledRDD.scala b/core/src/main/scala/org/apache/spark/rdd/ShuffledRDD.scala
similarity index 94%
rename from core/src/main/scala/spark/rdd/ShuffledRDD.scala
rename to core/src/main/scala/org/apache/spark/rdd/ShuffledRDD.scala
index 51c05af064ef7456c0020d0f78a68ebd31a9b069..9537152335052165fe63762549992d5c38a14e59 100644
--- a/core/src/main/scala/spark/rdd/ShuffledRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/ShuffledRDD.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.rdd
+package org.apache.spark.rdd
 
-import spark.{Dependency, Partitioner, RDD, SparkEnv, ShuffleDependency, Partition, TaskContext}
+import org.apache.spark.{Dependency, Partitioner, SparkEnv, ShuffleDependency, Partition, TaskContext}
 
 
 private[spark] class ShuffledRDDPartition(val idx: Int) extends Partition {
diff --git a/core/src/main/scala/spark/rdd/SubtractedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/SubtractedRDD.scala
similarity index 93%
rename from core/src/main/scala/spark/rdd/SubtractedRDD.scala
rename to core/src/main/scala/org/apache/spark/rdd/SubtractedRDD.scala
index dadef5e17de594431ca70002438b025448248cfc..8c1a29dfff0f9405d4d82f02aac21db64ef8a666 100644
--- a/core/src/main/scala/spark/rdd/SubtractedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/SubtractedRDD.scala
@@ -15,19 +15,18 @@
  * limitations under the License.
  */
 
-package spark.rdd
+package org.apache.spark.rdd
 
 import java.util.{HashMap => JHashMap}
 import scala.collection.JavaConversions._
 import scala.collection.mutable.ArrayBuffer
-import spark.RDD
-import spark.Partitioner
-import spark.Dependency
-import spark.TaskContext
-import spark.Partition
-import spark.SparkEnv
-import spark.ShuffleDependency
-import spark.OneToOneDependency
+import org.apache.spark.Partitioner
+import org.apache.spark.Dependency
+import org.apache.spark.TaskContext
+import org.apache.spark.Partition
+import org.apache.spark.SparkEnv
+import org.apache.spark.ShuffleDependency
+import org.apache.spark.OneToOneDependency
 
 
 /**
diff --git a/core/src/main/scala/spark/rdd/UnionRDD.scala b/core/src/main/scala/org/apache/spark/rdd/UnionRDD.scala
similarity index 95%
rename from core/src/main/scala/spark/rdd/UnionRDD.scala
rename to core/src/main/scala/org/apache/spark/rdd/UnionRDD.scala
index 2776826f187aab0e2d5f6cdb4c6adaef9231215e..ae8a9f36a60145f2741aa8579a40eedc2dbc04dc 100644
--- a/core/src/main/scala/spark/rdd/UnionRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/UnionRDD.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.rdd
+package org.apache.spark.rdd
 
 import scala.collection.mutable.ArrayBuffer
-import spark.{Dependency, RangeDependency, RDD, SparkContext, Partition, TaskContext}
+import org.apache.spark.{Dependency, RangeDependency, SparkContext, Partition, TaskContext}
 import java.io.{ObjectOutputStream, IOException}
 
 private[spark] class UnionPartition[T: ClassManifest](idx: Int, rdd: RDD[T], splitIndex: Int)
diff --git a/core/src/main/scala/spark/rdd/ZippedPartitionsRDD.scala b/core/src/main/scala/org/apache/spark/rdd/ZippedPartitionsRDD.scala
similarity index 97%
rename from core/src/main/scala/spark/rdd/ZippedPartitionsRDD.scala
rename to core/src/main/scala/org/apache/spark/rdd/ZippedPartitionsRDD.scala
index 9a0831bd899e2ff48e7db6601e8fdc72ac279208..31e6fd519d0ddad814f8da707bf7f1f23974df73 100644
--- a/core/src/main/scala/spark/rdd/ZippedPartitionsRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/ZippedPartitionsRDD.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.rdd
+package org.apache.spark.rdd
 
-import spark.{Utils, OneToOneDependency, RDD, SparkContext, Partition, TaskContext}
+import org.apache.spark.{OneToOneDependency, SparkContext, Partition, TaskContext}
 import java.io.{ObjectOutputStream, IOException}
 
 private[spark] class ZippedPartitionsPartition(
diff --git a/core/src/main/scala/spark/rdd/ZippedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/ZippedRDD.scala
similarity index 96%
rename from core/src/main/scala/spark/rdd/ZippedRDD.scala
rename to core/src/main/scala/org/apache/spark/rdd/ZippedRDD.scala
index 4074e50e44040b96b756efeb69c238b7165ff142..567b67dfee0bf3083a8ee49539f81c10f59432d9 100644
--- a/core/src/main/scala/spark/rdd/ZippedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/ZippedRDD.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.rdd
+package org.apache.spark.rdd
 
-import spark.{Utils, OneToOneDependency, RDD, SparkContext, Partition, TaskContext}
+import org.apache.spark.{OneToOneDependency, SparkContext, Partition, TaskContext}
 import java.io.{ObjectOutputStream, IOException}
 
 
diff --git a/core/src/main/scala/spark/scheduler/ActiveJob.scala b/core/src/main/scala/org/apache/spark/scheduler/ActiveJob.scala
similarity index 94%
rename from core/src/main/scala/spark/scheduler/ActiveJob.scala
rename to core/src/main/scala/org/apache/spark/scheduler/ActiveJob.scala
index fecc3e9648a4ab749857c9dd371df4d3e98a8b64..0b04607d019a8431b1ab1aa07f3bda77f3421151 100644
--- a/core/src/main/scala/spark/scheduler/ActiveJob.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/ActiveJob.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.scheduler
+package org.apache.spark.scheduler
 
-import spark.TaskContext
+import org.apache.spark.TaskContext
 
 import java.util.Properties
 
diff --git a/core/src/main/scala/spark/scheduler/DAGScheduler.scala b/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala
similarity index 98%
rename from core/src/main/scala/spark/scheduler/DAGScheduler.scala
rename to core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala
index 7275bd346ad33a3a8e5a915b2f2eeadc416996a2..92add5b073ab0ccf99a8928d3774705d41307ed6 100644
--- a/core/src/main/scala/spark/scheduler/DAGScheduler.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.scheduler
+package org.apache.spark.scheduler
 
 import java.io.NotSerializableException
 import java.util.Properties
@@ -24,12 +24,13 @@ import java.util.concurrent.atomic.AtomicInteger
 
 import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet, Map}
 
-import spark._
-import spark.executor.TaskMetrics
-import spark.partial.{ApproximateActionListener, ApproximateEvaluator, PartialResult}
-import spark.scheduler.cluster.TaskInfo
-import spark.storage.{BlockManager, BlockManagerMaster}
-import spark.util.{MetadataCleaner, TimeStampedHashMap}
+import org.apache.spark._
+import org.apache.spark.rdd.RDD
+import org.apache.spark.executor.TaskMetrics
+import org.apache.spark.partial.{ApproximateActionListener, ApproximateEvaluator, PartialResult}
+import org.apache.spark.scheduler.cluster.TaskInfo
+import org.apache.spark.storage.{BlockManager, BlockManagerMaster}
+import org.apache.spark.util.{MetadataCleaner, TimeStampedHashMap}
 
 /**
  * The high-level scheduling layer that implements stage-oriented scheduling. It computes a DAG of
diff --git a/core/src/main/scala/spark/scheduler/DAGSchedulerEvent.scala b/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala
similarity index 92%
rename from core/src/main/scala/spark/scheduler/DAGSchedulerEvent.scala
rename to core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala
index b8ba0e92394c257aa5432253f1e9f6030ba141f1..0d996706489f0c3c50f04a1108d6a2a541c5f2ed 100644
--- a/core/src/main/scala/spark/scheduler/DAGSchedulerEvent.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala
@@ -15,15 +15,16 @@
  * limitations under the License.
  */
 
-package spark.scheduler
+package org.apache.spark.scheduler
 
 import java.util.Properties
 
-import spark.scheduler.cluster.TaskInfo
+import org.apache.spark.scheduler.cluster.TaskInfo
 import scala.collection.mutable.Map
 
-import spark._
-import spark.executor.TaskMetrics
+import org.apache.spark._
+import org.apache.spark.rdd.RDD
+import org.apache.spark.executor.TaskMetrics
 
 /**
  * Types of events that can be handled by the DAGScheduler. The DAGScheduler uses an event queue
diff --git a/core/src/main/scala/spark/scheduler/DAGSchedulerSource.scala b/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerSource.scala
similarity index 92%
rename from core/src/main/scala/spark/scheduler/DAGSchedulerSource.scala
rename to core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerSource.scala
index 98c4fb7e59a03755cebefb21fe6a99f0a2a89f82..ce0dc9093d8d36e0b8538731db102af7e13879eb 100644
--- a/core/src/main/scala/spark/scheduler/DAGSchedulerSource.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerSource.scala
@@ -1,8 +1,8 @@
-package spark.scheduler
+package org.apache.spark.scheduler
 
 import com.codahale.metrics.{Gauge,MetricRegistry}
 
-import spark.metrics.source.Source
+import org.apache.spark.metrics.source.Source
 
 private[spark] class DAGSchedulerSource(val dagScheduler: DAGScheduler) extends Source {
   val metricRegistry = new MetricRegistry()
diff --git a/core/src/main/scala/spark/scheduler/InputFormatInfo.scala b/core/src/main/scala/org/apache/spark/scheduler/InputFormatInfo.scala
similarity index 98%
rename from core/src/main/scala/spark/scheduler/InputFormatInfo.scala
rename to core/src/main/scala/org/apache/spark/scheduler/InputFormatInfo.scala
index 8f1b9b29b54c3f8c309b9f09fbe827fb9eb20b51..370ccd183c37849ce269798d9aa4a0a891368343 100644
--- a/core/src/main/scala/spark/scheduler/InputFormatInfo.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/InputFormatInfo.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.scheduler
+package org.apache.spark.scheduler
 
-import spark.{Logging, SparkEnv}
+import org.apache.spark.{Logging, SparkEnv}
 import scala.collection.immutable.Set
 import org.apache.hadoop.mapred.{FileInputFormat, JobConf}
 import org.apache.hadoop.security.UserGroupInformation
diff --git a/core/src/main/scala/spark/scheduler/JobListener.scala b/core/src/main/scala/org/apache/spark/scheduler/JobListener.scala
similarity index 97%
rename from core/src/main/scala/spark/scheduler/JobListener.scala
rename to core/src/main/scala/org/apache/spark/scheduler/JobListener.scala
index af108b8fec35674f5535fa4a37e999403059555d..50c2b9acd609c9b781b1f95a0a0547da50ea035b 100644
--- a/core/src/main/scala/spark/scheduler/JobListener.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/JobListener.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.scheduler
+package org.apache.spark.scheduler
 
 /**
  * Interface used to listen for job completion or failure events after submitting a job to the
diff --git a/core/src/main/scala/spark/scheduler/JobLogger.scala b/core/src/main/scala/org/apache/spark/scheduler/JobLogger.scala
similarity index 95%
rename from core/src/main/scala/spark/scheduler/JobLogger.scala
rename to core/src/main/scala/org/apache/spark/scheduler/JobLogger.scala
index 1bc9fabdffdcb83eda48fa341eb87a9dd90f9f0b..c8b78bf00a83c0ffeded1835c080ecaa14b3a97e 100644
--- a/core/src/main/scala/spark/scheduler/JobLogger.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/JobLogger.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.scheduler
+package org.apache.spark.scheduler
 
 import java.io.PrintWriter
 import java.io.File
@@ -27,9 +27,10 @@ import java.util.concurrent.LinkedBlockingQueue
 import scala.collection.mutable.{Map, HashMap, ListBuffer}
 import scala.io.Source
 
-import spark._
-import spark.executor.TaskMetrics
-import spark.scheduler.cluster.TaskInfo
+import org.apache.spark._
+import org.apache.spark.rdd.RDD
+import org.apache.spark.executor.TaskMetrics
+import org.apache.spark.scheduler.cluster.TaskInfo
 
 // Used to record runtime information for each job, including the RDD graph,
 // tasks' start/stop and shuffle information, and information from outside.
diff --git a/core/src/main/scala/spark/scheduler/JobResult.scala b/core/src/main/scala/org/apache/spark/scheduler/JobResult.scala
similarity index 96%
rename from core/src/main/scala/spark/scheduler/JobResult.scala
rename to core/src/main/scala/org/apache/spark/scheduler/JobResult.scala
index a61b335152ebb700d2dfd1abe4e6482f9e59be02..c381348a8d424af31171f1d8d07ab7fb931afc8d 100644
--- a/core/src/main/scala/spark/scheduler/JobResult.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/JobResult.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.scheduler
+package org.apache.spark.scheduler
 
 /**
  * A result of a job in the DAGScheduler.
diff --git a/core/src/main/scala/spark/scheduler/JobWaiter.scala b/core/src/main/scala/org/apache/spark/scheduler/JobWaiter.scala
similarity index 98%
rename from core/src/main/scala/spark/scheduler/JobWaiter.scala
rename to core/src/main/scala/org/apache/spark/scheduler/JobWaiter.scala
index 69cd161c1f0eba0dcd68b23c629464e3de4ac00a..200d8817990a42a7de74245a4d66a7b148778941 100644
--- a/core/src/main/scala/spark/scheduler/JobWaiter.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/JobWaiter.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.scheduler
+package org.apache.spark.scheduler
 
 import scala.collection.mutable.ArrayBuffer
 
diff --git a/core/src/main/scala/spark/scheduler/MapStatus.scala b/core/src/main/scala/org/apache/spark/scheduler/MapStatus.scala
similarity index 95%
rename from core/src/main/scala/spark/scheduler/MapStatus.scala
rename to core/src/main/scala/org/apache/spark/scheduler/MapStatus.scala
index 2f6a68ee85b248a378e74c0e69fe3bd8420b6d28..1c61687f280064998b704c4d96422af4e5cd2057 100644
--- a/core/src/main/scala/spark/scheduler/MapStatus.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/MapStatus.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.scheduler
+package org.apache.spark.scheduler
 
-import spark.storage.BlockManagerId
+import org.apache.spark.storage.BlockManagerId
 import java.io.{ObjectOutput, ObjectInput, Externalizable}
 
 /**
diff --git a/core/src/main/scala/spark/scheduler/ResultTask.scala b/core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala
similarity index 95%
rename from core/src/main/scala/spark/scheduler/ResultTask.scala
rename to core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala
index d066df5dc1dddfc288efc1deed9610bda6d30d9b..2b007cbe824bc1eca40c3cdd3c9e054567d5f1ff 100644
--- a/core/src/main/scala/spark/scheduler/ResultTask.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala
@@ -15,13 +15,16 @@
  * limitations under the License.
  */
 
-package spark.scheduler
+package org.apache.spark.scheduler
 
-import spark._
 import java.io._
-import util.{MetadataCleaner, TimeStampedHashMap}
 import java.util.zip.{GZIPInputStream, GZIPOutputStream}
 
+import org.apache.spark._
+import org.apache.spark.rdd.RDD
+import org.apache.spark.rdd.RDDCheckpointData
+import org.apache.spark.util.{MetadataCleaner, TimeStampedHashMap}
+
 private[spark] object ResultTask {
 
   // A simple map from the stage id to the serialized byte array of a task.
diff --git a/core/src/main/scala/spark/scheduler/ShuffleMapTask.scala b/core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala
similarity index 95%
rename from core/src/main/scala/spark/scheduler/ShuffleMapTask.scala
rename to core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala
index f2a038576b722cce8c8d2737d7856eeef8091c32..764775fedea2ecadd5995a45d0604e68d289c0fd 100644
--- a/core/src/main/scala/spark/scheduler/ShuffleMapTask.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala
@@ -15,17 +15,19 @@
  * limitations under the License.
  */
 
-package spark.scheduler
+package org.apache.spark.scheduler
 
 import java.io._
 import java.util.zip.{GZIPInputStream, GZIPOutputStream}
 
 import scala.collection.mutable.HashMap
 
-import spark._
-import spark.executor.ShuffleWriteMetrics
-import spark.storage._
-import spark.util.{TimeStampedHashMap, MetadataCleaner}
+import org.apache.spark._
+import org.apache.spark.executor.ShuffleWriteMetrics
+import org.apache.spark.storage._
+import org.apache.spark.util.{TimeStampedHashMap, MetadataCleaner}
+import org.apache.spark.rdd.RDD
+import org.apache.spark.rdd.RDDCheckpointData
 
 
 private[spark] object ShuffleMapTask {
diff --git a/core/src/main/scala/spark/scheduler/SparkListener.scala b/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala
similarity index 95%
rename from core/src/main/scala/spark/scheduler/SparkListener.scala
rename to core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala
index e5531011c2b87f71762cc8a4276cade0deb2c797..c3cf4b8907ebba878fcfb65d894ab97da0a06af8 100644
--- a/core/src/main/scala/spark/scheduler/SparkListener.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala
@@ -15,13 +15,13 @@
  * limitations under the License.
  */
 
-package spark.scheduler
+package org.apache.spark.scheduler
 
 import java.util.Properties
-import spark.scheduler.cluster.TaskInfo
-import spark.util.Distribution
-import spark.{Logging, SparkContext, TaskEndReason, Utils}
-import spark.executor.TaskMetrics
+import org.apache.spark.scheduler.cluster.TaskInfo
+import org.apache.spark.util.{Utils, Distribution}
+import org.apache.spark.{Logging, SparkContext, TaskEndReason}
+import org.apache.spark.executor.TaskMetrics
 
 sealed trait SparkListenerEvents
 
@@ -79,7 +79,7 @@ trait SparkListener {
  */
 class StatsReportListener extends SparkListener with Logging {
   override def onStageCompleted(stageCompleted: StageCompleted) {
-    import spark.scheduler.StatsReportListener._
+    import org.apache.spark.scheduler.StatsReportListener._
     implicit val sc = stageCompleted
     this.logInfo("Finished stage: " + stageCompleted.stageInfo)
     showMillisDistribution("task runtime:", (info, _) => Some(info.duration))
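
`StatsReportListener` above is itself an example of the listener interface; a minimal custom listener, assuming the 0.8-era `sc.addSparkListener` registration hook:

```scala
import org.apache.spark.SparkContext
import org.apache.spark.scheduler.{SparkListener, StageCompleted}

// only the callbacks of interest need overriding; the rest default to no-ops
class StageLogListener extends SparkListener {
  override def onStageCompleted(stageCompleted: StageCompleted) {
    println("Finished stage: " + stageCompleted.stageInfo)
  }
}

val sc = new SparkContext("local", "ListenerExample")
sc.addSparkListener(new StageLogListener)  // registration method assumed available on SparkContext
sc.parallelize(1 to 10).count()
sc.stop()
```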
diff --git a/core/src/main/scala/spark/scheduler/SparkListenerBus.scala b/core/src/main/scala/org/apache/spark/scheduler/SparkListenerBus.scala
similarity index 95%
rename from core/src/main/scala/spark/scheduler/SparkListenerBus.scala
rename to core/src/main/scala/org/apache/spark/scheduler/SparkListenerBus.scala
index f55ed455ed3f700cc5e6cc7664c1a7d39d40699b..a65e1ecd6d4bdc89903779fa3d12ce82782cdba4 100644
--- a/core/src/main/scala/spark/scheduler/SparkListenerBus.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/SparkListenerBus.scala
@@ -15,13 +15,13 @@
  * limitations under the License.
  */
 
-package spark.scheduler
+package org.apache.spark.scheduler
 
 import java.util.concurrent.LinkedBlockingQueue
 
 import scala.collection.mutable.{ArrayBuffer, SynchronizedBuffer}
 
-import spark.Logging
+import org.apache.spark.Logging
 
 /** Asynchronously passes SparkListenerEvents to registered SparkListeners. */
 private[spark] class SparkListenerBus() extends Logging {
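
The bus decouples event producers from listeners: posting enqueues and returns, while a single daemon thread drains the queue and delivers. A hedged sketch of that pattern (not the actual private class), with plain strings standing in for SparkListenerEvents:

```scala
import java.util.concurrent.LinkedBlockingQueue

val eventQueue = new LinkedBlockingQueue[String]

// one daemon thread drains the queue so posting never blocks the scheduler
val deliveryThread = new Thread("listener-bus-sketch") {
  setDaemon(true)
  override def run() {
    while (true) {
      val event = eventQueue.take()          // blocks until an event is available
      println("delivering event: " + event)  // a real bus would fan out to each listener
    }
  }
}
deliveryThread.start()

eventQueue.put("StageCompleted")  // producers simply enqueue and return
```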
diff --git a/core/src/main/scala/spark/scheduler/SplitInfo.scala b/core/src/main/scala/org/apache/spark/scheduler/SplitInfo.scala
similarity index 98%
rename from core/src/main/scala/spark/scheduler/SplitInfo.scala
rename to core/src/main/scala/org/apache/spark/scheduler/SplitInfo.scala
index 4e3661ec5da9b71f58ca1a0797885472481b9325..5b40a3eb29b30d57f10c380f8d6ca90dafc8642e 100644
--- a/core/src/main/scala/spark/scheduler/SplitInfo.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/SplitInfo.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.scheduler
+package org.apache.spark.scheduler
 
 import collection.mutable.ArrayBuffer
 
diff --git a/core/src/main/scala/spark/scheduler/Stage.scala b/core/src/main/scala/org/apache/spark/scheduler/Stage.scala
similarity index 96%
rename from core/src/main/scala/spark/scheduler/Stage.scala
rename to core/src/main/scala/org/apache/spark/scheduler/Stage.scala
index c599c00ac4406cb7e91357f4a3c0eb6dbc24eda1..aa293dc6b31b5c03784cd7745ce5ccd134f1fde4 100644
--- a/core/src/main/scala/spark/scheduler/Stage.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/Stage.scala
@@ -15,12 +15,11 @@
  * limitations under the License.
  */
 
-package spark.scheduler
+package org.apache.spark.scheduler
 
-import java.net.URI
-
-import spark._
-import spark.storage.BlockManagerId
+import org.apache.spark._
+import org.apache.spark.rdd.RDD
+import org.apache.spark.storage.BlockManagerId
 
 /**
  * A stage is a set of independent tasks all computing the same function that need to run as part
diff --git a/core/src/main/scala/spark/scheduler/StageInfo.scala b/core/src/main/scala/org/apache/spark/scheduler/StageInfo.scala
similarity index 88%
rename from core/src/main/scala/spark/scheduler/StageInfo.scala
rename to core/src/main/scala/org/apache/spark/scheduler/StageInfo.scala
index c4026f995a49fd3849796bc389f407b4f88760ef..72cb1c9ce8a2fac9e184db8a30d8cb69d976fc76 100644
--- a/core/src/main/scala/spark/scheduler/StageInfo.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/StageInfo.scala
@@ -15,11 +15,11 @@
  * limitations under the License.
  */
 
-package spark.scheduler
+package org.apache.spark.scheduler
 
-import spark.scheduler.cluster.TaskInfo
+import org.apache.spark.scheduler.cluster.TaskInfo
 import scala.collection._
-import spark.executor.TaskMetrics
+import org.apache.spark.executor.TaskMetrics
 
 case class StageInfo(
     val stage: Stage,
diff --git a/core/src/main/scala/spark/scheduler/Task.scala b/core/src/main/scala/org/apache/spark/scheduler/Task.scala
similarity index 95%
rename from core/src/main/scala/spark/scheduler/Task.scala
rename to core/src/main/scala/org/apache/spark/scheduler/Task.scala
index 0ab2ae6cfe9f655ea74420a693f1938c8352beea..598d91752a31e81b715cb324c0e348e8aedd518f 100644
--- a/core/src/main/scala/spark/scheduler/Task.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/Task.scala
@@ -15,15 +15,15 @@
  * limitations under the License.
  */
 
-package spark.scheduler
+package org.apache.spark.scheduler
 
-import spark.serializer.SerializerInstance
+import org.apache.spark.serializer.SerializerInstance
 import java.io.{DataInputStream, DataOutputStream}
 import java.nio.ByteBuffer
 import it.unimi.dsi.fastutil.io.FastByteArrayOutputStream
-import spark.util.ByteBufferInputStream
+import org.apache.spark.util.ByteBufferInputStream
 import scala.collection.mutable.HashMap
-import spark.executor.TaskMetrics
+import org.apache.spark.executor.TaskMetrics
 
 /**
  * A task to execute on a worker node.
diff --git a/core/src/main/scala/spark/scheduler/TaskLocation.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskLocation.scala
similarity index 97%
rename from core/src/main/scala/spark/scheduler/TaskLocation.scala
rename to core/src/main/scala/org/apache/spark/scheduler/TaskLocation.scala
index fea117e956d454e7eaa1bea3aa2c7506a6423f08..67c9a6760b1b3766de7730e90b3c371529a646ce 100644
--- a/core/src/main/scala/spark/scheduler/TaskLocation.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskLocation.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.scheduler
+package org.apache.spark.scheduler
 
 /**
  * A location where a task should run. This can either be a host or a (host, executorID) pair.
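
A hedged sketch of the idea in that comment, using a hypothetical stand-in for the actual `private[spark]` class: a location is a host, optionally pinned to one executor.

```scala
// hypothetical illustration type, not the real TaskLocation
case class Location(host: String, executorId: Option[String] = None)

val anywhereOnHost = Location("worker1.example.com")              // any executor on the host
val pinned = Location("worker1.example.com", Some("executor-3"))  // a (host, executorID) pair
```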
diff --git a/core/src/main/scala/spark/scheduler/TaskResult.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskResult.scala
similarity index 93%
rename from core/src/main/scala/spark/scheduler/TaskResult.scala
rename to core/src/main/scala/org/apache/spark/scheduler/TaskResult.scala
index fc4856756b127d4cee32e34231c4b55365b15f54..5c7e5bb9775e9ae1a9889ce4825599b3e3fb6d5e 100644
--- a/core/src/main/scala/spark/scheduler/TaskResult.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskResult.scala
@@ -15,14 +15,15 @@
  * limitations under the License.
  */
 
-package spark.scheduler
+package org.apache.spark.scheduler
 
 import java.io._
 
 import scala.collection.mutable.Map
-import spark.executor.TaskMetrics
-import spark.{Utils, SparkEnv}
+import org.apache.spark.executor.TaskMetrics
+import org.apache.spark.SparkEnv
 import java.nio.ByteBuffer
+import org.apache.spark.util.Utils
 
 // Task result. Also contains updates to accumulator variables.
 // TODO: Use of distributed cache to return result is a hack to get around
diff --git a/core/src/main/scala/spark/scheduler/TaskScheduler.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala
similarity index 92%
rename from core/src/main/scala/spark/scheduler/TaskScheduler.scala
rename to core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala
index 4943d58e254034f43c2f4ce6b305455b5cf40e1a..63be8ba3f58ee584d7626c7113d3162ff61b251b 100644
--- a/core/src/main/scala/spark/scheduler/TaskScheduler.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.scheduler
+package org.apache.spark.scheduler
 
-import spark.scheduler.cluster.Pool
-import spark.scheduler.cluster.SchedulingMode.SchedulingMode
+import org.apache.spark.scheduler.cluster.Pool
+import org.apache.spark.scheduler.cluster.SchedulingMode.SchedulingMode
 /**
  * Low-level task scheduler interface, implemented by both ClusterScheduler and LocalScheduler.
  * These schedulers get sets of tasks submitted to them from the DAGScheduler for each stage,
diff --git a/core/src/main/scala/spark/scheduler/TaskSchedulerListener.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerListener.scala
similarity index 90%
rename from core/src/main/scala/spark/scheduler/TaskSchedulerListener.scala
rename to core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerListener.scala
index 64be50b2d02b8e13b2795024c933379f5312830c..83be051c1a8092029f6bf13b2408ae77fb9fa75f 100644
--- a/core/src/main/scala/spark/scheduler/TaskSchedulerListener.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerListener.scala
@@ -15,13 +15,13 @@
  * limitations under the License.
  */
 
-package spark.scheduler
+package org.apache.spark.scheduler
 
-import spark.scheduler.cluster.TaskInfo
+import org.apache.spark.scheduler.cluster.TaskInfo
 import scala.collection.mutable.Map
 
-import spark.TaskEndReason
-import spark.executor.TaskMetrics
+import org.apache.spark.TaskEndReason
+import org.apache.spark.executor.TaskMetrics
 
 /**
  * Interface for getting events back from the TaskScheduler.
diff --git a/core/src/main/scala/spark/scheduler/TaskSet.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskSet.scala
similarity index 97%
rename from core/src/main/scala/spark/scheduler/TaskSet.scala
rename to core/src/main/scala/org/apache/spark/scheduler/TaskSet.scala
index dc3550dd0b1f705a701757355bdb44cf712db370..c3ad325156f53786224a58fd48e50697c9c94344 100644
--- a/core/src/main/scala/spark/scheduler/TaskSet.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskSet.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.scheduler
+package org.apache.spark.scheduler
 
 import java.util.Properties
 
diff --git a/core/src/main/scala/spark/scheduler/cluster/ClusterScheduler.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/ClusterScheduler.scala
similarity index 98%
rename from core/src/main/scala/spark/scheduler/cluster/ClusterScheduler.scala
rename to core/src/main/scala/org/apache/spark/scheduler/cluster/ClusterScheduler.scala
index 679d899b472a280805f29abcb901548ddef253c8..3196ab5022aea1fe0ecf2190e4b71a22e0489aee 100644
--- a/core/src/main/scala/spark/scheduler/cluster/ClusterScheduler.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/ClusterScheduler.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
 
 import java.lang.{Boolean => JBoolean}
 
@@ -23,10 +23,10 @@ import scala.collection.mutable.ArrayBuffer
 import scala.collection.mutable.HashMap
 import scala.collection.mutable.HashSet
 
-import spark._
-import spark.TaskState.TaskState
-import spark.scheduler._
-import spark.scheduler.cluster.SchedulingMode.SchedulingMode
+import org.apache.spark._
+import org.apache.spark.TaskState.TaskState
+import org.apache.spark.scheduler._
+import org.apache.spark.scheduler.cluster.SchedulingMode.SchedulingMode
 import java.nio.ByteBuffer
 import java.util.concurrent.atomic.AtomicLong
 import java.util.{TimerTask, Timer}
diff --git a/core/src/main/scala/spark/scheduler/cluster/ClusterTaskSetManager.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/ClusterTaskSetManager.scala
similarity index 98%
rename from core/src/main/scala/spark/scheduler/cluster/ClusterTaskSetManager.scala
rename to core/src/main/scala/org/apache/spark/scheduler/cluster/ClusterTaskSetManager.scala
index a4d6880abb55d7b6d2c35c98fb6574a31c45439c..1b31c8c57ec9a5bc94ec9e6c65eef5dd7929c7d8 100644
--- a/core/src/main/scala/spark/scheduler/cluster/ClusterTaskSetManager.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/ClusterTaskSetManager.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
 
 import java.nio.ByteBuffer
 import java.util.{Arrays, NoSuchElementException}
@@ -26,15 +26,15 @@ import scala.collection.mutable.HashSet
 import scala.math.max
 import scala.math.min
 
-import spark.{FetchFailed, Logging, Resubmitted, SparkEnv, Success, TaskEndReason, TaskState, Utils}
-import spark.{ExceptionFailure, SparkException, TaskResultTooBigFailure}
-import spark.TaskState.TaskState
-import spark.scheduler._
+import org.apache.spark.{FetchFailed, Logging, Resubmitted, SparkEnv, Success, TaskEndReason, TaskState}
+import org.apache.spark.{ExceptionFailure, SparkException, TaskResultTooBigFailure}
+import org.apache.spark.TaskState.TaskState
+import org.apache.spark.scheduler._
 import scala.Some
-import spark.FetchFailed
-import spark.ExceptionFailure
-import spark.TaskResultTooBigFailure
-import spark.util.{SystemClock, Clock}
+import org.apache.spark.FetchFailed
+import org.apache.spark.ExceptionFailure
+import org.apache.spark.TaskResultTooBigFailure
+import org.apache.spark.util.{SystemClock, Clock}
 
 
 /**
diff --git a/core/src/main/scala/spark/scheduler/cluster/ExecutorLossReason.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/ExecutorLossReason.scala
similarity index 93%
rename from core/src/main/scala/spark/scheduler/cluster/ExecutorLossReason.scala
rename to core/src/main/scala/org/apache/spark/scheduler/cluster/ExecutorLossReason.scala
index 8825f2dd2424887dbf91d64a5a447a0853f11715..5077b2b48b5749aaf0b6264ffc93767f5560a9db 100644
--- a/core/src/main/scala/spark/scheduler/cluster/ExecutorLossReason.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/ExecutorLossReason.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
 
-import spark.executor.ExecutorExitCode
+import org.apache.spark.executor.ExecutorExitCode
 
 /**
  * Represents an explanation for an executor or whole slave failing or exiting.
diff --git a/core/src/main/scala/spark/scheduler/cluster/Pool.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/Pool.scala
similarity index 96%
rename from core/src/main/scala/spark/scheduler/cluster/Pool.scala
rename to core/src/main/scala/org/apache/spark/scheduler/cluster/Pool.scala
index 83708f07e1a4fb7d77a4dc08380b75803faa57ec..35b32600da6e038fd0aa81bbe3d329527f2fd807 100644
--- a/core/src/main/scala/spark/scheduler/cluster/Pool.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/Pool.scala
@@ -15,13 +15,13 @@
  * limitations under the License.
  */
 
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
 
 import scala.collection.mutable.ArrayBuffer
 import scala.collection.mutable.HashMap
 
-import spark.Logging
-import spark.scheduler.cluster.SchedulingMode.SchedulingMode
+import org.apache.spark.Logging
+import org.apache.spark.scheduler.cluster.SchedulingMode.SchedulingMode
 
 /**
  * A Schedulable entity that represents a collection of Pools or TaskSetManagers
diff --git a/core/src/main/scala/spark/scheduler/cluster/Schedulable.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/Schedulable.scala
similarity index 93%
rename from core/src/main/scala/spark/scheduler/cluster/Schedulable.scala
rename to core/src/main/scala/org/apache/spark/scheduler/cluster/Schedulable.scala
index e77e8e4162e1a435d33a6a84117920b71d60ba74..f4726450ec2396e03b58d4f740a945876dba9268 100644
--- a/core/src/main/scala/spark/scheduler/cluster/Schedulable.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/Schedulable.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
 
-import spark.scheduler.cluster.SchedulingMode.SchedulingMode
+import org.apache.spark.scheduler.cluster.SchedulingMode.SchedulingMode
 
 import scala.collection.mutable.ArrayBuffer
 /**
diff --git a/core/src/main/scala/spark/scheduler/cluster/SchedulableBuilder.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/SchedulableBuilder.scala
similarity index 97%
rename from core/src/main/scala/spark/scheduler/cluster/SchedulableBuilder.scala
rename to core/src/main/scala/org/apache/spark/scheduler/cluster/SchedulableBuilder.scala
index 2fc8a76a0575b868fad3f076bac068879fafdb42..d04eeb6b98334bd6b9da95622f2741d3d7df2280 100644
--- a/core/src/main/scala/spark/scheduler/cluster/SchedulableBuilder.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/SchedulableBuilder.scala
@@ -15,15 +15,15 @@
  * limitations under the License.
  */
 
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
 
 import java.io.{File, FileInputStream, FileOutputStream, FileNotFoundException}
 import java.util.Properties
 
 import scala.xml.XML
 
-import spark.Logging
-import spark.scheduler.cluster.SchedulingMode.SchedulingMode
+import org.apache.spark.Logging
+import org.apache.spark.scheduler.cluster.SchedulingMode.SchedulingMode
 
 
 /**
diff --git a/core/src/main/scala/spark/scheduler/cluster/SchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/SchedulerBackend.scala
similarity index 94%
rename from core/src/main/scala/spark/scheduler/cluster/SchedulerBackend.scala
rename to core/src/main/scala/org/apache/spark/scheduler/cluster/SchedulerBackend.scala
index 4431744ec33e838a8fd621474833234d85541250..d57eb3276f7a186a80b0fe40fe2296b86f5eb0ab 100644
--- a/core/src/main/scala/spark/scheduler/cluster/SchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/SchedulerBackend.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
 
-import spark.{SparkContext, Utils}
+import org.apache.spark.SparkContext
 
 /**
  * A backend interface for cluster scheduling systems that allows plugging in different ones under
diff --git a/core/src/main/scala/spark/scheduler/cluster/SchedulingAlgorithm.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/SchedulingAlgorithm.scala
similarity index 98%
rename from core/src/main/scala/spark/scheduler/cluster/SchedulingAlgorithm.scala
rename to core/src/main/scala/org/apache/spark/scheduler/cluster/SchedulingAlgorithm.scala
index 69e0ac2a6bd83234b55cdf00894b03c3b4d09d74..cbeed4731a79c3205890beaae31b322085a57369 100644
--- a/core/src/main/scala/spark/scheduler/cluster/SchedulingAlgorithm.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/SchedulingAlgorithm.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
 
 /**
  * An interface for sorting algorithms
diff --git a/core/src/main/scala/spark/scheduler/cluster/SchedulingMode.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/SchedulingMode.scala
similarity index 96%
rename from core/src/main/scala/spark/scheduler/cluster/SchedulingMode.scala
rename to core/src/main/scala/org/apache/spark/scheduler/cluster/SchedulingMode.scala
index 55cdf4791f985725d7bf05f431da100ba281bb60..34811389a02f7d27c6203a1ad6c77cbd90a03a6a 100644
--- a/core/src/main/scala/spark/scheduler/cluster/SchedulingMode.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/SchedulingMode.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
 
 /**
  *  "FAIR" and "FIFO" determine which policy is used
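
With the 0.8-era property-based configuration, the mode is chosen before the `SparkContext` is created; a hedged sketch (property names as described in the fair-scheduler documentation of that era):

```scala
// FIFO is the default; switch the top-level policy to fair sharing
System.setProperty("spark.scheduler.mode", "FAIR")
val sc = new org.apache.spark.SparkContext("local[4]", "FairModeExample")

// jobs submitted from this thread then go into the named pool
sc.setLocalProperty("spark.scheduler.pool", "production")
```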
diff --git a/core/src/main/scala/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
similarity index 89%
rename from core/src/main/scala/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
rename to core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
index 7ac574bdc8e574e0b852fdffc156fc4a710a27b9..d003bf1bba92d4339badad080e65711e21800077 100644
--- a/core/src/main/scala/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
@@ -15,12 +15,13 @@
  * limitations under the License.
  */
 
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
 
-import spark.{Utils, Logging, SparkContext}
-import spark.deploy.client.{Client, ClientListener}
-import spark.deploy.{Command, ApplicationDescription}
+import org.apache.spark.{Logging, SparkContext}
+import org.apache.spark.deploy.client.{Client, ClientListener}
+import org.apache.spark.deploy.{Command, ApplicationDescription}
 import scala.collection.mutable.HashMap
+import org.apache.spark.util.Utils
 
 private[spark] class SparkDeploySchedulerBackend(
     scheduler: ClusterScheduler,
@@ -45,7 +46,8 @@ private[spark] class SparkDeploySchedulerBackend(
       System.getProperty("spark.driver.host"), System.getProperty("spark.driver.port"),
       StandaloneSchedulerBackend.ACTOR_NAME)
     val args = Seq(driverUrl, "{{EXECUTOR_ID}}", "{{HOSTNAME}}", "{{CORES}}")
-    val command = Command("spark.executor.StandaloneExecutorBackend", args, sc.executorEnvs)
+    val command = Command(
+      "org.apache.spark.executor.StandaloneExecutorBackend", args, sc.executorEnvs)
     val sparkHome = sc.getSparkHome().getOrElse(null)
     val appDesc = new ApplicationDescription(appName, maxCores, executorMemory, command, sparkHome,
         sc.ui.appUIAddress)
diff --git a/core/src/main/scala/spark/scheduler/cluster/StandaloneClusterMessage.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/StandaloneClusterMessage.scala
similarity index 93%
rename from core/src/main/scala/spark/scheduler/cluster/StandaloneClusterMessage.scala
rename to core/src/main/scala/org/apache/spark/scheduler/cluster/StandaloneClusterMessage.scala
index 05c29eb72f65f573cc1d76334c07cf22f69322c0..9c36d221f69deeafc6c1786a972f879ac6c564a3 100644
--- a/core/src/main/scala/spark/scheduler/cluster/StandaloneClusterMessage.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/StandaloneClusterMessage.scala
@@ -15,13 +15,12 @@
  * limitations under the License.
  */
 
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
 
 import java.nio.ByteBuffer
 
-import spark.TaskState.TaskState
-import spark.Utils
-import spark.util.SerializableBuffer
+import org.apache.spark.TaskState.TaskState
+import org.apache.spark.util.{Utils, SerializableBuffer}
 
 
 private[spark] sealed trait StandaloneClusterMessage extends Serializable
diff --git a/core/src/main/scala/spark/scheduler/cluster/StandaloneSchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/StandaloneSchedulerBackend.scala
similarity index 97%
rename from core/src/main/scala/spark/scheduler/cluster/StandaloneSchedulerBackend.scala
rename to core/src/main/scala/org/apache/spark/scheduler/cluster/StandaloneSchedulerBackend.scala
index 3203be10294d375d860e0e14ed24912a2edb31d0..b4ea0be415e58ac94c4a7b2419eaca99bc45c147 100644
--- a/core/src/main/scala/spark/scheduler/cluster/StandaloneSchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/StandaloneSchedulerBackend.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
 
 import java.util.concurrent.atomic.AtomicInteger
 
@@ -28,8 +28,9 @@ import akka.remote.{RemoteClientShutdown, RemoteClientDisconnected, RemoteClient
 import akka.util.Duration
 import akka.util.duration._
 
-import spark.{Utils, SparkException, Logging, TaskState}
-import spark.scheduler.cluster.StandaloneClusterMessages._
+import org.apache.spark.{SparkException, Logging, TaskState}
+import org.apache.spark.scheduler.cluster.StandaloneClusterMessages._
+import org.apache.spark.util.Utils
 
 /**
  * A standalone scheduler backend, which waits for standalone executors to connect to it through
diff --git a/core/src/main/scala/spark/scheduler/cluster/TaskDescription.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/TaskDescription.scala
similarity index 93%
rename from core/src/main/scala/spark/scheduler/cluster/TaskDescription.scala
rename to core/src/main/scala/org/apache/spark/scheduler/cluster/TaskDescription.scala
index 187553233fb42a28ec85c95fc4f8710eccb08c2a..309ac2f6c9d0537bd178570debf2b7c315be59de 100644
--- a/core/src/main/scala/spark/scheduler/cluster/TaskDescription.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/TaskDescription.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
 
 import java.nio.ByteBuffer
-import spark.util.SerializableBuffer
+import org.apache.spark.util.SerializableBuffer
 
 private[spark] class TaskDescription(
     val taskId: Long,
diff --git a/core/src/main/scala/spark/scheduler/cluster/TaskInfo.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/TaskInfo.scala
similarity index 95%
rename from core/src/main/scala/spark/scheduler/cluster/TaskInfo.scala
rename to core/src/main/scala/org/apache/spark/scheduler/cluster/TaskInfo.scala
index c2c5522686f74d62384a5acb0d25d50d7e988085..9685fb1a67b7ade2637ef78021077084eae50c9e 100644
--- a/core/src/main/scala/spark/scheduler/cluster/TaskInfo.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/TaskInfo.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
 
-import spark.Utils
+import org.apache.spark.util.Utils
 
 /**
  * Information about a running task attempt inside a TaskSet.
diff --git a/core/src/main/scala/spark/scheduler/cluster/TaskLocality.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/TaskLocality.scala
similarity index 96%
rename from core/src/main/scala/spark/scheduler/cluster/TaskLocality.scala
rename to core/src/main/scala/org/apache/spark/scheduler/cluster/TaskLocality.scala
index 1c33e41f87e81d082bd3151c4f55fd39df32f32f..5d4130e14a8ad1ab40f87111e56596c7ff217f0d 100644
--- a/core/src/main/scala/spark/scheduler/cluster/TaskLocality.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/TaskLocality.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
 
 
 private[spark] object TaskLocality
diff --git a/core/src/main/scala/spark/scheduler/cluster/TaskSetManager.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/TaskSetManager.scala
similarity index 93%
rename from core/src/main/scala/spark/scheduler/cluster/TaskSetManager.scala
rename to core/src/main/scala/org/apache/spark/scheduler/cluster/TaskSetManager.scala
index 0248830b7a24c250dbdd6b41d2c9a4068f692f88..648a3ef922c906e7fbe82f7ade252bd8f6b0f7ca 100644
--- a/core/src/main/scala/spark/scheduler/cluster/TaskSetManager.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/TaskSetManager.scala
@@ -15,12 +15,12 @@
  * limitations under the License.
  */
 
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
 
 import java.nio.ByteBuffer
 
-import spark.TaskState.TaskState
-import spark.scheduler.TaskSet
+import org.apache.spark.TaskState.TaskState
+import org.apache.spark.scheduler.TaskSet
 
 /**
  * Tracks and schedules the tasks within a single TaskSet. This class keeps track of the status of
diff --git a/core/src/main/scala/spark/scheduler/cluster/WorkerOffer.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/WorkerOffer.scala
similarity index 95%
rename from core/src/main/scala/spark/scheduler/cluster/WorkerOffer.scala
rename to core/src/main/scala/org/apache/spark/scheduler/cluster/WorkerOffer.scala
index 1d09bd9b034193f71bf22e2aa8bb5c44424c2994..938f62883a104df5e60b11333e939ef20293792e 100644
--- a/core/src/main/scala/spark/scheduler/cluster/WorkerOffer.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/WorkerOffer.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
 
 /**
  * Represents free resources available on an executor.
diff --git a/core/src/main/scala/spark/scheduler/local/LocalScheduler.scala b/core/src/main/scala/org/apache/spark/scheduler/local/LocalScheduler.scala
similarity index 96%
rename from core/src/main/scala/spark/scheduler/local/LocalScheduler.scala
rename to core/src/main/scala/org/apache/spark/scheduler/local/LocalScheduler.scala
index 5be4dbd9f0b527489eeba69ac234cbc8b8593ad8..e8fa5e2f1704277a766fc8bdf2b4bb5adb492698 100644
--- a/core/src/main/scala/spark/scheduler/local/LocalScheduler.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/local/LocalScheduler.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.scheduler.local
+package org.apache.spark.scheduler.local
 
 import java.io.File
 import java.lang.management.ManagementFactory
@@ -27,13 +27,14 @@ import scala.collection.mutable.ArrayBuffer
 import scala.collection.mutable.HashMap
 import scala.collection.mutable.HashSet
 
-import spark._
-import spark.TaskState.TaskState
-import spark.executor.ExecutorURLClassLoader
-import spark.scheduler._
-import spark.scheduler.cluster._
-import spark.scheduler.cluster.SchedulingMode.SchedulingMode
+import org.apache.spark._
+import org.apache.spark.TaskState.TaskState
+import org.apache.spark.executor.ExecutorURLClassLoader
+import org.apache.spark.scheduler._
+import org.apache.spark.scheduler.cluster._
+import org.apache.spark.scheduler.cluster.SchedulingMode.SchedulingMode
 import akka.actor._
+import org.apache.spark.util.Utils
 
 /**
  * A FIFO or Fair TaskScheduler implementation that runs tasks locally in a thread pool. Optionally
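
Per the doc comment, LocalScheduler can order tasks FIFO or FAIR. A minimal sketch of flipping the policy, assuming (as on 0.8-era builds) that the `spark.scheduler.mode` system property selects it and must be set before the SparkContext is created:

    import org.apache.spark.SparkContext

    object LocalFairDemo {
      def main(args: Array[String]) {
        System.setProperty("spark.scheduler.mode", "FAIR") // assumed property name; default is FIFO
        val sc = new SparkContext("local[2]", "fair-demo")
        println(sc.parallelize(1 to 100).map(_ * 2).reduce(_ + _))
        sc.stop()
      }
    }
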
diff --git a/core/src/main/scala/spark/scheduler/local/LocalTaskSetManager.scala b/core/src/main/scala/org/apache/spark/scheduler/local/LocalTaskSetManager.scala
similarity index 94%
rename from core/src/main/scala/spark/scheduler/local/LocalTaskSetManager.scala
rename to core/src/main/scala/org/apache/spark/scheduler/local/LocalTaskSetManager.scala
index e237f289e367bd31062ad0e99eff8d7b571ea1b7..e52cb998bdf42df09e45b23b2692c9aa0b497a13 100644
--- a/core/src/main/scala/spark/scheduler/local/LocalTaskSetManager.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/local/LocalTaskSetManager.scala
@@ -15,16 +15,16 @@
  * limitations under the License.
  */
 
-package spark.scheduler.local
+package org.apache.spark.scheduler.local
 
 import java.nio.ByteBuffer
 import scala.collection.mutable.ArrayBuffer
 import scala.collection.mutable.HashMap
 
-import spark.{ExceptionFailure, Logging, SparkEnv, Success, TaskState}
-import spark.TaskState.TaskState
-import spark.scheduler.{Task, TaskResult, TaskSet}
-import spark.scheduler.cluster.{Schedulable, TaskDescription, TaskInfo, TaskLocality, TaskSetManager}
+import org.apache.spark.{ExceptionFailure, Logging, SparkEnv, Success, TaskState}
+import org.apache.spark.TaskState.TaskState
+import org.apache.spark.scheduler.{Task, TaskResult, TaskSet}
+import org.apache.spark.scheduler.cluster.{Schedulable, TaskDescription, TaskInfo, TaskLocality, TaskSetManager}
 
 
 private[spark] class LocalTaskSetManager(sched: LocalScheduler, val taskSet: TaskSet)
diff --git a/core/src/main/scala/spark/scheduler/mesos/CoarseMesosSchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/mesos/CoarseMesosSchedulerBackend.scala
similarity index 94%
rename from core/src/main/scala/spark/scheduler/mesos/CoarseMesosSchedulerBackend.scala
rename to core/src/main/scala/org/apache/spark/scheduler/mesos/CoarseMesosSchedulerBackend.scala
index eef3ee142519f0ac9b6cdf6e62bc576090fa208e..3dbe61d7067f2fb808b22082efd21a6832410501 100644
--- a/core/src/main/scala/spark/scheduler/mesos/CoarseMesosSchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/mesos/CoarseMesosSchedulerBackend.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.scheduler.mesos
+package org.apache.spark.scheduler.mesos
 
 import com.google.protobuf.ByteString
 
@@ -23,14 +23,14 @@ import org.apache.mesos.{Scheduler => MScheduler}
 import org.apache.mesos._
 import org.apache.mesos.Protos.{TaskInfo => MesosTaskInfo, TaskState => MesosTaskState, _}
 
-import spark.{SparkException, Utils, Logging, SparkContext}
+import org.apache.spark.{SparkException, Logging, SparkContext}
 import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
 import scala.collection.JavaConversions._
 import java.io.File
-import spark.scheduler.cluster._
+import org.apache.spark.scheduler.cluster._
 import java.util.{ArrayList => JArrayList, List => JList}
 import java.util.Collections
-import spark.TaskState
+import org.apache.spark.TaskState
 
 /**
  * A SchedulerBackend that runs tasks on Mesos, but uses "coarse-grained" tasks, where it holds
@@ -126,14 +126,16 @@ private[spark] class CoarseMesosSchedulerBackend(
     val uri = System.getProperty("spark.executor.uri")
     if (uri == null) {
       val runScript = new File(sparkHome, "spark-class").getCanonicalPath
-      command.setValue("\"%s\" spark.executor.StandaloneExecutorBackend %s %s %s %d".format(
-        runScript, driverUrl, offer.getSlaveId.getValue, offer.getHostname, numCores))
+      command.setValue(
+        "\"%s\" org.apache.spark.executor.StandaloneExecutorBackend %s %s %s %d".format(
+          runScript, driverUrl, offer.getSlaveId.getValue, offer.getHostname, numCores))
     } else {
       // Grab everything to the first '.'. We'll use that and '*' to
       // glob the directory "correctly".
       val basename = uri.split('/').last.split('.').head
-      command.setValue("cd %s*; ./spark-class spark.executor.StandaloneExecutorBackend %s %s %s %d".format(
-        basename, driverUrl, offer.getSlaveId.getValue, offer.getHostname, numCores))
+      command.setValue(
+        "cd %s*; ./spark-class org.apache.spark.executor.StandaloneExecutorBackend %s %s %s %d".format(
+          basename, driverUrl, offer.getSlaveId.getValue, offer.getHostname, numCores))
       command.addUris(CommandInfo.URI.newBuilder().setValue(uri))
     }
     return command.build()
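
The `else` branch builds a glob prefix by keeping everything up to the first `'.'` of the archive name, then lets the shell's `%s*` expansion find the unpacked directory. A standalone check of what that expression yields (the URI is made up):

    object BasenameDemo {
      def main(args: Array[String]) {
        val uri = "hdfs://nn:9000/dist/spark-0.8.0-incubating.tgz" // hypothetical executor URI
        val basename = uri.split('/').last.split('.').head
        println(basename)                                  // prints "spark-0"
        println("cd " + basename + "*; ./spark-class ...") // the command shape built above
      }
    }
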
diff --git a/core/src/main/scala/spark/scheduler/mesos/MesosSchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/mesos/MesosSchedulerBackend.scala
similarity index 98%
rename from core/src/main/scala/spark/scheduler/mesos/MesosSchedulerBackend.scala
rename to core/src/main/scala/org/apache/spark/scheduler/mesos/MesosSchedulerBackend.scala
index f6069a5775e2ba6e3550091ab69ff7dff62f4776..541f86e3381db89a169b1ffb2e69e0d98cecd20f 100644
--- a/core/src/main/scala/spark/scheduler/mesos/MesosSchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/mesos/MesosSchedulerBackend.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.scheduler.mesos
+package org.apache.spark.scheduler.mesos
 
 import com.google.protobuf.ByteString
 
@@ -23,14 +23,15 @@ import org.apache.mesos.{Scheduler => MScheduler}
 import org.apache.mesos._
 import org.apache.mesos.Protos.{TaskInfo => MesosTaskInfo, TaskState => MesosTaskState, _}
 
-import spark.{SparkException, Utils, Logging, SparkContext}
+import org.apache.spark.{SparkException, Logging, SparkContext}
 import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
 import scala.collection.JavaConversions._
 import java.io.File
-import spark.scheduler.cluster._
+import org.apache.spark.scheduler.cluster._
 import java.util.{ArrayList => JArrayList, List => JList}
 import java.util.Collections
-import spark.TaskState
+import org.apache.spark.TaskState
+import org.apache.spark.util.Utils
 
 /**
  * A SchedulerBackend for running fine-grained tasks on Mesos. Each Spark task is mapped to a
diff --git a/core/src/main/scala/spark/JavaSerializer.scala b/core/src/main/scala/org/apache/spark/serializer/JavaSerializer.scala
similarity index 94%
rename from core/src/main/scala/spark/JavaSerializer.scala
rename to core/src/main/scala/org/apache/spark/serializer/JavaSerializer.scala
index 04c5f44e6be363c0a2ab9bf0b66297ada36437f6..4de81617b1dd8bf38b617b86e1e67d1158729463 100644
--- a/core/src/main/scala/spark/JavaSerializer.scala
+++ b/core/src/main/scala/org/apache/spark/serializer/JavaSerializer.scala
@@ -15,13 +15,12 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark.serializer
 
 import java.io._
 import java.nio.ByteBuffer
 
-import serializer.{Serializer, SerializerInstance, DeserializationStream, SerializationStream}
-import spark.util.ByteBufferInputStream
+import org.apache.spark.util.ByteBufferInputStream
 
 private[spark] class JavaSerializationStream(out: OutputStream) extends SerializationStream {
   val objOut = new ObjectOutputStream(out)
diff --git a/core/src/main/scala/spark/KryoSerializer.scala b/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala
similarity index 92%
rename from core/src/main/scala/spark/KryoSerializer.scala
rename to core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala
index eeb2993d8a5081bdecc66bb6049fb19d41dc39e3..24ef204aa19909dc20bcc5b169a09c0ace56856e 100644
--- a/core/src/main/scala/spark/KryoSerializer.scala
+++ b/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala
@@ -15,17 +15,74 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark.serializer
 
-import java.io._
 import java.nio.ByteBuffer
-import com.esotericsoftware.kryo.{Kryo, KryoException}
-import com.esotericsoftware.kryo.io.{Input => KryoInput, Output => KryoOutput}
+import java.io.{EOFException, InputStream, OutputStream}
+
 import com.esotericsoftware.kryo.serializers.{JavaSerializer => KryoJavaSerializer}
+import com.esotericsoftware.kryo.{KryoException, Kryo}
+import com.esotericsoftware.kryo.io.{Input => KryoInput, Output => KryoOutput}
 import com.twitter.chill.ScalaKryoInstantiator
-import serializer.{SerializerInstance, DeserializationStream, SerializationStream}
-import spark.broadcast._
-import spark.storage._
+
+import org.apache.spark.{SerializableWritable, Logging}
+import org.apache.spark.storage.{GetBlock, GotBlock, PutBlock, StorageLevel}
+
+import org.apache.spark.broadcast.HttpBroadcast
+
+/**
+ * A Spark serializer that uses the [[http://code.google.com/p/kryo/wiki/V1Documentation Kryo 1.x library]].
+ */
+class KryoSerializer extends org.apache.spark.serializer.Serializer with Logging {
+  private val bufferSize = System.getProperty("spark.kryoserializer.buffer.mb", "2").toInt * 1024 * 1024
+
+  def newKryoOutput() = new KryoOutput(bufferSize)
+
+  def newKryoInput() = new KryoInput(bufferSize)
+
+  def newKryo(): Kryo = {
+    val instantiator = new ScalaKryoInstantiator
+    val kryo = instantiator.newKryo()
+    val classLoader = Thread.currentThread.getContextClassLoader
+
+    // Register some commonly used classes
+    val toRegister: Seq[AnyRef] = Seq(
+      ByteBuffer.allocate(1),
+      StorageLevel.MEMORY_ONLY,
+      PutBlock("1", ByteBuffer.allocate(1), StorageLevel.MEMORY_ONLY),
+      GotBlock("1", ByteBuffer.allocate(1)),
+      GetBlock("1")
+    )
+
+    for (obj <- toRegister) kryo.register(obj.getClass)
+
+    // Allow sending SerializableWritable
+    kryo.register(classOf[SerializableWritable[_]], new KryoJavaSerializer())
+    kryo.register(classOf[HttpBroadcast[_]], new KryoJavaSerializer())
+
+    // Allow the user to register their own classes by setting spark.kryo.registrator
+    try {
+      Option(System.getProperty("spark.kryo.registrator")).foreach { regCls =>
+        logDebug("Running user registrator: " + regCls)
+        val reg = Class.forName(regCls, true, classLoader).newInstance().asInstanceOf[KryoRegistrator]
+        reg.registerClasses(kryo)
+      }
+    } catch {
+      case e: Exception => logError("Failed to register spark.kryo.registrator", e)
+    }
+
+    kryo.setClassLoader(classLoader)
+
+    // Allow disabling Kryo reference tracking if user knows their object graphs don't have loops
+    kryo.setReferences(System.getProperty("spark.kryo.referenceTracking", "true").toBoolean)
+
+    kryo
+  }
+
+  def newInstance(): SerializerInstance = {
+    new KryoSerializerInstance(this)
+  }
+}
 
 private[spark]
 class KryoSerializationStream(kryo: Kryo, outStream: OutputStream) extends SerializationStream {
@@ -100,57 +157,3 @@ private[spark] class KryoSerializerInstance(ks: KryoSerializer) extends Serializ
 trait KryoRegistrator {
   def registerClasses(kryo: Kryo)
 }
-
-/**
- * A Spark serializer that uses the [[http://code.google.com/p/kryo/wiki/V1Documentation Kryo 1.x library]].
- */
-class KryoSerializer extends spark.serializer.Serializer with Logging {
-  private val bufferSize = System.getProperty("spark.kryoserializer.buffer.mb", "2").toInt * 1024 * 1024
-
-  def newKryoOutput() = new KryoOutput(bufferSize)
-
-  def newKryoInput() = new KryoInput(bufferSize)
-
-  def newKryo(): Kryo = {
-    val instantiator = new ScalaKryoInstantiator
-    val kryo = instantiator.newKryo()
-    val classLoader = Thread.currentThread.getContextClassLoader
-
-    // Register some commonly used classes
-    val toRegister: Seq[AnyRef] = Seq(
-      ByteBuffer.allocate(1),
-      StorageLevel.MEMORY_ONLY,
-      PutBlock("1", ByteBuffer.allocate(1), StorageLevel.MEMORY_ONLY),
-      GotBlock("1", ByteBuffer.allocate(1)),
-      GetBlock("1")
-    )
-
-    for (obj <- toRegister) kryo.register(obj.getClass)
-
-    // Allow sending SerializableWritable
-    kryo.register(classOf[SerializableWritable[_]], new KryoJavaSerializer())
-    kryo.register(classOf[HttpBroadcast[_]], new KryoJavaSerializer())
-
-    // Allow the user to register their own classes by setting spark.kryo.registrator
-    try {
-      Option(System.getProperty("spark.kryo.registrator")).foreach { regCls =>
-        logDebug("Running user registrator: " + regCls)
-        val reg = Class.forName(regCls, true, classLoader).newInstance().asInstanceOf[KryoRegistrator]
-        reg.registerClasses(kryo)
-      }
-    } catch {
-      case _: Exception => println("Failed to register spark.kryo.registrator")
-    }
-
-    kryo.setClassLoader(classLoader)
-
-    // Allow disabling Kryo reference tracking if user knows their object graphs don't have loops
-    kryo.setReferences(System.getProperty("spark.kryo.referenceTracking", "true").toBoolean)
-
-    kryo
-  }
-
-  def newInstance(): SerializerInstance = {
-    new KryoSerializerInstance(this)
-  }
-}
\ No newline at end of file
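
`newKryo()` loads the class named by `spark.kryo.registrator` reflectively (`Class.forName(...).newInstance()`), so a user registrator needs a public no-arg constructor and must now implement the relocated trait. A sketch of the user side; `MyClass` and `MyRegistrator` are illustrative names:

    import com.esotericsoftware.kryo.Kryo
    import org.apache.spark.serializer.KryoRegistrator

    case class MyClass(a: Int, b: String) // hypothetical user type worth registering

    class MyRegistrator extends KryoRegistrator {
      override def registerClasses(kryo: Kryo) {
        kryo.register(classOf[MyClass])
      }
    }

    // Both properties must be set before the SparkContext (and thus the
    // serializer) is created, e.g.:
    //   System.setProperty("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    //   System.setProperty("spark.kryo.registrator", "myapp.MyRegistrator")
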
diff --git a/core/src/main/scala/spark/serializer/Serializer.scala b/core/src/main/scala/org/apache/spark/serializer/Serializer.scala
similarity index 91%
rename from core/src/main/scala/spark/serializer/Serializer.scala
rename to core/src/main/scala/org/apache/spark/serializer/Serializer.scala
index dc94d42bb66ac23c843d2f04a649e2bb4593aec5..160cca4d6c54a69dca1232a38b5cbf94ad9e54d7 100644
--- a/core/src/main/scala/spark/serializer/Serializer.scala
+++ b/core/src/main/scala/org/apache/spark/serializer/Serializer.scala
@@ -15,19 +15,19 @@
  * limitations under the License.
  */
 
-package spark.serializer
+package org.apache.spark.serializer
 
 import java.io.{EOFException, InputStream, OutputStream}
 import java.nio.ByteBuffer
 
 import it.unimi.dsi.fastutil.io.FastByteArrayOutputStream
 
-import spark.util.ByteBufferInputStream
+import org.apache.spark.util.{NextIterator, ByteBufferInputStream}
 
 
 /**
  * A serializer. Because some serialization libraries are not thread safe, this class is used to
- * create [[spark.serializer.SerializerInstance]] objects that do the actual serialization and are
+ * create [[org.apache.spark.serializer.SerializerInstance]] objects that do the actual serialization and are
  * guaranteed to only be called from one thread at a time.
  */
 trait Serializer {
@@ -95,7 +95,7 @@ trait DeserializationStream {
    * Read the elements of this stream through an iterator. This can only be called once, as
    * reading each element will consume data from the input source.
    */
-  def asIterator: Iterator[Any] = new spark.util.NextIterator[Any] {
+  def asIterator: Iterator[Any] = new NextIterator[Any] {
     override protected def getNext() = {
       try {
         readObject[Any]()
diff --git a/core/src/main/scala/spark/serializer/SerializerManager.scala b/core/src/main/scala/org/apache/spark/serializer/SerializerManager.scala
similarity index 98%
rename from core/src/main/scala/spark/serializer/SerializerManager.scala
rename to core/src/main/scala/org/apache/spark/serializer/SerializerManager.scala
index b7b24705a2a05d0822629b07be080cd3bd486e9e..2955986feced55d1a2f3ec00f174b5b793560f8c 100644
--- a/core/src/main/scala/spark/serializer/SerializerManager.scala
+++ b/core/src/main/scala/org/apache/spark/serializer/SerializerManager.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.serializer
+package org.apache.spark.serializer
 
 import java.util.concurrent.ConcurrentHashMap
 
diff --git a/core/src/main/scala/spark/storage/BlockException.scala b/core/src/main/scala/org/apache/spark/storage/BlockException.scala
similarity index 96%
rename from core/src/main/scala/spark/storage/BlockException.scala
rename to core/src/main/scala/org/apache/spark/storage/BlockException.scala
index 8ebfaf3cbf6cdcb51a2b9916ab370f3930a45bb3..290dbce4f545a6d5ccf292529cec1f4eafc3a51e 100644
--- a/core/src/main/scala/spark/storage/BlockException.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockException.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.storage
+package org.apache.spark.storage
 
 private[spark]
 case class BlockException(blockId: String, message: String) extends Exception(message)
diff --git a/core/src/main/scala/spark/storage/BlockFetchTracker.scala b/core/src/main/scala/org/apache/spark/storage/BlockFetchTracker.scala
similarity index 96%
rename from core/src/main/scala/spark/storage/BlockFetchTracker.scala
rename to core/src/main/scala/org/apache/spark/storage/BlockFetchTracker.scala
index 265e554ad88f485e79011366d452d40d533a4c26..2e0b0e6eda76525d557d8aeb6b5dc7ba9f0eeca5 100644
--- a/core/src/main/scala/spark/storage/BlockFetchTracker.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockFetchTracker.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.storage
+package org.apache.spark.storage
 
 private[spark] trait BlockFetchTracker {
   def totalBlocks : Int
diff --git a/core/src/main/scala/spark/storage/BlockFetcherIterator.scala b/core/src/main/scala/org/apache/spark/storage/BlockFetcherIterator.scala
similarity index 97%
rename from core/src/main/scala/spark/storage/BlockFetcherIterator.scala
rename to core/src/main/scala/org/apache/spark/storage/BlockFetcherIterator.scala
index 568783d893134069277ed428e2288b900aa9bbde..3aeda3879d1f15a7424c6470959eb56e7619ab46 100644
--- a/core/src/main/scala/spark/storage/BlockFetcherIterator.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockFetcherIterator.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.storage
+package org.apache.spark.storage
 
 import java.nio.ByteBuffer
 import java.util.concurrent.LinkedBlockingQueue
@@ -26,13 +26,13 @@ import scala.collection.mutable.Queue
 
 import io.netty.buffer.ByteBuf
 
-import spark.Logging
-import spark.Utils
-import spark.SparkException
-import spark.network.BufferMessage
-import spark.network.ConnectionManagerId
-import spark.network.netty.ShuffleCopier
-import spark.serializer.Serializer
+import org.apache.spark.Logging
+import org.apache.spark.SparkException
+import org.apache.spark.network.BufferMessage
+import org.apache.spark.network.ConnectionManagerId
+import org.apache.spark.network.netty.ShuffleCopier
+import org.apache.spark.serializer.Serializer
+import org.apache.spark.util.Utils
 
 
 /**
diff --git a/core/src/main/scala/spark/storage/BlockManager.scala b/core/src/main/scala/org/apache/spark/storage/BlockManager.scala
similarity index 99%
rename from core/src/main/scala/spark/storage/BlockManager.scala
rename to core/src/main/scala/org/apache/spark/storage/BlockManager.scala
index 2a6ec2a55dddd893a2b14e2843fc1bf0e22dc63b..60fdc5f2ee24845a917cb94b3662c595e1d932ab 100644
--- a/core/src/main/scala/spark/storage/BlockManager.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManager.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.storage
+package org.apache.spark.storage
 
 import java.io.{InputStream, OutputStream}
 import java.nio.{ByteBuffer, MappedByteBuffer}
@@ -29,11 +29,11 @@ import akka.util.duration._
 
 import it.unimi.dsi.fastutil.io.FastByteArrayOutputStream
 
-import spark.{Logging, SparkEnv, SparkException, Utils}
-import spark.io.CompressionCodec
-import spark.network._
-import spark.serializer.Serializer
-import spark.util.{ByteBufferInputStream, IdGenerator, MetadataCleaner, TimeStampedHashMap}
+import org.apache.spark.{Logging, SparkEnv, SparkException}
+import org.apache.spark.io.CompressionCodec
+import org.apache.spark.network._
+import org.apache.spark.serializer.Serializer
+import org.apache.spark.util._
 
 import sun.nio.ch.DirectBuffer
 
diff --git a/core/src/main/scala/spark/storage/BlockManagerId.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerId.scala
similarity index 94%
rename from core/src/main/scala/spark/storage/BlockManagerId.scala
rename to core/src/main/scala/org/apache/spark/storage/BlockManagerId.scala
index b36a6176c0815bf6d990ef75bc1745484ff36a26..74207f59af17078fea9a7997d5d12456397812aa 100644
--- a/core/src/main/scala/spark/storage/BlockManagerId.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerId.scala
@@ -15,11 +15,11 @@
  * limitations under the License.
  */
 
-package spark.storage
+package org.apache.spark.storage
 
 import java.io.{Externalizable, IOException, ObjectInput, ObjectOutput}
 import java.util.concurrent.ConcurrentHashMap
-import spark.Utils
+import org.apache.spark.util.Utils
 
 /**
  * This class represents a unique identifier for a BlockManager.
@@ -92,13 +92,13 @@ private[spark] class BlockManagerId private (
 private[spark] object BlockManagerId {
 
   /**
-   * Returns a [[spark.storage.BlockManagerId]] for the given configuraiton.
+   * Returns a [[org.apache.spark.storage.BlockManagerId]] for the given configuration.
    *
    * @param execId ID of the executor.
    * @param host Host name of the block manager.
    * @param port Port of the block manager.
    * @param nettyPort Optional port for the Netty-based shuffle sender.
-   * @return A new [[spark.storage.BlockManagerId]].
+   * @return A new [[org.apache.spark.storage.BlockManagerId]].
    */
   def apply(execId: String, host: String, port: Int, nettyPort: Int) =
     getCachedBlockManagerId(new BlockManagerId(execId, host, port, nettyPort))
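
Because `apply` funnels through `getCachedBlockManagerId`, equal ids should collapse to one canonical instance. A sketch of that behavior; it sits in `org.apache.spark.storage` only because the API is `private[spark]`:

    package org.apache.spark.storage

    object BlockManagerIdDemo {
      def main(args: Array[String]) {
        val a = BlockManagerId("exec-1", "host-a", 7077, 0)
        val b = BlockManagerId("exec-1", "host-a", 7077, 0)
        println(a == b) // true: value equality
        println(a eq b) // expected true: the cache hands back the canonical instance
      }
    }
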
diff --git a/core/src/main/scala/spark/storage/BlockManagerMaster.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala
similarity index 97%
rename from core/src/main/scala/spark/storage/BlockManagerMaster.scala
rename to core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala
index 76128e8cfffe9f0773f7807d950bc56554ebdc93..cf463d6ffc5cfc5500999fd9b905ec16df9fa4a4 100644
--- a/core/src/main/scala/spark/storage/BlockManagerMaster.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala
@@ -15,15 +15,15 @@
  * limitations under the License.
  */
 
-package spark.storage
+package org.apache.spark.storage
 
 import akka.actor.ActorRef
 import akka.dispatch.{Await, Future}
 import akka.pattern.ask
 import akka.util.Duration
 
-import spark.{Logging, SparkException}
-import spark.storage.BlockManagerMessages._
+import org.apache.spark.{Logging, SparkException}
+import org.apache.spark.storage.BlockManagerMessages._
 
 
 private[spark] class BlockManagerMaster(var driverActor: ActorRef) extends Logging {
diff --git a/core/src/main/scala/spark/storage/BlockManagerMasterActor.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterActor.scala
similarity index 98%
rename from core/src/main/scala/spark/storage/BlockManagerMasterActor.scala
rename to core/src/main/scala/org/apache/spark/storage/BlockManagerMasterActor.scala
index b7a981d1014d48457fbe95d7981fa734d28c3691..c7b23ab094adefdac680a9ae407ff740b58fcfce 100644
--- a/core/src/main/scala/spark/storage/BlockManagerMasterActor.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterActor.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.storage
+package org.apache.spark.storage
 
 import java.util.{HashMap => JHashMap}
 
@@ -28,8 +28,9 @@ import akka.pattern.ask
 import akka.util.Duration
 import akka.util.duration._
 
-import spark.{Logging, Utils, SparkException}
-import spark.storage.BlockManagerMessages._
+import org.apache.spark.{Logging, SparkException}
+import org.apache.spark.storage.BlockManagerMessages._
+import org.apache.spark.util.Utils
 
 
 /**
diff --git a/core/src/main/scala/spark/storage/BlockManagerMessages.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerMessages.scala
similarity index 99%
rename from core/src/main/scala/spark/storage/BlockManagerMessages.scala
rename to core/src/main/scala/org/apache/spark/storage/BlockManagerMessages.scala
index 9375a9ca54825138d3988a6615af8d941ebff06a..24333a179c96467aa3f42e0aea0d69d0bb7edc6c 100644
--- a/core/src/main/scala/spark/storage/BlockManagerMessages.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerMessages.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.storage
+package org.apache.spark.storage
 
 import java.io.{Externalizable, ObjectInput, ObjectOutput}
 
diff --git a/core/src/main/scala/spark/storage/BlockManagerSlaveActor.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerSlaveActor.scala
similarity index 93%
rename from core/src/main/scala/spark/storage/BlockManagerSlaveActor.scala
rename to core/src/main/scala/org/apache/spark/storage/BlockManagerSlaveActor.scala
index 6e5fb43732905e66deedb0ea6e2bb17272e908a2..951503019f765001bd0313d17c663f6b3695afa2 100644
--- a/core/src/main/scala/spark/storage/BlockManagerSlaveActor.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerSlaveActor.scala
@@ -15,11 +15,11 @@
  * limitations under the License.
  */
 
-package spark.storage
+package org.apache.spark.storage
 
 import akka.actor.Actor
 
-import spark.storage.BlockManagerMessages._
+import org.apache.spark.storage.BlockManagerMessages._
 
 
 /**
diff --git a/core/src/main/scala/spark/storage/BlockManagerSource.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerSource.scala
similarity index 95%
rename from core/src/main/scala/spark/storage/BlockManagerSource.scala
rename to core/src/main/scala/org/apache/spark/storage/BlockManagerSource.scala
index 2aecd1ea7167662c9ef858abb646a72ed856a9c3..24190cdd675e91c2a0d07d5a58d744f05ed6b86b 100644
--- a/core/src/main/scala/spark/storage/BlockManagerSource.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerSource.scala
@@ -1,8 +1,8 @@
-package spark.storage
+package org.apache.spark.storage
 
 import com.codahale.metrics.{Gauge,MetricRegistry}
 
-import spark.metrics.source.Source
+import org.apache.spark.metrics.source.Source
 
 
 private[spark] class BlockManagerSource(val blockManager: BlockManager) extends Source {
diff --git a/core/src/main/scala/spark/storage/BlockManagerWorker.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerWorker.scala
similarity index 97%
rename from core/src/main/scala/spark/storage/BlockManagerWorker.scala
rename to core/src/main/scala/org/apache/spark/storage/BlockManagerWorker.scala
index 39064bce92c9476bf314301286ee6c6af53deb83..678c38203c797a16bed558c85543c5a5be1fdeb4 100644
--- a/core/src/main/scala/spark/storage/BlockManagerWorker.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerWorker.scala
@@ -15,12 +15,13 @@
  * limitations under the License.
  */
 
-package spark.storage
+package org.apache.spark.storage
 
 import java.nio.ByteBuffer
 
-import spark.{Logging, Utils}
-import spark.network._
+import org.apache.spark.Logging
+import org.apache.spark.network._
+import org.apache.spark.util.Utils
 
 /**
  * A network interface for BlockManager. Each slave should have one
diff --git a/core/src/main/scala/spark/storage/BlockMessage.scala b/core/src/main/scala/org/apache/spark/storage/BlockMessage.scala
similarity index 98%
rename from core/src/main/scala/spark/storage/BlockMessage.scala
rename to core/src/main/scala/org/apache/spark/storage/BlockMessage.scala
index bcce26b7c14e94316b6818b9eef47156f0503cce..d8fa6a91d15f51d2e30e6a7f0c546c6e20970afa 100644
--- a/core/src/main/scala/spark/storage/BlockMessage.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockMessage.scala
@@ -15,14 +15,14 @@
  * limitations under the License.
  */
 
-package spark.storage
+package org.apache.spark.storage
 
 import java.nio.ByteBuffer
 
 import scala.collection.mutable.StringBuilder
 import scala.collection.mutable.ArrayBuffer
 
-import spark.network._
+import org.apache.spark.network._
 
 private[spark] case class GetBlock(id: String)
 private[spark] case class GotBlock(id: String, data: ByteBuffer)
diff --git a/core/src/main/scala/spark/storage/BlockMessageArray.scala b/core/src/main/scala/org/apache/spark/storage/BlockMessageArray.scala
similarity index 98%
rename from core/src/main/scala/spark/storage/BlockMessageArray.scala
rename to core/src/main/scala/org/apache/spark/storage/BlockMessageArray.scala
index ee2fc167d5b5198bfafebe634f6f3d1b8c2d8fc4..0aaf846b5b6606d3af2ba5fce6147f79050d5ea9 100644
--- a/core/src/main/scala/spark/storage/BlockMessageArray.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockMessageArray.scala
@@ -15,14 +15,14 @@
  * limitations under the License.
  */
 
-package spark.storage
+package org.apache.spark.storage
 
 import java.nio.ByteBuffer
 
 import scala.collection.mutable.ArrayBuffer
 
-import spark._
-import spark.network._
+import org.apache.spark._
+import org.apache.spark.network._
 
 private[spark]
 class BlockMessageArray(var blockMessages: Seq[BlockMessage]) extends Seq[BlockMessage] with Logging {
diff --git a/core/src/main/scala/spark/storage/BlockObjectWriter.scala b/core/src/main/scala/org/apache/spark/storage/BlockObjectWriter.scala
similarity index 98%
rename from core/src/main/scala/spark/storage/BlockObjectWriter.scala
rename to core/src/main/scala/org/apache/spark/storage/BlockObjectWriter.scala
index 3812009ca16709bda71e60366180cda77a2b2536..39f103297f90c86eb563da121c13797f9f76054b 100644
--- a/core/src/main/scala/spark/storage/BlockObjectWriter.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockObjectWriter.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.storage
+package org.apache.spark.storage
 
 
 /**
diff --git a/core/src/main/scala/spark/storage/BlockStore.scala b/core/src/main/scala/org/apache/spark/storage/BlockStore.scala
similarity index 96%
rename from core/src/main/scala/spark/storage/BlockStore.scala
rename to core/src/main/scala/org/apache/spark/storage/BlockStore.scala
index c8db0022b0254919535bc610950162751678ab86..fa834371f4800e82fb1f6541d0db88db60b962a5 100644
--- a/core/src/main/scala/spark/storage/BlockStore.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockStore.scala
@@ -15,12 +15,12 @@
  * limitations under the License.
  */
 
-package spark.storage
+package org.apache.spark.storage
 
 import java.nio.ByteBuffer
 import scala.collection.mutable.ArrayBuffer
 
-import spark.Logging
+import org.apache.spark.Logging
 
 /**
  * Abstract class to store blocks
diff --git a/core/src/main/scala/spark/storage/DiskStore.scala b/core/src/main/scala/org/apache/spark/storage/DiskStore.scala
similarity index 97%
rename from core/src/main/scala/spark/storage/DiskStore.scala
rename to core/src/main/scala/org/apache/spark/storage/DiskStore.scala
index b14497157e9e2123c14aaee58499bfb54bd290e5..fc25ef0fae2c9533b82b6c0fb1054ca6e71d3d16 100644
--- a/core/src/main/scala/spark/storage/DiskStore.scala
+++ b/core/src/main/scala/org/apache/spark/storage/DiskStore.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.storage
+package org.apache.spark.storage
 
 import java.io.{File, FileOutputStream, OutputStream, RandomAccessFile}
 import java.nio.ByteBuffer
@@ -28,12 +28,12 @@ import scala.collection.mutable.ArrayBuffer
 
 import it.unimi.dsi.fastutil.io.FastBufferedOutputStream
 
-import spark.Utils
-import spark.executor.ExecutorExitCode
-import spark.serializer.{Serializer, SerializationStream}
-import spark.Logging
-import spark.network.netty.ShuffleSender
-import spark.network.netty.PathResolver
+import org.apache.spark.executor.ExecutorExitCode
+import org.apache.spark.serializer.{Serializer, SerializationStream}
+import org.apache.spark.Logging
+import org.apache.spark.network.netty.ShuffleSender
+import org.apache.spark.network.netty.PathResolver
+import org.apache.spark.util.Utils
 
 
 /**
diff --git a/core/src/main/scala/spark/storage/MemoryStore.scala b/core/src/main/scala/org/apache/spark/storage/MemoryStore.scala
similarity index 99%
rename from core/src/main/scala/spark/storage/MemoryStore.scala
rename to core/src/main/scala/org/apache/spark/storage/MemoryStore.scala
index 5a51f5cf313fa01a6de197a8c8337b4da72fba52..3b3b2342fa213e8db7f5b1a18ee692016b241148 100644
--- a/core/src/main/scala/spark/storage/MemoryStore.scala
+++ b/core/src/main/scala/org/apache/spark/storage/MemoryStore.scala
@@ -15,13 +15,13 @@
  * limitations under the License.
  */
 
-package spark.storage
+package org.apache.spark.storage
 
 import java.util.LinkedHashMap
 import java.util.concurrent.ArrayBlockingQueue
-import spark.{SizeEstimator, Utils}
 import java.nio.ByteBuffer
 import collection.mutable.ArrayBuffer
+import org.apache.spark.util.{SizeEstimator, Utils}
 
 /**
  * Stores blocks in memory, either as ArrayBuffers of deserialized Java objects or as
diff --git a/core/src/main/scala/spark/storage/PutResult.scala b/core/src/main/scala/org/apache/spark/storage/PutResult.scala
similarity index 97%
rename from core/src/main/scala/spark/storage/PutResult.scala
rename to core/src/main/scala/org/apache/spark/storage/PutResult.scala
index 3a0974fe15f13b1fdd1333bfdc86fea727d96c16..2eba2f06b5bfd96e02260525d6e3309dd84d2919 100644
--- a/core/src/main/scala/spark/storage/PutResult.scala
+++ b/core/src/main/scala/org/apache/spark/storage/PutResult.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.storage
+package org.apache.spark.storage
 
 import java.nio.ByteBuffer
 
diff --git a/core/src/main/scala/spark/storage/ShuffleBlockManager.scala b/core/src/main/scala/org/apache/spark/storage/ShuffleBlockManager.scala
similarity index 96%
rename from core/src/main/scala/spark/storage/ShuffleBlockManager.scala
rename to core/src/main/scala/org/apache/spark/storage/ShuffleBlockManager.scala
index 8a7a6f9ed3d0833aa08f5d346176512f7ca32124..9da11efb570d8d4c218c5884665a4ba0d237a07e 100644
--- a/core/src/main/scala/spark/storage/ShuffleBlockManager.scala
+++ b/core/src/main/scala/org/apache/spark/storage/ShuffleBlockManager.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.storage
+package org.apache.spark.storage
 
-import spark.serializer.Serializer
+import org.apache.spark.serializer.Serializer
 
 
 private[spark]
diff --git a/core/src/main/scala/spark/storage/StorageLevel.scala b/core/src/main/scala/org/apache/spark/storage/StorageLevel.scala
similarity index 97%
rename from core/src/main/scala/spark/storage/StorageLevel.scala
rename to core/src/main/scala/org/apache/spark/storage/StorageLevel.scala
index f52650988c9dc9a4497486ca744922bba2948d88..755f1a760ee051dfd9c94ce6efb5d7765609060d 100644
--- a/core/src/main/scala/spark/storage/StorageLevel.scala
+++ b/core/src/main/scala/org/apache/spark/storage/StorageLevel.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.storage
+package org.apache.spark.storage
 
 import java.io.{Externalizable, IOException, ObjectInput, ObjectOutput}
 
@@ -23,7 +23,7 @@ import java.io.{Externalizable, IOException, ObjectInput, ObjectOutput}
  * Flags for controlling the storage of an RDD. Each StorageLevel records whether to use memory,
  * whether to drop the RDD to disk if it falls out of memory, whether to keep the data in memory
  * in a serialized format, and whether to replicate the RDD partitions on multiple nodes.
- * The [[spark.storage.StorageLevel$]] singleton object contains some static constants for
+ * The [[org.apache.spark.storage.StorageLevel$]] singleton object contains some static constants for
  * commonly useful storage levels. To create your own storage level object, use the factory method
  * of the singleton object (`StorageLevel(...)`).
  */
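
A sketch of the factory method that comment points at, assuming the 0.8-era signature `StorageLevel(useDisk, useMemory, deserialized, replication)`:

    import org.apache.spark.storage.StorageLevel

    object StorageLevelDemo {
      def main(args: Array[String]) {
        // Serialized, memory-only, replicated to two nodes (signature assumed).
        val memorySer2 = StorageLevel(false, true, false, 2)
        println(memorySer2)
        println(StorageLevel.MEMORY_ONLY) // predefined constant on the singleton
      }
    }
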
diff --git a/core/src/main/scala/spark/storage/StorageUtils.scala b/core/src/main/scala/org/apache/spark/storage/StorageUtils.scala
similarity index 97%
rename from core/src/main/scala/spark/storage/StorageUtils.scala
rename to core/src/main/scala/org/apache/spark/storage/StorageUtils.scala
index 123b8f634571f9af7636ff8339881a9e57204a47..2bb7715696cc7b25ede77a4ebe405df5fb73016f 100644
--- a/core/src/main/scala/spark/storage/StorageUtils.scala
+++ b/core/src/main/scala/org/apache/spark/storage/StorageUtils.scala
@@ -15,10 +15,11 @@
  * limitations under the License.
  */
 
-package spark.storage
+package org.apache.spark.storage
 
-import spark.{Utils, SparkContext}
+import org.apache.spark.SparkContext
 import BlockManagerMasterActor.BlockStatus
+import org.apache.spark.util.Utils
 
 private[spark]
 case class StorageStatus(blockManagerId: BlockManagerId, maxMem: Long,
diff --git a/core/src/main/scala/spark/storage/ThreadingTest.scala b/core/src/main/scala/org/apache/spark/storage/ThreadingTest.scala
similarity index 98%
rename from core/src/main/scala/spark/storage/ThreadingTest.scala
rename to core/src/main/scala/org/apache/spark/storage/ThreadingTest.scala
index b3ab1ff4b4dca7b324b7bf416ac993bc36c0c607..f2ae8dd97dab1758308e3b36ffd1db1eb03b0955 100644
--- a/core/src/main/scala/spark/storage/ThreadingTest.scala
+++ b/core/src/main/scala/org/apache/spark/storage/ThreadingTest.scala
@@ -15,13 +15,13 @@
  * limitations under the License.
  */
 
-package spark.storage
+package org.apache.spark.storage
 
 import akka.actor._
 
-import spark.KryoSerializer
 import java.util.concurrent.ArrayBlockingQueue
 import util.Random
+import org.apache.spark.serializer.KryoSerializer
 
 /**
  * This class tests the BlockManager and MemoryStore for thread safety and
diff --git a/core/src/main/scala/spark/ui/JettyUtils.scala b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
similarity index 98%
rename from core/src/main/scala/spark/ui/JettyUtils.scala
rename to core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
index f66fe39905228e3167dabf33a1ab87740e7c01f5..7211dbc7c6681b7c7209471e7d294889d2f3343c 100644
--- a/core/src/main/scala/spark/ui/JettyUtils.scala
+++ b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.ui
+package org.apache.spark.ui
 
 import javax.servlet.http.{HttpServletResponse, HttpServletRequest}
 
@@ -29,7 +29,7 @@ import org.eclipse.jetty.server.{Server, Request, Handler}
 import org.eclipse.jetty.server.handler.{ResourceHandler, HandlerList, ContextHandler, AbstractHandler}
 import org.eclipse.jetty.util.thread.QueuedThreadPool
 
-import spark.Logging
+import org.apache.spark.Logging
 
 
 /** Utilities for launching a web server using Jetty's HTTP Server class */
@@ -117,7 +117,6 @@ private[spark] object JettyUtils extends Logging {
 
       Try { server.start() } match {
         case s: Success[_] =>
-          sys.addShutdownHook(server.stop()) // Be kind, un-bind
           (server, server.getConnectors.head.getLocalPort)
         case f: Failure[_] =>
           server.stop()
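
With the shutdown hook gone, whoever builds the server owns stopping it; the match above still cleans up a half-started server before failing. The same start-or-clean-up shape in plain try/catch (Jetty `Server` API as already used in this file; retrying on another port is assumed to happen in the caller):

    import org.eclipse.jetty.server.Server

    object JettyStartSketch {
      // Returns the server and its bound port, or stops it and rethrows.
      def startOn(port: Int): (Server, Int) = {
        val server = new Server(port)
        try {
          server.start()
          (server, server.getConnectors.head.getLocalPort)
        } catch {
          case e: Exception =>
            server.stop() // don't leak a half-started server
            throw e
        }
      }
    }
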
diff --git a/core/src/main/scala/spark/ui/Page.scala b/core/src/main/scala/org/apache/spark/ui/Page.scala
similarity index 97%
rename from core/src/main/scala/spark/ui/Page.scala
rename to core/src/main/scala/org/apache/spark/ui/Page.scala
index 87376a19d86e9b2a1ca1cc97a464355a6ed50549..b2a069a37552d7d43acd4923220ca7feaf62c064 100644
--- a/core/src/main/scala/spark/ui/Page.scala
+++ b/core/src/main/scala/org/apache/spark/ui/Page.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.ui
+package org.apache.spark.ui
 
 private[spark] object Page extends Enumeration {
   val Stages, Storage, Environment, Executors = Value
diff --git a/core/src/main/scala/spark/ui/SparkUI.scala b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
similarity index 87%
rename from core/src/main/scala/spark/ui/SparkUI.scala
rename to core/src/main/scala/org/apache/spark/ui/SparkUI.scala
index 23ded44ba383de802aabbecaf136bfd53cbc09d4..ad456ea5653c27cd707d5b3f0687d36911133364 100644
--- a/core/src/main/scala/spark/ui/SparkUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
@@ -15,18 +15,19 @@
  * limitations under the License.
  */
 
-package spark.ui
+package org.apache.spark.ui
 
 import javax.servlet.http.HttpServletRequest
 
 import org.eclipse.jetty.server.{Handler, Server}
 
-import spark.{Logging, SparkContext, SparkEnv, Utils}
-import spark.ui.env.EnvironmentUI
-import spark.ui.exec.ExecutorsUI
-import spark.ui.storage.BlockManagerUI
-import spark.ui.jobs.JobProgressUI
-import spark.ui.JettyUtils._
+import org.apache.spark.{Logging, SparkContext, SparkEnv}
+import org.apache.spark.ui.env.EnvironmentUI
+import org.apache.spark.ui.exec.ExecutorsUI
+import org.apache.spark.ui.storage.BlockManagerUI
+import org.apache.spark.ui.jobs.JobProgressUI
+import org.apache.spark.ui.JettyUtils._
+import org.apache.spark.util.Utils
 
 /** Top level user interface for Spark */
 private[spark] class SparkUI(sc: SparkContext) extends Logging {
@@ -83,5 +84,5 @@ private[spark] class SparkUI(sc: SparkContext) extends Logging {
 
 private[spark] object SparkUI {
   val DEFAULT_PORT = "3030"
-  val STATIC_RESOURCE_DIR = "spark/ui/static"
+  val STATIC_RESOURCE_DIR = "org/apache/spark/ui/static"
 }
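
`STATIC_RESOURCE_DIR` is resolved off the classpath, so the static assets have to live under the new package path. A one-line sanity check, assuming the relocated assets are on the classpath:

    object StaticResourceCheck {
      def main(args: Array[String]) {
        // null here would mean the assets did not move with the package rename.
        println(Thread.currentThread.getContextClassLoader
          .getResource("org/apache/spark/ui/static"))
      }
    }
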
diff --git a/core/src/main/scala/spark/ui/UIUtils.scala b/core/src/main/scala/org/apache/spark/ui/UIUtils.scala
similarity index 98%
rename from core/src/main/scala/spark/ui/UIUtils.scala
rename to core/src/main/scala/org/apache/spark/ui/UIUtils.scala
index 51bb18d888dc0fb4a8dfc216d7ed36d2a00feca2..ce1acf564cc9d2ed4ccfacd95c617295a472e1a9 100644
--- a/core/src/main/scala/spark/ui/UIUtils.scala
+++ b/core/src/main/scala/org/apache/spark/ui/UIUtils.scala
@@ -15,11 +15,11 @@
  * limitations under the License.
  */
 
-package spark.ui
+package org.apache.spark.ui
 
 import scala.xml.Node
 
-import spark.SparkContext
+import org.apache.spark.SparkContext
 
 /** Utility functions for generating XML pages with spark content. */
 private[spark] object UIUtils {
diff --git a/core/src/main/scala/spark/ui/UIWorkloadGenerator.scala b/core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala
similarity index 95%
rename from core/src/main/scala/spark/ui/UIWorkloadGenerator.scala
rename to core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala
index 5ff0572f0a33b37e8c6d4a9579ef871bd923cabf..0ecb22d2f96f3f98f2adec3440bc1c9d90f7800c 100644
--- a/core/src/main/scala/spark/ui/UIWorkloadGenerator.scala
+++ b/core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala
@@ -15,13 +15,13 @@
  * limitations under the License.
  */
 
-package spark.ui
+package org.apache.spark.ui
 
 import scala.util.Random
 
-import spark.SparkContext
-import spark.SparkContext._
-import spark.scheduler.cluster.SchedulingMode
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.scheduler.cluster.SchedulingMode
 
 
 /**
diff --git a/core/src/main/scala/spark/ui/env/EnvironmentUI.scala b/core/src/main/scala/org/apache/spark/ui/env/EnvironmentUI.scala
similarity index 94%
rename from core/src/main/scala/spark/ui/env/EnvironmentUI.scala
rename to core/src/main/scala/org/apache/spark/ui/env/EnvironmentUI.scala
index b1be1a27efe2fe66ff02db8a477153a5e5518a90..c5bf2acc9ec95d1ee11ed85d7ff2cff68f5ac405 100644
--- a/core/src/main/scala/spark/ui/env/EnvironmentUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/env/EnvironmentUI.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.ui.env
+package org.apache.spark.ui.env
 
 import javax.servlet.http.HttpServletRequest
 
@@ -25,10 +25,10 @@ import scala.xml.Node
 
 import org.eclipse.jetty.server.Handler
 
-import spark.ui.JettyUtils._
-import spark.ui.UIUtils
-import spark.ui.Page.Environment
-import spark.SparkContext
+import org.apache.spark.ui.JettyUtils._
+import org.apache.spark.ui.UIUtils
+import org.apache.spark.ui.Page.Environment
+import org.apache.spark.SparkContext
 
 
 private[spark] class EnvironmentUI(sc: SparkContext) {
diff --git a/core/src/main/scala/spark/ui/exec/ExecutorsUI.scala b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsUI.scala
similarity index 90%
rename from core/src/main/scala/spark/ui/exec/ExecutorsUI.scala
rename to core/src/main/scala/org/apache/spark/ui/exec/ExecutorsUI.scala
index 0a7021fbf866d74d30244386ac1191be2cb140fe..6e56c22d04a0072d4ae83cb82f55522ed175a262 100644
--- a/core/src/main/scala/spark/ui/exec/ExecutorsUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsUI.scala
@@ -1,4 +1,4 @@
-package spark.ui.exec
+package org.apache.spark.ui.exec
 
 import javax.servlet.http.HttpServletRequest
 
@@ -7,13 +7,14 @@ import scala.xml.Node
 
 import org.eclipse.jetty.server.Handler
 
-import spark.{ExceptionFailure, Logging, Utils, SparkContext}
-import spark.executor.TaskMetrics
-import spark.scheduler.cluster.TaskInfo
-import spark.scheduler.{SparkListenerTaskStart, SparkListenerTaskEnd, SparkListener}
-import spark.ui.JettyUtils._
-import spark.ui.Page.Executors
-import spark.ui.UIUtils
+import org.apache.spark.{ExceptionFailure, Logging, SparkContext}
+import org.apache.spark.executor.TaskMetrics
+import org.apache.spark.scheduler.cluster.TaskInfo
+import org.apache.spark.scheduler.{SparkListenerTaskStart, SparkListenerTaskEnd, SparkListener}
+import org.apache.spark.ui.JettyUtils._
+import org.apache.spark.ui.Page.Executors
+import org.apache.spark.ui.UIUtils
+import org.apache.spark.util.Utils
 
 
 private[spark] class ExecutorsUI(val sc: SparkContext) {
diff --git a/core/src/main/scala/spark/ui/jobs/IndexPage.scala b/core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala
similarity index 95%
rename from core/src/main/scala/spark/ui/jobs/IndexPage.scala
rename to core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala
index 8867a6c90c17d06e9cf24e252709754b672876f7..3b428effafad5c650708672cf9a5df23e3f3d1ed 100644
--- a/core/src/main/scala/spark/ui/jobs/IndexPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala
@@ -15,15 +15,15 @@
  * limitations under the License.
  */
 
-package spark.ui.jobs
+package org.apache.spark.ui.jobs
 
 import javax.servlet.http.HttpServletRequest
 
 import scala.xml.{NodeSeq, Node}
 
-import spark.scheduler.cluster.SchedulingMode
-import spark.ui.Page._
-import spark.ui.UIUtils._
+import org.apache.spark.scheduler.cluster.SchedulingMode
+import org.apache.spark.ui.Page._
+import org.apache.spark.ui.UIUtils._
 
 
 /** Page showing list of all ongoing and recently finished stages and pools */
diff --git a/core/src/main/scala/spark/ui/jobs/JobProgressListener.scala b/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala
similarity index 96%
rename from core/src/main/scala/spark/ui/jobs/JobProgressListener.scala
rename to core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala
index 1d9767a83c8f65a75a7897eee406ae19a91b12f9..86e0af0399f4e012c732ca202bea449a6de0b1e7 100644
--- a/core/src/main/scala/spark/ui/jobs/JobProgressListener.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala
@@ -1,12 +1,12 @@
-package spark.ui.jobs
+package org.apache.spark.ui.jobs
 
 import scala.Seq
 import scala.collection.mutable.{ListBuffer, HashMap, HashSet}
 
-import spark.{ExceptionFailure, SparkContext, Success, Utils}
-import spark.scheduler._
-import spark.scheduler.cluster.TaskInfo
-import spark.executor.TaskMetrics
+import org.apache.spark.{ExceptionFailure, SparkContext, Success}
+import org.apache.spark.scheduler._
+import org.apache.spark.scheduler.cluster.TaskInfo
+import org.apache.spark.executor.TaskMetrics
 import collection.mutable
 
 /**
diff --git a/core/src/main/scala/spark/ui/jobs/JobProgressUI.scala b/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressUI.scala
similarity index 85%
rename from core/src/main/scala/spark/ui/jobs/JobProgressUI.scala
rename to core/src/main/scala/org/apache/spark/ui/jobs/JobProgressUI.scala
index c83f102ff32ff024dc1c776741209176bdeaf1f3..6aecef5120451a58909c64e9bd5f558332316b00 100644
--- a/core/src/main/scala/spark/ui/jobs/JobProgressUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressUI.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.ui.jobs
+package org.apache.spark.ui.jobs
 
 import akka.util.Duration
 
@@ -28,12 +28,13 @@ import org.eclipse.jetty.server.Handler
 import scala.Seq
 import scala.collection.mutable.{HashSet, ListBuffer, HashMap, ArrayBuffer}
 
-import spark.ui.JettyUtils._
-import spark.{ExceptionFailure, SparkContext, Success, Utils}
-import spark.scheduler._
+import org.apache.spark.ui.JettyUtils._
+import org.apache.spark.{ExceptionFailure, SparkContext, Success}
+import org.apache.spark.scheduler._
 import collection.mutable
-import spark.scheduler.cluster.SchedulingMode
-import spark.scheduler.cluster.SchedulingMode.SchedulingMode
+import org.apache.spark.scheduler.cluster.SchedulingMode
+import org.apache.spark.scheduler.cluster.SchedulingMode.SchedulingMode
+import org.apache.spark.util.Utils
 
 /** Web UI showing progress status of all jobs in the given SparkContext. */
 private[spark] class JobProgressUI(val sc: SparkContext) {
diff --git a/core/src/main/scala/spark/ui/jobs/PoolPage.scala b/core/src/main/scala/org/apache/spark/ui/jobs/PoolPage.scala
similarity index 87%
rename from core/src/main/scala/spark/ui/jobs/PoolPage.scala
rename to core/src/main/scala/org/apache/spark/ui/jobs/PoolPage.scala
index 7fb74dce4065967e23ced4669e1eacb259cb923f..ce92b6932b3f0a5d25bca6d2f9763d35b1b816c2 100644
--- a/core/src/main/scala/spark/ui/jobs/PoolPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/PoolPage.scala
@@ -1,13 +1,13 @@
-package spark.ui.jobs
+package org.apache.spark.ui.jobs
 
 import javax.servlet.http.HttpServletRequest
 
 import scala.xml.{NodeSeq, Node}
 import scala.collection.mutable.HashSet
 
-import spark.scheduler.Stage
-import spark.ui.UIUtils._
-import spark.ui.Page._
+import org.apache.spark.scheduler.Stage
+import org.apache.spark.ui.UIUtils._
+import org.apache.spark.ui.Page._
 
 /** Page showing specific pool details */
 private[spark] class PoolPage(parent: JobProgressUI) {
diff --git a/core/src/main/scala/spark/ui/jobs/PoolTable.scala b/core/src/main/scala/org/apache/spark/ui/jobs/PoolTable.scala
similarity index 92%
rename from core/src/main/scala/spark/ui/jobs/PoolTable.scala
rename to core/src/main/scala/org/apache/spark/ui/jobs/PoolTable.scala
index 621828f9c3ea44899b0fee7b6bc6ceba9cc1837c..f31465e59daaaf9022141c3653013ffeb647870a 100644
--- a/core/src/main/scala/spark/ui/jobs/PoolTable.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/PoolTable.scala
@@ -1,11 +1,11 @@
-package spark.ui.jobs
+package org.apache.spark.ui.jobs
 
 import scala.collection.mutable.HashMap
 import scala.collection.mutable.HashSet
 import scala.xml.Node
 
-import spark.scheduler.Stage
-import spark.scheduler.cluster.Schedulable
+import org.apache.spark.scheduler.Stage
+import org.apache.spark.scheduler.cluster.Schedulable
 
 /** Table showing list of pools */
 private[spark] class PoolTable(pools: Seq[Schedulable], listener: JobProgressListener) {
diff --git a/core/src/main/scala/spark/ui/jobs/StagePage.scala b/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
similarity index 95%
rename from core/src/main/scala/spark/ui/jobs/StagePage.scala
rename to core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
index c2341475c7fffc776ec371d90f480ea8b8380067..a9969ab1c0870bc52d86bcf78ab4dc7699a4709d 100644
--- a/core/src/main/scala/spark/ui/jobs/StagePage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.ui.jobs
+package org.apache.spark.ui.jobs
 
 import java.util.Date
 
@@ -23,12 +23,12 @@ import javax.servlet.http.HttpServletRequest
 
 import scala.xml.Node
 
-import spark.ui.UIUtils._
-import spark.ui.Page._
-import spark.util.Distribution
-import spark.{ExceptionFailure, Utils}
-import spark.scheduler.cluster.TaskInfo
-import spark.executor.TaskMetrics
+import org.apache.spark.ui.UIUtils._
+import org.apache.spark.ui.Page._
+import org.apache.spark.util.{Utils, Distribution}
+import org.apache.spark.ExceptionFailure
+import org.apache.spark.scheduler.cluster.TaskInfo
+import org.apache.spark.executor.TaskMetrics
 
 /** Page showing statistics and task list for a given stage */
 private[spark] class StagePage(parent: JobProgressUI) {
diff --git a/core/src/main/scala/spark/ui/jobs/StageTable.scala b/core/src/main/scala/org/apache/spark/ui/jobs/StageTable.scala
similarity index 95%
rename from core/src/main/scala/spark/ui/jobs/StageTable.scala
rename to core/src/main/scala/org/apache/spark/ui/jobs/StageTable.scala
index 2b1bc984fcab451d0d2561d889fe36473bd99d98..71e58a977ef13ab00accb3b6c206fb4e9c4d2bff 100644
--- a/core/src/main/scala/spark/ui/jobs/StageTable.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/StageTable.scala
@@ -1,13 +1,13 @@
-package spark.ui.jobs
+package org.apache.spark.ui.jobs
 
 import java.util.Date
 
 import scala.xml.Node
 import scala.collection.mutable.HashSet
 
-import spark.Utils
-import spark.scheduler.cluster.{SchedulingMode, TaskInfo}
-import spark.scheduler.Stage
+import org.apache.spark.scheduler.cluster.{SchedulingMode, TaskInfo}
+import org.apache.spark.scheduler.Stage
+import org.apache.spark.util.Utils
 
 
 /** Page showing list of all ongoing and recently finished stages */
diff --git a/core/src/main/scala/spark/ui/storage/BlockManagerUI.scala b/core/src/main/scala/org/apache/spark/ui/storage/BlockManagerUI.scala
similarity index 92%
rename from core/src/main/scala/spark/ui/storage/BlockManagerUI.scala
rename to core/src/main/scala/org/apache/spark/ui/storage/BlockManagerUI.scala
index 49ed069c75958f8eedee2236854f49b1b517836e..1d633d374a0a04abfc1e1172b342beee3debe77f 100644
--- a/core/src/main/scala/spark/ui/storage/BlockManagerUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/storage/BlockManagerUI.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.ui.storage
+package org.apache.spark.ui.storage
 
 import akka.util.Duration
 
@@ -23,8 +23,8 @@ import javax.servlet.http.HttpServletRequest
 
 import org.eclipse.jetty.server.Handler
 
-import spark.{Logging, SparkContext}
-import spark.ui.JettyUtils._
+import org.apache.spark.{Logging, SparkContext}
+import org.apache.spark.ui.JettyUtils._
 
 /** Web UI showing storage status of all RDD's in the given SparkContext. */
 private[spark] class BlockManagerUI(val sc: SparkContext) extends Logging {
diff --git a/core/src/main/scala/spark/ui/storage/IndexPage.scala b/core/src/main/scala/org/apache/spark/ui/storage/IndexPage.scala
similarity index 90%
rename from core/src/main/scala/spark/ui/storage/IndexPage.scala
rename to core/src/main/scala/org/apache/spark/ui/storage/IndexPage.scala
index fc6273c694542e8719d96fe213f4fa3875247b45..c3ec9073704c26fd02b4326611edde630f54ecdf 100644
--- a/core/src/main/scala/spark/ui/storage/IndexPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/storage/IndexPage.scala
@@ -15,16 +15,16 @@
  * limitations under the License.
  */
 
-package spark.ui.storage
+package org.apache.spark.ui.storage
 
 import javax.servlet.http.HttpServletRequest
 
 import scala.xml.Node
 
-import spark.storage.{RDDInfo, StorageUtils}
-import spark.Utils
-import spark.ui.UIUtils._
-import spark.ui.Page._
+import org.apache.spark.storage.{RDDInfo, StorageUtils}
+import org.apache.spark.ui.UIUtils._
+import org.apache.spark.ui.Page._
+import org.apache.spark.util.Utils
 
 /** Page showing list of RDD's currently stored in the cluster */
 private[spark] class IndexPage(parent: BlockManagerUI) {
diff --git a/core/src/main/scala/spark/ui/storage/RDDPage.scala b/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala
similarity index 93%
rename from core/src/main/scala/spark/ui/storage/RDDPage.scala
rename to core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala
index b128a5614dcc7678fb3c8b17e6a82ed77b345083..43c125767709fc5ff9bfe7983689265726eb0a58 100644
--- a/core/src/main/scala/spark/ui/storage/RDDPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala
@@ -15,17 +15,17 @@
  * limitations under the License.
  */
 
-package spark.ui.storage
+package org.apache.spark.ui.storage
 
 import javax.servlet.http.HttpServletRequest
 
 import scala.xml.Node
 
-import spark.Utils
-import spark.storage.{StorageStatus, StorageUtils}
-import spark.storage.BlockManagerMasterActor.BlockStatus
-import spark.ui.UIUtils._
-import spark.ui.Page._
+import org.apache.spark.storage.{StorageStatus, StorageUtils}
+import org.apache.spark.storage.BlockManagerMasterActor.BlockStatus
+import org.apache.spark.ui.UIUtils._
+import org.apache.spark.ui.Page._
+import org.apache.spark.util.Utils
 
 
 /** Page showing storage details for a given RDD */
diff --git a/core/src/main/scala/spark/util/AkkaUtils.scala b/core/src/main/scala/org/apache/spark/util/AkkaUtils.scala
similarity index 99%
rename from core/src/main/scala/spark/util/AkkaUtils.scala
rename to core/src/main/scala/org/apache/spark/util/AkkaUtils.scala
index 9233277bdb376c7df6ceca91855251a5cbcb2dba..d4c5065c3f5cfdeec6cba991d3919e719d84301c 100644
--- a/core/src/main/scala/spark/util/AkkaUtils.scala
+++ b/core/src/main/scala/org/apache/spark/util/AkkaUtils.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.util
+package org.apache.spark.util
 
 import akka.actor.{ActorSystem, ExtendedActorSystem}
 import com.typesafe.config.ConfigFactory
diff --git a/core/src/main/scala/spark/util/BoundedPriorityQueue.scala b/core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala
similarity index 98%
rename from core/src/main/scala/spark/util/BoundedPriorityQueue.scala
rename to core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala
index 0575497f5d8e87de35e345193cda23e158592263..0b51c23f7b2803498e527f56ad07e1e889aa1436 100644
--- a/core/src/main/scala/spark/util/BoundedPriorityQueue.scala
+++ b/core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.util
+package org.apache.spark.util
 
 import java.io.Serializable
 import java.util.{PriorityQueue => JPriorityQueue}
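
For context, the class renamed here wraps `java.util.PriorityQueue` (aliased to `JPriorityQueue` above) to retain only the largest N elements seen. A standalone sketch of that idea, not the class's actual API:

    import java.util.{PriorityQueue => JPriorityQueue}

    // Sketch: keep at most `maxSize` elements, evicting the smallest when a
    // larger one arrives, so the queue always holds the current top N.
    class TopNSketch[A](maxSize: Int)(implicit ord: Ordering[A]) {
      private val underlying = new JPriorityQueue[A](maxSize, ord)
      def offer(a: A): Unit = {
        if (underlying.size < maxSize) {
          underlying.offer(a)
        } else if (ord.gt(a, underlying.peek)) {
          underlying.poll()   // drop the current minimum
          underlying.offer(a)
        }
      }
    }
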
diff --git a/core/src/main/scala/spark/util/ByteBufferInputStream.scala b/core/src/main/scala/org/apache/spark/util/ByteBufferInputStream.scala
similarity index 96%
rename from core/src/main/scala/spark/util/ByteBufferInputStream.scala
rename to core/src/main/scala/org/apache/spark/util/ByteBufferInputStream.scala
index 47a28e2f76e4067d095bbb38de34baa420cbc037..e214d2a519a20d94281af8e66f6c4d7716e7bbdf 100644
--- a/core/src/main/scala/spark/util/ByteBufferInputStream.scala
+++ b/core/src/main/scala/org/apache/spark/util/ByteBufferInputStream.scala
@@ -15,11 +15,11 @@
  * limitations under the License.
  */
 
-package spark.util
+package org.apache.spark.util
 
 import java.io.InputStream
 import java.nio.ByteBuffer
-import spark.storage.BlockManager
+import org.apache.spark.storage.BlockManager
 
 /**
  * Reads data from a ByteBuffer, and optionally cleans it up using BlockManager.dispose()
diff --git a/core/src/main/scala/spark/util/Clock.scala b/core/src/main/scala/org/apache/spark/util/Clock.scala
similarity index 97%
rename from core/src/main/scala/spark/util/Clock.scala
rename to core/src/main/scala/org/apache/spark/util/Clock.scala
index aa71a5b4424640bff202c2efbc9d631bd0c9ee95..97c2b45aabf2875c2682dcd350b92c935376c099 100644
--- a/core/src/main/scala/spark/util/Clock.scala
+++ b/core/src/main/scala/org/apache/spark/util/Clock.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.util
+package org.apache.spark.util
 
 /**
  * An interface to represent clocks, so that they can be mocked out in unit tests.
diff --git a/core/src/main/scala/spark/ClosureCleaner.scala b/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala
similarity index 99%
rename from core/src/main/scala/spark/ClosureCleaner.scala
rename to core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala
index 8b39241095b886e8ecb0d6831a33a99ec245e3a3..7108595e3e44518d38f48d5a76b529d2b0febdfe 100644
--- a/core/src/main/scala/spark/ClosureCleaner.scala
+++ b/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark.util
 
 import java.lang.reflect.Field
 
@@ -25,6 +25,7 @@ import scala.collection.mutable.Set
 import org.objectweb.asm.{ClassReader, ClassVisitor, MethodVisitor, Type}
 import org.objectweb.asm.Opcodes._
 import java.io.{InputStream, IOException, ByteArrayOutputStream, ByteArrayInputStream, BufferedInputStream}
+import org.apache.spark.Logging
 
 private[spark] object ClosureCleaner extends Logging {
   // Get an ASM class reader for a given class from the JAR that loaded it
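
The extra `import org.apache.spark.Logging` is needed because files that used to sit directly in `package spark` saw `Logging` without any import; after the move, `org.apache.spark.util` is a sibling package, and a flat `package org.apache.spark.util` clause does not bring `org.apache.spark` members into scope. A minimal illustration with hypothetical names:

    package org.apache.spark {
      trait Logging { def logInfo(msg: => String) { println(msg) } }
    }

    package org.apache.spark.util {
      import org.apache.spark.Logging   // required: not in scope automatically

      object ScopeDemo extends Logging {
        def main(args: Array[String]) { logInfo("resolved via the explicit import") }
      }
    }
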
diff --git a/core/src/main/scala/spark/util/CompletionIterator.scala b/core/src/main/scala/org/apache/spark/util/CompletionIterator.scala
similarity index 97%
rename from core/src/main/scala/spark/util/CompletionIterator.scala
rename to core/src/main/scala/org/apache/spark/util/CompletionIterator.scala
index 210450892b00463ba05b455811464cd5a2a33026..dc15a38b29d702282986266b6674138b5c670c39 100644
--- a/core/src/main/scala/spark/util/CompletionIterator.scala
+++ b/core/src/main/scala/org/apache/spark/util/CompletionIterator.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.util
+package org.apache.spark.util
 
 /**
  * Wrapper around an iterator which calls a completion method after it successfully iterates through all the elements
diff --git a/core/src/main/scala/spark/util/Distribution.scala b/core/src/main/scala/org/apache/spark/util/Distribution.scala
similarity index 98%
rename from core/src/main/scala/spark/util/Distribution.scala
rename to core/src/main/scala/org/apache/spark/util/Distribution.scala
index 5d4d7a6c50a5c68375343918fc86ce04e61dbdf8..33bf3562fe342b70424642f186514be16a7222fd 100644
--- a/core/src/main/scala/spark/util/Distribution.scala
+++ b/core/src/main/scala/org/apache/spark/util/Distribution.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.util
+package org.apache.spark.util
 
 import java.io.PrintStream
 
diff --git a/core/src/main/scala/spark/util/IdGenerator.scala b/core/src/main/scala/org/apache/spark/util/IdGenerator.scala
similarity index 97%
rename from core/src/main/scala/spark/util/IdGenerator.scala
rename to core/src/main/scala/org/apache/spark/util/IdGenerator.scala
index 3422280559559a5e83c0b6954cdcb3bc63d527db..17e55f7996bf77574acf620ee2a9b6ef96b61ffc 100644
--- a/core/src/main/scala/spark/util/IdGenerator.scala
+++ b/core/src/main/scala/org/apache/spark/util/IdGenerator.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.util
+package org.apache.spark.util
 
 import java.util.concurrent.atomic.AtomicInteger
 
diff --git a/core/src/main/scala/spark/util/IntParam.scala b/core/src/main/scala/org/apache/spark/util/IntParam.scala
similarity index 97%
rename from core/src/main/scala/spark/util/IntParam.scala
rename to core/src/main/scala/org/apache/spark/util/IntParam.scala
index daf0d58fa25fb80fd62d83ed557f7b8261b5673a..626bb49eeae2fa5fca1471bbc2247540176d5b01 100644
--- a/core/src/main/scala/spark/util/IntParam.scala
+++ b/core/src/main/scala/org/apache/spark/util/IntParam.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.util
+package org.apache.spark.util
 
 /**
  * An extractor object for parsing strings into integers.
diff --git a/core/src/main/scala/spark/util/MemoryParam.scala b/core/src/main/scala/org/apache/spark/util/MemoryParam.scala
similarity index 96%
rename from core/src/main/scala/spark/util/MemoryParam.scala
rename to core/src/main/scala/org/apache/spark/util/MemoryParam.scala
index 298562323ae05124493d8911056e49c1e3ad3c1d..4869c9897a4929e8e346db66bbae979d57c425d1 100644
--- a/core/src/main/scala/spark/util/MemoryParam.scala
+++ b/core/src/main/scala/org/apache/spark/util/MemoryParam.scala
@@ -15,9 +15,7 @@
  * limitations under the License.
  */
 
-package spark.util
-
-import spark.Utils
+package org.apache.spark.util
 
 /**
  * An extractor object for parsing JVM memory strings, such as "10g", into an Int representing
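
The dropped `import spark.Utils` is no longer needed: `MemoryParam` now lives in the same `org.apache.spark.util` package as `Utils`, whose memory-string parsing it reuses. For readers new to the pattern, a simplified, illustrative extractor (not the actual implementation):

    // Illustrative extractor for JVM memory strings such as "10g",
    // yielding megabytes; the real MemoryParam delegates to Utils.
    object MemoryParamSketch {
      def unapply(str: String): Option[Int] = {
        val s = str.toLowerCase
        try {
          if (s.endsWith("g")) Some(s.dropRight(1).toInt * 1024)
          else if (s.endsWith("m")) Some(s.dropRight(1).toInt)
          else None
        } catch {
          case _: NumberFormatException => None
        }
      }
    }

    // Typical use when parsing arguments:
    //   "512m" match { case MemoryParamSketch(mb) => mb }   // 512
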
diff --git a/core/src/main/scala/spark/util/MetadataCleaner.scala b/core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala
similarity index 97%
rename from core/src/main/scala/spark/util/MetadataCleaner.scala
rename to core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala
index 92909e09590a362bbef6c3cf6ad698697fe1cc12..a430a75451173b197378981c8836bb8904f340df 100644
--- a/core/src/main/scala/spark/util/MetadataCleaner.scala
+++ b/core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala
@@ -15,11 +15,11 @@
  * limitations under the License.
  */
 
-package spark.util
+package org.apache.spark.util
 
 import java.util.concurrent.{TimeUnit, ScheduledFuture, Executors}
 import java.util.{TimerTask, Timer}
-import spark.Logging
+import org.apache.spark.Logging
 
 
 /**
diff --git a/core/src/main/scala/spark/util/MutablePair.scala b/core/src/main/scala/org/apache/spark/util/MutablePair.scala
similarity index 97%
rename from core/src/main/scala/spark/util/MutablePair.scala
rename to core/src/main/scala/org/apache/spark/util/MutablePair.scala
index 78d404e66b1e205e92064183a347f46d1eef2f03..34f1f6606fc3fbbdfca834c7843a6d9f2fe4f7c8 100644
--- a/core/src/main/scala/spark/util/MutablePair.scala
+++ b/core/src/main/scala/org/apache/spark/util/MutablePair.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.util
+package org.apache.spark.util
 
 
 /**
diff --git a/core/src/main/scala/spark/util/NextIterator.scala b/core/src/main/scala/org/apache/spark/util/NextIterator.scala
similarity index 98%
rename from core/src/main/scala/spark/util/NextIterator.scala
rename to core/src/main/scala/org/apache/spark/util/NextIterator.scala
index 22163ece8dd21bee8c13418bb7369027a3c2a49c..8266e5e495efcc88d58b0e7f63948fb34f5211de 100644
--- a/core/src/main/scala/spark/util/NextIterator.scala
+++ b/core/src/main/scala/org/apache/spark/util/NextIterator.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.util
+package org.apache.spark.util
 
 /** Provides a basic/boilerplate Iterator implementation. */
 private[spark] abstract class NextIterator[U] extends Iterator[U] {
diff --git a/core/src/main/scala/spark/util/RateLimitedOutputStream.scala b/core/src/main/scala/org/apache/spark/util/RateLimitedOutputStream.scala
similarity index 98%
rename from core/src/main/scala/spark/util/RateLimitedOutputStream.scala
rename to core/src/main/scala/org/apache/spark/util/RateLimitedOutputStream.scala
index 00f782bbe7620f941224e320175c043ecb609cd2..47e1b450043b32b8d9178dc6dce40a9768c318d7 100644
--- a/core/src/main/scala/spark/util/RateLimitedOutputStream.scala
+++ b/core/src/main/scala/org/apache/spark/util/RateLimitedOutputStream.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.util
+package org.apache.spark.util
 
 import scala.annotation.tailrec
 
diff --git a/core/src/main/scala/spark/util/SerializableBuffer.scala b/core/src/main/scala/org/apache/spark/util/SerializableBuffer.scala
similarity index 98%
rename from core/src/main/scala/spark/util/SerializableBuffer.scala
rename to core/src/main/scala/org/apache/spark/util/SerializableBuffer.scala
index 7e6842628a6379f2b4969006ea4e0074c11e0ba7..f2b1ad7d0e91d667cd01b7afc16e91eb78d3fcea 100644
--- a/core/src/main/scala/spark/util/SerializableBuffer.scala
+++ b/core/src/main/scala/org/apache/spark/util/SerializableBuffer.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.util
+package org.apache.spark.util
 
 import java.nio.ByteBuffer
 import java.io.{IOException, ObjectOutputStream, EOFException, ObjectInputStream}
diff --git a/core/src/main/scala/spark/SizeEstimator.scala b/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala
similarity index 99%
rename from core/src/main/scala/spark/SizeEstimator.scala
rename to core/src/main/scala/org/apache/spark/util/SizeEstimator.scala
index 6cc57566d79c3023c63a1bbde4917f7c8f58ce28..a25b37a2a96a9eb2eb567c2cfadad22e7e062582 100644
--- a/core/src/main/scala/spark/SizeEstimator.scala
+++ b/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark.util
 
 import java.lang.reflect.Field
 import java.lang.reflect.Modifier
@@ -30,6 +30,7 @@ import java.lang.management.ManagementFactory
 import scala.collection.mutable.ArrayBuffer
 
 import it.unimi.dsi.fastutil.ints.IntOpenHashSet
+import org.apache.spark.Logging
 
 /**
  * Estimates the sizes of Java objects (number of bytes of memory they occupy), for use in 
diff --git a/core/src/main/scala/spark/util/StatCounter.scala b/core/src/main/scala/org/apache/spark/util/StatCounter.scala
similarity index 99%
rename from core/src/main/scala/spark/util/StatCounter.scala
rename to core/src/main/scala/org/apache/spark/util/StatCounter.scala
index 76358d4151bc9bd53e7cd0892a678087691b0462..020d5edba9530d4eefee2e80579b795b0af73860 100644
--- a/core/src/main/scala/spark/util/StatCounter.scala
+++ b/core/src/main/scala/org/apache/spark/util/StatCounter.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.util
+package org.apache.spark.util
 
 /**
  * A class for tracking the statistics of a set of numbers (count, mean and variance) in a
diff --git a/core/src/main/scala/spark/util/TimeStampedHashMap.scala b/core/src/main/scala/org/apache/spark/util/TimeStampedHashMap.scala
similarity index 95%
rename from core/src/main/scala/spark/util/TimeStampedHashMap.scala
rename to core/src/main/scala/org/apache/spark/util/TimeStampedHashMap.scala
index 07772a0afbefe3e95ff0ddd1b50c6bad27fb1b99..277de2f8a6caa941ca15c4ce7d3d67b663bd7ed8 100644
--- a/core/src/main/scala/spark/util/TimeStampedHashMap.scala
+++ b/core/src/main/scala/org/apache/spark/util/TimeStampedHashMap.scala
@@ -15,13 +15,14 @@
  * limitations under the License.
  */
 
-package spark.util
+package org.apache.spark.util
 
 import java.util.concurrent.ConcurrentHashMap
 import scala.collection.JavaConversions
 import scala.collection.mutable.Map
 import scala.collection.immutable
-import spark.scheduler.MapStatus
+import org.apache.spark.scheduler.MapStatus
+import org.apache.spark.Logging
 
 /**
  * This is a custom implementation of scala.collection.mutable.Map which stores the insertion
@@ -29,7 +30,7 @@ import spark.scheduler.MapStatus
  * threshold time can then be removed using the clearOldValues method. This is intended to be a drop-in
  * replacement for scala.collection.mutable.HashMap.
  */
-class TimeStampedHashMap[A, B] extends Map[A, B]() with spark.Logging {
+class TimeStampedHashMap[A, B] extends Map[A, B]() with Logging {
   val internalMap = new ConcurrentHashMap[A, (B, Long)]()
 
   def get(key: A): Option[B] = {
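
With `Logging` imported explicitly, the declaration shrinks from `with spark.Logging` to `with Logging`. The class itself stores an insertion timestamp next to each value so stale entries can be dropped; a hedged usage sketch (`clearOldValues` is the method named in the doc comment above, the surrounding code is illustrative only):

    import org.apache.spark.util.TimeStampedHashMap

    // Illustrative only: record metadata, then periodically evict entries
    // whose insertion time is older than a threshold.
    val stages = new TimeStampedHashMap[Int, String]()
    stages.put(0, "stage-0")
    stages.put(1, "stage-1")

    val tenMinutesAgo = System.currentTimeMillis() - 10 * 60 * 1000
    stages.clearOldValues(tenMinutesAgo)   // drops entries inserted before then
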
diff --git a/core/src/main/scala/spark/util/TimeStampedHashSet.scala b/core/src/main/scala/org/apache/spark/util/TimeStampedHashSet.scala
similarity index 98%
rename from core/src/main/scala/spark/util/TimeStampedHashSet.scala
rename to core/src/main/scala/org/apache/spark/util/TimeStampedHashSet.scala
index 41e3fd8cba1cf4bb67611da21e5c3baffaabeee1..26983138ff0da984c3e97b4149ec4ebd16b712e8 100644
--- a/core/src/main/scala/spark/util/TimeStampedHashSet.scala
+++ b/core/src/main/scala/org/apache/spark/util/TimeStampedHashSet.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.util
+package org.apache.spark.util
 
 import scala.collection.mutable.Set
 import scala.collection.JavaConversions
diff --git a/core/src/main/scala/spark/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
similarity index 98%
rename from core/src/main/scala/spark/Utils.scala
rename to core/src/main/scala/org/apache/spark/util/Utils.scala
index bb8aad3f4ce0a316085be423152f266375fdd4fb..bb47fc0a2ca3b90c3a26aa94f67e017c3ddfb4c0 100644
--- a/core/src/main/scala/spark/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark.util
 
 import java.io._
 import java.net.{InetAddress, URL, URI, NetworkInterface, Inet4Address, ServerSocket}
@@ -33,15 +33,16 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder
 
 import org.apache.hadoop.fs.{Path, FileSystem, FileUtil}
 
-import spark.serializer.{DeserializationStream, SerializationStream, SerializerInstance}
-import spark.deploy.SparkHadoopUtil
+import org.apache.spark.serializer.{DeserializationStream, SerializationStream, SerializerInstance}
+import org.apache.spark.deploy.SparkHadoopUtil
 import java.nio.ByteBuffer
+import org.apache.spark.{SparkEnv, SparkException, Logging}
 
 
 /**
  * Various utility methods used by Spark.
  */
-private object Utils extends Logging {
+private[spark] object Utils extends Logging {
 
   /** Serialize an object using Java serialization */
   def serialize[T](o: T): Array[Byte] = {
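
Besides the package move, the one behavioral change in this file is the access modifier: `private object Utils` was visible only inside the old flat `spark` package, while `private[spark]` opens it to every subpackage of `org.apache.spark` (such as `org.apache.spark.ui` and `org.apache.spark.scheduler`) while keeping it out of user code. A minimal sketch of the qualifier with hypothetical names:

    package org.apache.spark.util {
      // Accessible anywhere under org.apache.spark, hidden from the outside.
      private[spark] object HelperSketch {
        def describe(n: Long): String = n + " bytes"
      }
    }

    package org.apache.spark.scheduler {
      object Consumer {
        // Compiles: org.apache.spark.scheduler is inside org.apache.spark.
        val msg = org.apache.spark.util.HelperSketch.describe(1024L)
      }
    }
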
diff --git a/core/src/main/scala/spark/util/Vector.scala b/core/src/main/scala/org/apache/spark/util/Vector.scala
similarity index 97%
rename from core/src/main/scala/spark/util/Vector.scala
rename to core/src/main/scala/org/apache/spark/util/Vector.scala
index a47cac3b96830301c62f1fc0cdba19204bc3f4b7..fe710c58acc44b53a0b962033fe3f8ad6435572c 100644
--- a/core/src/main/scala/spark/util/Vector.scala
+++ b/core/src/main/scala/org/apache/spark/util/Vector.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.util
+package org.apache.spark.util
 
 class Vector(val elements: Array[Double]) extends Serializable {
   def length = elements.length
@@ -130,7 +130,7 @@ object Vector {
 
   implicit def doubleToMultiplier(num: Double) = new Multiplier(num)
 
-  implicit object VectorAccumParam extends spark.AccumulatorParam[Vector] {
+  implicit object VectorAccumParam extends org.apache.spark.AccumulatorParam[Vector] {
     def addInPlace(t1: Vector, t2: Vector) = t1 + t2
 
     def zero(initialValue: Vector) = Vector.zeros(initialValue.length)
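
The implicit accumulator parameter now has to spell out `org.apache.spark.AccumulatorParam`, since `Vector` no longer lives in a subpackage of `spark`. With the implicit in scope, a `Vector` works directly as an accumulator; a hedged sketch assuming an existing `SparkContext` named `sc`:

    import org.apache.spark.util.Vector
    import org.apache.spark.util.Vector._   // brings VectorAccumParam into scope

    // Illustrative only: element-wise vector sums via an accumulator.
    val acc = sc.accumulator(Vector.zeros(3))
    sc.parallelize(1 to 10).foreach { i =>
      acc += Vector(i.toDouble, 0.0, 0.0)
    }
    println(acc.value)   // first component is 55.0, the sum of 1..10
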
diff --git a/core/src/main/scala/spark/package.scala b/core/src/main/scala/spark/package.scala
deleted file mode 100644
index b244bfbf069b71affb990159baa44cbd8f2d06f3..0000000000000000000000000000000000000000
--- a/core/src/main/scala/spark/package.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Core Spark functionality. [[spark.SparkContext]] serves as the main entry point to Spark, while
- * [[spark.RDD]] is the data type representing a distributed collection, and provides most
- * parallel operations. 
- *
- * In addition, [[spark.PairRDDFunctions]] contains operations available only on RDDs of key-value
- * pairs, such as `groupByKey` and `join`; [[spark.DoubleRDDFunctions]] contains operations
- * available only on RDDs of Doubles; and [[spark.SequenceFileRDDFunctions]] contains operations
- * available on RDDs that can be saved as SequenceFiles. These operations are automatically
- * available on any RDD of the right type (e.g. RDD[(Int, Int)] through implicit conversions when
- * you `import spark.SparkContext._`.
- */
-package object spark { 
-  // For package docs only
-}
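
The deleted package object only carried Scaladoc; the behavior it documents is unchanged apart from the import path. Key-value operations such as `groupByKey` still appear on pair RDDs through implicit conversions, now via `org.apache.spark.SparkContext._`; a hedged sketch assuming an existing `SparkContext` named `sc`:

    import org.apache.spark.SparkContext._   // implicit conversions for pair RDDs

    // Illustrative only: groupByKey is added to RDD[(K, V)] by an implicit
    // conversion in SparkContext, exactly as the removed docs describe.
    val pairs = sc.parallelize(Seq(("a", 1), ("a", 2), ("b", 3)))
    val grouped = pairs.groupByKey()
    grouped.collect().foreach(println)
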
diff --git a/core/src/test/resources/test_metrics_config.properties b/core/src/test/resources/test_metrics_config.properties
index 2b31ddf2ebca651d7e2273c013e1e2dd9345b056..056a15845698c99acfdcc707cf8fe50ff3af51dd 100644
--- a/core/src/test/resources/test_metrics_config.properties
+++ b/core/src/test/resources/test_metrics_config.properties
@@ -1,6 +1,6 @@
 *.sink.console.period = 10
 *.sink.console.unit = seconds
-*.source.jvm.class = spark.metrics.source.JvmSource
+*.source.jvm.class = org.apache.spark.metrics.source.JvmSource
 master.sink.console.period = 20
 master.sink.console.unit = minutes
 
diff --git a/core/src/test/resources/test_metrics_system.properties b/core/src/test/resources/test_metrics_system.properties
index d5479f02980af9c96bc3202e109277a435ea69d4..6f5ecea93a395af5140a868befd56ff267b667a5 100644
--- a/core/src/test/resources/test_metrics_system.properties
+++ b/core/src/test/resources/test_metrics_system.properties
@@ -1,7 +1,7 @@
 *.sink.console.period = 10
 *.sink.console.unit = seconds
-test.sink.console.class = spark.metrics.sink.ConsoleSink
-test.sink.dummy.class = spark.metrics.sink.DummySink
-test.source.dummy.class = spark.metrics.source.DummySource
+test.sink.console.class = org.apache.spark.metrics.sink.ConsoleSink
+test.sink.dummy.class = org.apache.spark.metrics.sink.DummySink
+test.source.dummy.class = org.apache.spark.metrics.source.DummySource
 test.sink.console.period = 20
 test.sink.console.unit = minutes
diff --git a/core/src/test/scala/spark/AccumulatorSuite.scala b/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
similarity index 98%
rename from core/src/test/scala/spark/AccumulatorSuite.scala
rename to core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
index 0af175f3168d5d7b7f7b159f45d10f9179719747..4434f3b87c920aa9b9142b3fa1cb77753d63b2da 100644
--- a/core/src/test/scala/spark/AccumulatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import org.scalatest.FunSuite
 import org.scalatest.matchers.ShouldMatchers
@@ -23,7 +23,7 @@ import collection.mutable
 import java.util.Random
 import scala.math.exp
 import scala.math.signum
-import spark.SparkContext._
+import org.apache.spark.SparkContext._
 
 class AccumulatorSuite extends FunSuite with ShouldMatchers with LocalSparkContext {
 
diff --git a/core/src/test/scala/spark/BroadcastSuite.scala b/core/src/test/scala/org/apache/spark/BroadcastSuite.scala
similarity index 98%
rename from core/src/test/scala/spark/BroadcastSuite.scala
rename to core/src/test/scala/org/apache/spark/BroadcastSuite.scala
index 785721ece86ac2b4b557a97fbb5b254075e7d0d4..b3a53d928b46e2b9dc6802c3eb78e70e4f8514c8 100644
--- a/core/src/test/scala/spark/BroadcastSuite.scala
+++ b/core/src/test/scala/org/apache/spark/BroadcastSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import org.scalatest.FunSuite
 
diff --git a/core/src/test/scala/spark/CheckpointSuite.scala b/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
similarity index 99%
rename from core/src/test/scala/spark/CheckpointSuite.scala
rename to core/src/test/scala/org/apache/spark/CheckpointSuite.scala
index 966dede2be12a8ba626f49c69320e76057bb59d6..d9103aebb7f5405c2aee75b081b58b7bcfd69e19 100644
--- a/core/src/test/scala/spark/CheckpointSuite.scala
+++ b/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
@@ -15,13 +15,14 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import org.scalatest.FunSuite
 import java.io.File
-import spark.rdd._
-import spark.SparkContext._
+import org.apache.spark.rdd._
+import org.apache.spark.SparkContext._
 import storage.StorageLevel
+import org.apache.spark.util.Utils
 
 class CheckpointSuite extends FunSuite with LocalSparkContext with Logging {
   initLogging()
diff --git a/core/src/test/scala/spark/DistributedSuite.scala b/core/src/test/scala/org/apache/spark/DistributedSuite.scala
similarity index 99%
rename from core/src/test/scala/spark/DistributedSuite.scala
rename to core/src/test/scala/org/apache/spark/DistributedSuite.scala
index e11efe459c8adb74ffba3ee84c45f57529f44220..7a856d40817ee608151079a3224e4867c4285b70 100644
--- a/core/src/test/scala/spark/DistributedSuite.scala
+++ b/core/src/test/scala/org/apache/spark/DistributedSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import network.ConnectionManagerId
 import org.scalatest.FunSuite
diff --git a/core/src/test/scala/spark/DriverSuite.scala b/core/src/test/scala/org/apache/spark/DriverSuite.scala
similarity index 92%
rename from core/src/test/scala/spark/DriverSuite.scala
rename to core/src/test/scala/org/apache/spark/DriverSuite.scala
index 553c0309f66fa5d4c5ca58202e3f4723d5fdd018..01a72d8401636f5f47f027312601a5fa642a6441 100644
--- a/core/src/test/scala/spark/DriverSuite.scala
+++ b/core/src/test/scala/org/apache/spark/DriverSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import java.io.File
 
@@ -26,6 +26,7 @@ import org.scalatest.FunSuite
 import org.scalatest.concurrent.Timeouts
 import org.scalatest.prop.TableDrivenPropertyChecks._
 import org.scalatest.time.SpanSugar._
+import org.apache.spark.util.Utils
 
 class DriverSuite extends FunSuite with Timeouts {
   test("driver should exit after finishing") {
@@ -34,7 +35,7 @@ class DriverSuite extends FunSuite with Timeouts {
     val masters = Table(("master"), ("local"), ("local-cluster[2,1,512]"))
     forAll(masters) { (master: String) =>
       failAfter(30 seconds) {
-        Utils.execute(Seq("./spark-class", "spark.DriverWithoutCleanup", master),
+        Utils.execute(Seq("./spark-class", "org.apache.spark.DriverWithoutCleanup", master),
           new File(System.getenv("SPARK_HOME")))
       }
     }
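
Worth calling out: renames must also chase class names that appear only inside strings, which the compiler cannot verify; here the `spark-class` argument becomes `org.apache.spark.DriverWithoutCleanup`. One cheap, illustrative guard is to resolve such names reflectively in a test:

    // Illustrative only: fail fast if a string-referenced class was missed
    // during the rename (a mistake the compiler cannot catch).
    assert(Class.forName("org.apache.spark.DriverWithoutCleanup") != null)
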
diff --git a/core/src/test/scala/spark/FailureSuite.scala b/core/src/test/scala/org/apache/spark/FailureSuite.scala
similarity index 98%
rename from core/src/test/scala/spark/FailureSuite.scala
rename to core/src/test/scala/org/apache/spark/FailureSuite.scala
index 5b133cdd6e867947bb324b068fe352cefd3c1178..af448fcb37a1f2676ea2a68e8b5f48f6c492e609 100644
--- a/core/src/test/scala/spark/FailureSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FailureSuite.scala
@@ -15,11 +15,12 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import org.scalatest.FunSuite
 
 import SparkContext._
+import org.apache.spark.util.NonSerializable
 
 // Common state shared by FailureSuite-launched tasks. We use a global object
 // for this because any local variables used in the task closures will rightfully
diff --git a/core/src/test/scala/spark/FileServerSuite.scala b/core/src/test/scala/org/apache/spark/FileServerSuite.scala
similarity index 99%
rename from core/src/test/scala/spark/FileServerSuite.scala
rename to core/src/test/scala/org/apache/spark/FileServerSuite.scala
index 242ae971f859b5015252e16f6fb9109559c58c06..35d1d41af175b419f1fa18cbff60cdc5bf98eeff 100644
--- a/core/src/test/scala/spark/FileServerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FileServerSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import com.google.common.io.Files
 import org.scalatest.FunSuite
diff --git a/core/src/test/scala/spark/FileSuite.scala b/core/src/test/scala/org/apache/spark/FileSuite.scala
similarity index 99%
rename from core/src/test/scala/spark/FileSuite.scala
rename to core/src/test/scala/org/apache/spark/FileSuite.scala
index 1e2c257c4b224e6c8bd89f0b894433b3b171d423..7b82a4cdd9cfd2b5f5b06239d5fc96d15bcade08 100644
--- a/core/src/test/scala/spark/FileSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FileSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import java.io.{FileWriter, PrintWriter, File}
 
diff --git a/core/src/test/scala/spark/JavaAPISuite.java b/core/src/test/scala/org/apache/spark/JavaAPISuite.java
similarity index 98%
rename from core/src/test/scala/spark/JavaAPISuite.java
rename to core/src/test/scala/org/apache/spark/JavaAPISuite.java
index c337c49268ad759deaee29bf52f2fdbadd727e93..8a869c9005fb61280adfec2550bc3f12253e12b4 100644
--- a/core/src/test/scala/spark/JavaAPISuite.java
+++ b/core/src/test/scala/org/apache/spark/JavaAPISuite.java
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark;
+package org.apache.spark;
 
 import java.io.File;
 import java.io.IOException;
@@ -38,15 +38,15 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-import spark.api.java.JavaDoubleRDD;
-import spark.api.java.JavaPairRDD;
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
-import spark.api.java.function.*;
-import spark.partial.BoundedDouble;
-import spark.partial.PartialResult;
-import spark.storage.StorageLevel;
-import spark.util.StatCounter;
+import org.apache.spark.api.java.JavaDoubleRDD;
+import org.apache.spark.api.java.JavaPairRDD;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.*;
+import org.apache.spark.partial.BoundedDouble;
+import org.apache.spark.partial.PartialResult;
+import org.apache.spark.storage.StorageLevel;
+import org.apache.spark.util.StatCounter;
 
 
 // The test suite itself is Serializable so that anonymous Function implementations can be
diff --git a/core/src/test/scala/spark/LocalSparkContext.scala b/core/src/test/scala/org/apache/spark/LocalSparkContext.scala
similarity index 98%
rename from core/src/test/scala/spark/LocalSparkContext.scala
rename to core/src/test/scala/org/apache/spark/LocalSparkContext.scala
index ddc212d290dcca421c27bbac0f19ce51e64ab7fa..6ec124da9c7b17a933717e0f195119fc558b9f5f 100644
--- a/core/src/test/scala/spark/LocalSparkContext.scala
+++ b/core/src/test/scala/org/apache/spark/LocalSparkContext.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import org.scalatest.Suite
 import org.scalatest.BeforeAndAfterEach
diff --git a/core/src/test/scala/spark/MapOutputTrackerSuite.scala b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
similarity index 97%
rename from core/src/test/scala/spark/MapOutputTrackerSuite.scala
rename to core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
index c21f3331d0876b769092e3454f496b96b7cda49a..6013320eaab730c202acce87de87714c99af65fb 100644
--- a/core/src/test/scala/spark/MapOutputTrackerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
@@ -15,14 +15,14 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import org.scalatest.FunSuite
 
 import akka.actor._
-import spark.scheduler.MapStatus
-import spark.storage.BlockManagerId
-import spark.util.AkkaUtils
+import org.apache.spark.scheduler.MapStatus
+import org.apache.spark.storage.BlockManagerId
+import org.apache.spark.util.AkkaUtils
 
 class MapOutputTrackerSuite extends FunSuite with LocalSparkContext {
 
diff --git a/core/src/test/scala/spark/PartitionPruningRDDSuite.scala b/core/src/test/scala/org/apache/spark/PartitionPruningRDDSuite.scala
similarity index 86%
rename from core/src/test/scala/spark/PartitionPruningRDDSuite.scala
rename to core/src/test/scala/org/apache/spark/PartitionPruningRDDSuite.scala
index 88352b639f9b1d953246bc438a82a0e364a7f1bf..5a18dd13ff57c322ef56a5ccf750ae0d2f217ba3 100644
--- a/core/src/test/scala/spark/PartitionPruningRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/PartitionPruningRDDSuite.scala
@@ -1,8 +1,8 @@
-package spark
+package org.apache.spark
 
 import org.scalatest.FunSuite
-import spark.SparkContext._
-import spark.rdd.PartitionPruningRDD
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.{RDD, PartitionPruningRDD}
 
 
 class PartitionPruningRDDSuite extends FunSuite with SharedSparkContext {
diff --git a/core/src/test/scala/spark/PartitioningSuite.scala b/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
similarity index 97%
rename from core/src/test/scala/spark/PartitioningSuite.scala
rename to core/src/test/scala/org/apache/spark/PartitioningSuite.scala
index b1e0b2b4d061cb3af2265d6acc081f03ed8a80cb..7d938917f2650754f93ed019ace4d02a4cf6d349 100644
--- a/core/src/test/scala/spark/PartitioningSuite.scala
+++ b/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
@@ -15,13 +15,16 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
-import org.scalatest.FunSuite
-import scala.collection.mutable.ArrayBuffer
-import SparkContext._
-import spark.util.StatCounter
 import scala.math.abs
+import scala.collection.mutable.ArrayBuffer
+
+import org.scalatest.FunSuite
+
+import org.apache.spark.SparkContext._
+import org.apache.spark.util.StatCounter
+import org.apache.spark.rdd.RDD
 
 class PartitioningSuite extends FunSuite with SharedSparkContext {
 
diff --git a/core/src/test/scala/spark/PipedRDDSuite.scala b/core/src/test/scala/org/apache/spark/PipedRDDSuite.scala
similarity index 99%
rename from core/src/test/scala/spark/PipedRDDSuite.scala
rename to core/src/test/scala/org/apache/spark/PipedRDDSuite.scala
index 35c04710a39556aad7057dc507c24618fa8129b3..2e851d892dd0d5f4ec3628a2d945c172d3b16d0b 100644
--- a/core/src/test/scala/spark/PipedRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/PipedRDDSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import org.scalatest.FunSuite
 import SparkContext._
diff --git a/core/src/test/scala/spark/SharedSparkContext.scala b/core/src/test/scala/org/apache/spark/SharedSparkContext.scala
similarity index 98%
rename from core/src/test/scala/spark/SharedSparkContext.scala
rename to core/src/test/scala/org/apache/spark/SharedSparkContext.scala
index 70c24515be1571defaefc6c6605c9bad7b77ae02..97cbca09bfa2687939894b8cf5281b4cbbfa6c95 100644
--- a/core/src/test/scala/spark/SharedSparkContext.scala
+++ b/core/src/test/scala/org/apache/spark/SharedSparkContext.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import org.scalatest.Suite
 import org.scalatest.BeforeAndAfterAll
diff --git a/core/src/test/scala/spark/ShuffleNettySuite.scala b/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
similarity index 98%
rename from core/src/test/scala/spark/ShuffleNettySuite.scala
rename to core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
index 6bad6c1d13deaffdfecea2c2e2179b95ad9c8926..e121b162ad9e6243c437a32473603e496e9efd9e 100644
--- a/core/src/test/scala/spark/ShuffleNettySuite.scala
+++ b/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import org.scalatest.BeforeAndAfterAll
 
diff --git a/core/src/test/scala/spark/ShuffleSuite.scala b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
similarity index 94%
rename from core/src/test/scala/spark/ShuffleSuite.scala
rename to core/src/test/scala/org/apache/spark/ShuffleSuite.scala
index 8745689c70bca55f8b11a6579cfdbc0ac9e00e2a..db717865db5d00403463da020dbc78d0188dd5fd 100644
--- a/core/src/test/scala/spark/ShuffleSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
@@ -15,15 +15,16 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import org.scalatest.FunSuite
 import org.scalatest.matchers.ShouldMatchers
 
-import spark.SparkContext._
-import spark.ShuffleSuite.NonJavaSerializableClass
-import spark.rdd.{SubtractedRDD, CoGroupedRDD, OrderedRDDFunctions, ShuffledRDD}
-import spark.util.MutablePair
+import org.apache.spark.SparkContext._
+import org.apache.spark.ShuffleSuite.NonJavaSerializableClass
+import org.apache.spark.rdd.{RDD, SubtractedRDD, CoGroupedRDD, OrderedRDDFunctions, ShuffledRDD}
+import org.apache.spark.util.MutablePair
+import org.apache.spark.serializer.KryoSerializer
 
 
 class ShuffleSuite extends FunSuite with ShouldMatchers with LocalSparkContext {
@@ -54,7 +55,7 @@ class ShuffleSuite extends FunSuite with ShouldMatchers with LocalSparkContext {
     // If the Kryo serializer is not used correctly, the shuffle would fail because the
     // default Java serializer cannot handle the non-serializable class.
     val c = new ShuffledRDD[Int, NonJavaSerializableClass, (Int, NonJavaSerializableClass)](
-      b, new HashPartitioner(NUM_BLOCKS)).setSerializer(classOf[spark.KryoSerializer].getName)
+      b, new HashPartitioner(NUM_BLOCKS)).setSerializer(classOf[KryoSerializer].getName)
     val shuffleId = c.dependencies.head.asInstanceOf[ShuffleDependency[Int, Int]].shuffleId
 
     assert(c.count === 10)
@@ -76,7 +77,7 @@ class ShuffleSuite extends FunSuite with ShouldMatchers with LocalSparkContext {
     // If the Kryo serializer is not used correctly, the shuffle would fail because the
     // default Java serializer cannot handle the non-serializable class.
     val c = new ShuffledRDD[Int, NonJavaSerializableClass, (Int, NonJavaSerializableClass)](
-      b, new HashPartitioner(3)).setSerializer(classOf[spark.KryoSerializer].getName)
+      b, new HashPartitioner(3)).setSerializer(classOf[KryoSerializer].getName)
     assert(c.count === 10)
   }
 
@@ -92,7 +93,7 @@ class ShuffleSuite extends FunSuite with ShouldMatchers with LocalSparkContext {
     // NOTE: The default Java serializer doesn't create zero-sized blocks.
     //       So, use Kryo
     val c = new ShuffledRDD[Int, Int, (Int, Int)](b, new HashPartitioner(10))
-      .setSerializer(classOf[spark.KryoSerializer].getName)
+      .setSerializer(classOf[KryoSerializer].getName)
 
     val shuffleId = c.dependencies.head.asInstanceOf[ShuffleDependency[Int, Int]].shuffleId
     assert(c.count === 4)
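
These hunks also show the string-based serializer plumbing: `setSerializer` takes a fully qualified class name, so the value changes from `classOf[spark.KryoSerializer].getName` to the same call on the imported `org.apache.spark.serializer.KryoSerializer`. A hedged standalone sketch of the same call shape:

    import org.apache.spark.{HashPartitioner, SparkContext}
    import org.apache.spark.rdd.ShuffledRDD
    import org.apache.spark.serializer.KryoSerializer

    // Illustrative only: run a shuffle whose serializer is selected by class name.
    val sc = new SparkContext("local", "serializer-sketch")
    val b  = sc.parallelize(1 to 10).map(i => (i % 3, i))
    val c  = new ShuffledRDD[Int, Int, (Int, Int)](b, new HashPartitioner(3))
      .setSerializer(classOf[KryoSerializer].getName)
    assert(c.count == 10)
    sc.stop()
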
diff --git a/core/src/test/scala/spark/SparkContextInfoSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala
similarity index 96%
rename from core/src/test/scala/spark/SparkContextInfoSuite.scala
rename to core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala
index 6d50bf5e1b31b10718edcbc5f50394a501135fe7..939fe518012d0c17c9b319c2d94c08f9ffa7e7f6 100644
--- a/core/src/test/scala/spark/SparkContextInfoSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import org.scalatest.FunSuite
-import spark.SparkContext._
+import org.apache.spark.SparkContext._
 
 class SparkContextInfoSuite extends FunSuite with LocalSparkContext {
   test("getPersistentRDDs only returns RDDs that are marked as cached") {
@@ -57,4 +57,4 @@ class SparkContextInfoSuite extends FunSuite with LocalSparkContext {
     rdd.collect()
     assert(sc.getRDDStorageInfo.size === 1)
   }
-}
\ No newline at end of file
+}
diff --git a/core/src/test/scala/spark/ThreadingSuite.scala b/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
similarity index 99%
rename from core/src/test/scala/spark/ThreadingSuite.scala
rename to core/src/test/scala/org/apache/spark/ThreadingSuite.scala
index f2acd0bd3c1db30b2dd56de2b7b5ad9dea9b97f0..69383ddfb8bc7471653ad5d9234fa45f9bbb9737 100644
--- a/core/src/test/scala/spark/ThreadingSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import java.util.concurrent.Semaphore
 import java.util.concurrent.atomic.AtomicBoolean
diff --git a/core/src/test/scala/spark/UnpersistSuite.scala b/core/src/test/scala/org/apache/spark/UnpersistSuite.scala
similarity index 96%
rename from core/src/test/scala/spark/UnpersistSuite.scala
rename to core/src/test/scala/org/apache/spark/UnpersistSuite.scala
index 93977d16f4ccd1361eda641e5d356846c07ea7f2..46a2da172407f009f4895e5d4d7efa362b0bf398 100644
--- a/core/src/test/scala/spark/UnpersistSuite.scala
+++ b/core/src/test/scala/org/apache/spark/UnpersistSuite.scala
@@ -15,12 +15,12 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import org.scalatest.FunSuite
 import org.scalatest.concurrent.Timeouts._
 import org.scalatest.time.{Span, Millis}
-import spark.SparkContext._
+import org.apache.spark.SparkContext._
 
 class UnpersistSuite extends FunSuite with LocalSparkContext {
   test("unpersist RDD") {
diff --git a/core/src/test/scala/spark/ZippedPartitionsSuite.scala b/core/src/test/scala/org/apache/spark/ZippedPartitionsSuite.scala
similarity index 98%
rename from core/src/test/scala/spark/ZippedPartitionsSuite.scala
rename to core/src/test/scala/org/apache/spark/ZippedPartitionsSuite.scala
index bb5d379273b3db9ffa2f7aa0ae3d1f093daff7d3..618b9c113b849d765ee1fad04ec4ac415b6c8a96 100644
--- a/core/src/test/scala/spark/ZippedPartitionsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ZippedPartitionsSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import scala.collection.immutable.NumericRange
 
diff --git a/core/src/test/scala/spark/io/CompressionCodecSuite.scala b/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala
similarity index 98%
rename from core/src/test/scala/spark/io/CompressionCodecSuite.scala
rename to core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala
index 1ba82fe2b92b0b53d27c4ab0a7d1cc9cf1b6b828..fd6f69041a94fc3cb0d2108869c62785bd12a397 100644
--- a/core/src/test/scala/spark/io/CompressionCodecSuite.scala
+++ b/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.io
+package org.apache.spark.io
 
 import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
 
diff --git a/core/src/test/scala/spark/metrics/MetricsConfigSuite.scala b/core/src/test/scala/org/apache/spark/metrics/MetricsConfigSuite.scala
similarity index 82%
rename from core/src/test/scala/spark/metrics/MetricsConfigSuite.scala
rename to core/src/test/scala/org/apache/spark/metrics/MetricsConfigSuite.scala
index b0213b62d9c993168a4e3385d801134e239df4e7..58c94a162d4a4122fe0dcef9a0defedc773c5cda 100644
--- a/core/src/test/scala/spark/metrics/MetricsConfigSuite.scala
+++ b/core/src/test/scala/org/apache/spark/metrics/MetricsConfigSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.metrics
+package org.apache.spark.metrics
 
 import org.scalatest.{BeforeAndAfter, FunSuite}
 
@@ -35,7 +35,7 @@ class MetricsConfigSuite extends FunSuite with BeforeAndAfter {
 
     val property = conf.getInstance("random")
     assert(property.size() === 3)
-    assert(property.getProperty("sink.servlet.class") === "spark.metrics.sink.MetricsServlet")
+    assert(property.getProperty("sink.servlet.class") === "org.apache.spark.metrics.sink.MetricsServlet")
     assert(property.getProperty("sink.servlet.uri") === "/metrics/json")
     assert(property.getProperty("sink.servlet.sample") === "false")
   }
@@ -48,8 +48,8 @@ class MetricsConfigSuite extends FunSuite with BeforeAndAfter {
     assert(masterProp.size() === 6)
     assert(masterProp.getProperty("sink.console.period") === "20")
     assert(masterProp.getProperty("sink.console.unit") === "minutes")
-    assert(masterProp.getProperty("source.jvm.class") === "spark.metrics.source.JvmSource")
-    assert(masterProp.getProperty("sink.servlet.class") === "spark.metrics.sink.MetricsServlet")
+    assert(masterProp.getProperty("source.jvm.class") === "org.apache.spark.metrics.source.JvmSource")
+    assert(masterProp.getProperty("sink.servlet.class") === "org.apache.spark.metrics.sink.MetricsServlet")
     assert(masterProp.getProperty("sink.servlet.uri") === "/metrics/master/json")
     assert(masterProp.getProperty("sink.servlet.sample") === "false")
 
@@ -57,8 +57,8 @@ class MetricsConfigSuite extends FunSuite with BeforeAndAfter {
     assert(workerProp.size() === 6)
     assert(workerProp.getProperty("sink.console.period") === "10")
     assert(workerProp.getProperty("sink.console.unit") === "seconds")
-    assert(workerProp.getProperty("source.jvm.class") === "spark.metrics.source.JvmSource")
-    assert(workerProp.getProperty("sink.servlet.class") === "spark.metrics.sink.MetricsServlet")
+    assert(workerProp.getProperty("source.jvm.class") === "org.apache.spark.metrics.source.JvmSource")
+    assert(workerProp.getProperty("sink.servlet.class") === "org.apache.spark.metrics.sink.MetricsServlet")
     assert(workerProp.getProperty("sink.servlet.uri") === "/metrics/json")
     assert(workerProp.getProperty("sink.servlet.sample") === "false")
   }
@@ -73,7 +73,7 @@ class MetricsConfigSuite extends FunSuite with BeforeAndAfter {
     val masterProp = conf.getInstance("master")
     val sourceProps = conf.subProperties(masterProp, MetricsSystem.SOURCE_REGEX)
     assert(sourceProps.size === 1)
-    assert(sourceProps("jvm").getProperty("class") === "spark.metrics.source.JvmSource")
+    assert(sourceProps("jvm").getProperty("class") === "org.apache.spark.metrics.source.JvmSource")
 
     val sinkProps = conf.subProperties(masterProp, MetricsSystem.SINK_REGEX)
     assert(sinkProps.size === 2)
diff --git a/core/src/test/scala/spark/metrics/MetricsSystemSuite.scala b/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala
similarity index 93%
rename from core/src/test/scala/spark/metrics/MetricsSystemSuite.scala
rename to core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala
index dc65ac6994b74e8d5e7290ee34860d6667512a07..7181333adfe689461d7ac93d131c8dd5dd4f4872 100644
--- a/core/src/test/scala/spark/metrics/MetricsSystemSuite.scala
+++ b/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala
@@ -15,9 +15,10 @@
  * limitations under the License.
  */
 
-package spark.metrics
+package org.apache.spark.metrics
 
 import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.apache.spark.deploy.master.MasterSource
 
 class MetricsSystemSuite extends FunSuite with BeforeAndAfter {
   var filePath: String = _
@@ -46,7 +47,7 @@ class MetricsSystemSuite extends FunSuite with BeforeAndAfter {
     assert(sinks.length === 1)
     assert(!metricsSystem.getServletHandlers.isEmpty)
 
-    val source = new spark.deploy.master.MasterSource(null)
+    val source = new MasterSource(null)
     metricsSystem.registerSource(source)
     assert(sources.length === 1)
   }
diff --git a/core/src/test/scala/spark/rdd/JdbcRDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/JdbcRDDSuite.scala
similarity index 95%
rename from core/src/test/scala/spark/rdd/JdbcRDDSuite.scala
rename to core/src/test/scala/org/apache/spark/rdd/JdbcRDDSuite.scala
index dc8ca941c105a3c43e59888cff7b0ae98a64a300..3d39a31252e5e7a4cd5c09657a692d7b58dcc01c 100644
--- a/core/src/test/scala/spark/rdd/JdbcRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/JdbcRDDSuite.scala
@@ -15,11 +15,11 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark
 
 import org.scalatest.{ BeforeAndAfter, FunSuite }
-import spark.SparkContext._
-import spark.rdd.JdbcRDD
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.JdbcRDD
 import java.sql._
 
 class JdbcRDDSuite extends FunSuite with BeforeAndAfter with LocalSparkContext {
diff --git a/core/src/test/scala/spark/PairRDDFunctionsSuite.scala b/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala
similarity index 98%
rename from core/src/test/scala/spark/PairRDDFunctionsSuite.scala
rename to core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala
index 328b3b5497ea884f4c9039c672f24398b4ca2fd5..31f97fc1391e862a04a71aee36e0438aba4245bb 100644
--- a/core/src/test/scala/spark/PairRDDFunctionsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark.rdd
 
 import scala.collection.mutable.ArrayBuffer
 import scala.collection.mutable.HashSet
@@ -23,7 +23,8 @@ import scala.collection.mutable.HashSet
 import org.scalatest.FunSuite
 
 import com.google.common.io.Files
-import spark.SparkContext._
+import org.apache.spark.SparkContext._
+import org.apache.spark.{Partitioner, SharedSparkContext}
 
 
 class PairRDDFunctionsSuite extends FunSuite with SharedSparkContext {
diff --git a/core/src/test/scala/spark/rdd/ParallelCollectionSplitSuite.scala b/core/src/test/scala/org/apache/spark/rdd/ParallelCollectionSplitSuite.scala
similarity index 99%
rename from core/src/test/scala/spark/rdd/ParallelCollectionSplitSuite.scala
rename to core/src/test/scala/org/apache/spark/rdd/ParallelCollectionSplitSuite.scala
index d1276d541f7336fb530d7736bfccddf7ef417b2b..a80afdee7e76943b2e6a2591aa8d38fe86543337 100644
--- a/core/src/test/scala/spark/rdd/ParallelCollectionSplitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/ParallelCollectionSplitSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.rdd
+package org.apache.spark.rdd
 
 import scala.collection.immutable.NumericRange
 
diff --git a/core/src/test/scala/spark/RDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
similarity index 98%
rename from core/src/test/scala/spark/RDDSuite.scala
rename to core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
index e306952bbd63fdc53c54f38db74f278d2080b0ea..adc971050e120b7eb3e51153122a578809b1bee5 100644
--- a/core/src/test/scala/spark/RDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
@@ -15,15 +15,17 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark.rdd
 
 import scala.collection.mutable.HashMap
 import org.scalatest.FunSuite
 import org.scalatest.concurrent.Timeouts._
 import org.scalatest.time.{Span, Millis}
-import spark.SparkContext._
-import spark.rdd._
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd._
 import scala.collection.parallel.mutable
+import org.apache.spark._
+import org.apache.spark.rdd.CoalescedRDDPartition
 
 class RDDSuite extends FunSuite with SharedSparkContext {
 
diff --git a/core/src/test/scala/spark/SortingSuite.scala b/core/src/test/scala/org/apache/spark/rdd/SortingSuite.scala
similarity index 97%
rename from core/src/test/scala/spark/SortingSuite.scala
rename to core/src/test/scala/org/apache/spark/rdd/SortingSuite.scala
index b933c4aab835ce6a971c00b6564b99934c366bad..2f7bd370fc4ab1e4e601a044d71bfd90561beca0 100644
--- a/core/src/test/scala/spark/SortingSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/SortingSuite.scala
@@ -15,12 +15,14 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark.rdd
 
 import org.scalatest.FunSuite
 import org.scalatest.BeforeAndAfter
 import org.scalatest.matchers.ShouldMatchers
-import SparkContext._
+
+import org.apache.spark.{Logging, SharedSparkContext}
+import org.apache.spark.SparkContext._
 
 class SortingSuite extends FunSuite with SharedSparkContext with ShouldMatchers with Logging {
 
diff --git a/core/src/test/scala/spark/scheduler/DAGSchedulerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
similarity index 96%
rename from core/src/test/scala/spark/scheduler/DAGSchedulerSuite.scala
rename to core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
index 3b4a0d52fc79e98ba4bf0d8e28078dae53cf07d1..94f66c94c685a0bcbc057eb831adacfbc5f7c6f7 100644
--- a/core/src/test/scala/spark/scheduler/DAGSchedulerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
@@ -15,26 +15,26 @@
  * limitations under the License.
  */
 
-package spark.scheduler
+package org.apache.spark.scheduler
 
 import scala.collection.mutable.{Map, HashMap}
 
 import org.scalatest.FunSuite
 import org.scalatest.BeforeAndAfter
 
-import spark.LocalSparkContext
-import spark.MapOutputTracker
-import spark.RDD
-import spark.SparkContext
-import spark.Partition
-import spark.TaskContext
-import spark.{Dependency, ShuffleDependency, OneToOneDependency}
-import spark.{FetchFailed, Success, TaskEndReason}
-import spark.storage.{BlockManagerId, BlockManagerMaster}
-
-import spark.scheduler.cluster.Pool
-import spark.scheduler.cluster.SchedulingMode
-import spark.scheduler.cluster.SchedulingMode.SchedulingMode
+import org.apache.spark.LocalSparkContext
+import org.apache.spark.MapOutputTracker
+import org.apache.spark.rdd.RDD
+import org.apache.spark.SparkContext
+import org.apache.spark.Partition
+import org.apache.spark.TaskContext
+import org.apache.spark.{Dependency, ShuffleDependency, OneToOneDependency}
+import org.apache.spark.{FetchFailed, Success, TaskEndReason}
+import org.apache.spark.storage.{BlockManagerId, BlockManagerMaster}
+
+import org.apache.spark.scheduler.cluster.Pool
+import org.apache.spark.scheduler.cluster.SchedulingMode
+import org.apache.spark.scheduler.cluster.SchedulingMode.SchedulingMode
 
 /**
  * Tests for DAGScheduler. These tests directly call the event processing functions in DAGScheduler
diff --git a/core/src/test/scala/spark/scheduler/JobLoggerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/JobLoggerSuite.scala
similarity index 97%
rename from core/src/test/scala/spark/scheduler/JobLoggerSuite.scala
rename to core/src/test/scala/org/apache/spark/scheduler/JobLoggerSuite.scala
index bb9e715f95e217bd2c2b7c5f7a3bcbe90c782ae9..cece60dda726b30d49091cc7950e6d8fd39fa215 100644
--- a/core/src/test/scala/spark/scheduler/JobLoggerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/JobLoggerSuite.scala
@@ -15,15 +15,19 @@
  * limitations under the License.
  */
 
-package spark.scheduler
+package org.apache.spark.scheduler
 
 import java.util.Properties
 import java.util.concurrent.LinkedBlockingQueue
+
+import scala.collection.mutable
+
 import org.scalatest.FunSuite
 import org.scalatest.matchers.ShouldMatchers
-import scala.collection.mutable
-import spark._
-import spark.SparkContext._
+
+import org.apache.spark._
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
 
 
 class JobLoggerSuite extends FunSuite with LocalSparkContext with ShouldMatchers {
diff --git a/core/src/test/scala/spark/scheduler/SparkListenerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
similarity index 96%
rename from core/src/test/scala/spark/scheduler/SparkListenerSuite.scala
rename to core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
index 392d67d67bfdf65baeb3b03f4dd4b1b3e98fc625..aac7c207cbe0c8a6c3f3be75649815c9f57d058a 100644
--- a/core/src/test/scala/spark/scheduler/SparkListenerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
@@ -15,13 +15,13 @@
  * limitations under the License.
  */
 
-package spark.scheduler
+package org.apache.spark.scheduler
 
 import org.scalatest.FunSuite
-import spark.{SparkContext, LocalSparkContext}
+import org.apache.spark.{SparkContext, LocalSparkContext}
 import scala.collection.mutable
 import org.scalatest.matchers.ShouldMatchers
-import spark.SparkContext._
+import org.apache.spark.SparkContext._
 
 /**
  *
diff --git a/core/src/test/scala/spark/scheduler/TaskContextSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala
similarity index 88%
rename from core/src/test/scala/spark/scheduler/TaskContextSuite.scala
rename to core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala
index 95a6eee2fcf2137eea38a083c842814681351a6a..e31a116a75bf39450612d0a84ba39987a9cafd6a 100644
--- a/core/src/test/scala/spark/scheduler/TaskContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala
@@ -15,15 +15,15 @@
  * limitations under the License.
  */
 
-package spark.scheduler
+package org.apache.spark.scheduler
 
 import org.scalatest.FunSuite
 import org.scalatest.BeforeAndAfter
-import spark.TaskContext
-import spark.RDD
-import spark.SparkContext
-import spark.Partition
-import spark.LocalSparkContext
+import org.apache.spark.TaskContext
+import org.apache.spark.rdd.RDD
+import org.apache.spark.SparkContext
+import org.apache.spark.Partition
+import org.apache.spark.LocalSparkContext
 
 class TaskContextSuite extends FunSuite with BeforeAndAfter with LocalSparkContext {
 
diff --git a/core/src/test/scala/spark/scheduler/cluster/ClusterSchedulerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/cluster/ClusterSchedulerSuite.scala
similarity index 98%
rename from core/src/test/scala/spark/scheduler/cluster/ClusterSchedulerSuite.scala
rename to core/src/test/scala/org/apache/spark/scheduler/cluster/ClusterSchedulerSuite.scala
index abfdabf5fe634f5c135a7f1c04467120a4c8cc4c..92ad9f09b2d89b2d212d2944e1905d12b89f30bd 100644
--- a/core/src/test/scala/spark/scheduler/cluster/ClusterSchedulerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/cluster/ClusterSchedulerSuite.scala
@@ -15,14 +15,14 @@
  * limitations under the License.
  */
 
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
 
 import org.scalatest.FunSuite
 import org.scalatest.BeforeAndAfter
 
-import spark._
-import spark.scheduler._
-import spark.scheduler.cluster._
+import org.apache.spark._
+import org.apache.spark.scheduler._
+import org.apache.spark.scheduler.cluster._
 import scala.collection.mutable.ArrayBuffer
 
 import java.util.Properties
diff --git a/core/src/test/scala/spark/scheduler/cluster/ClusterTaskSetManagerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/cluster/ClusterTaskSetManagerSuite.scala
similarity index 98%
rename from core/src/test/scala/spark/scheduler/cluster/ClusterTaskSetManagerSuite.scala
rename to core/src/test/scala/org/apache/spark/scheduler/cluster/ClusterTaskSetManagerSuite.scala
index 5a0b949ef5f65e87595b294d18df0e7e14e19ebd..ff70a2cdf07b2aaac5c274cc574c3d4d77d98d28 100644
--- a/core/src/test/scala/spark/scheduler/cluster/ClusterTaskSetManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/cluster/ClusterTaskSetManagerSuite.scala
@@ -15,18 +15,18 @@
  * limitations under the License.
  */
 
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
 
 import scala.collection.mutable.ArrayBuffer
 import scala.collection.mutable
 
 import org.scalatest.FunSuite
 
-import spark._
-import spark.scheduler._
-import spark.executor.TaskMetrics
+import org.apache.spark._
+import org.apache.spark.scheduler._
+import org.apache.spark.executor.TaskMetrics
 import java.nio.ByteBuffer
-import spark.util.FakeClock
+import org.apache.spark.util.{Utils, FakeClock}
 
 /**
  * A mock ClusterScheduler implementation that just remembers information about tasks started and
diff --git a/core/src/test/scala/spark/scheduler/cluster/FakeTask.scala b/core/src/test/scala/org/apache/spark/scheduler/cluster/FakeTask.scala
similarity index 91%
rename from core/src/test/scala/spark/scheduler/cluster/FakeTask.scala
rename to core/src/test/scala/org/apache/spark/scheduler/cluster/FakeTask.scala
index de9e66be20ae6116828bae17650dd0d0bb06f768..2f12aaed18c6c99f485363b3d60e330feb795114 100644
--- a/core/src/test/scala/spark/scheduler/cluster/FakeTask.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/cluster/FakeTask.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
 
-import spark.scheduler.{TaskLocation, Task}
+import org.apache.spark.scheduler.{TaskLocation, Task}
 
 class FakeTask(stageId: Int, prefLocs: Seq[TaskLocation] = Nil) extends Task[Int](stageId) {
   override def run(attemptId: Long): Int = 0
diff --git a/core/src/test/scala/spark/scheduler/local/LocalSchedulerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/local/LocalSchedulerSuite.scala
similarity index 98%
rename from core/src/test/scala/spark/scheduler/local/LocalSchedulerSuite.scala
rename to core/src/test/scala/org/apache/spark/scheduler/local/LocalSchedulerSuite.scala
index d28ee47fa335ef81e512b943f674c8a4f6ea5a8d..111340a65ce1a9ea65950fd7c059bc133e96417a 100644
--- a/core/src/test/scala/spark/scheduler/local/LocalSchedulerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/local/LocalSchedulerSuite.scala
@@ -15,14 +15,14 @@
  * limitations under the License.
  */
 
-package spark.scheduler.local
+package org.apache.spark.scheduler.local
 
 import org.scalatest.FunSuite
 import org.scalatest.BeforeAndAfter
 
-import spark._
-import spark.scheduler._
-import spark.scheduler.cluster._
+import org.apache.spark._
+import org.apache.spark.scheduler._
+import org.apache.spark.scheduler.cluster._
 import scala.collection.mutable.ArrayBuffer
 import scala.collection.mutable.{ConcurrentMap, HashMap}
 import java.util.concurrent.Semaphore
diff --git a/core/src/test/scala/spark/KryoSerializerSuite.scala b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
similarity index 96%
rename from core/src/test/scala/spark/KryoSerializerSuite.scala
rename to core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
index 7568a0bf6591cf4dd8ab5d42e0d32c119b3ef778..0164dda0ba5e3ec7feb78813528cfda171ba3ca7 100644
--- a/core/src/test/scala/spark/KryoSerializerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
@@ -15,14 +15,15 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark.serializer
 
 import scala.collection.mutable
 
-import org.scalatest.FunSuite
-import com.esotericsoftware.kryo._
+import com.esotericsoftware.kryo.Kryo
 
-import KryoTest._
+import org.scalatest.FunSuite
+import org.apache.spark.SharedSparkContext
+import org.apache.spark.serializer.KryoTest._
 
 class KryoSerializerSuite extends FunSuite with SharedSparkContext {
   test("basic types") {
@@ -103,7 +104,6 @@ class KryoSerializerSuite extends FunSuite with SharedSparkContext {
   }
 
   test("custom registrator") {
-    import KryoTest._
     System.setProperty("spark.kryo.registrator", classOf[MyRegistrator].getName)
 
     val ser = (new KryoSerializer).newInstance()
@@ -167,7 +167,7 @@ class KryoSerializerSuite extends FunSuite with SharedSparkContext {
   }
 
   override def beforeAll() {
-    System.setProperty("spark.serializer", "spark.KryoSerializer")
+    System.setProperty("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
     System.setProperty("spark.kryo.registrator", classOf[MyRegistrator].getName)
     super.beforeAll()
   }
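
The hunk above also renames the serializer class that applications select through a system property. As a minimal, hypothetical sketch of application code after this rename (the `local` master and app name are illustrative, not part of the patch):

    import org.apache.spark.SparkContext

    object KryoApp {
      def main(args: Array[String]) {
        // Select the renamed Kryo serializer before creating the SparkContext.
        System.setProperty("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
        val sc = new SparkContext("local", "KryoApp")
        println(sc.parallelize(1 to 100).reduce(_ + _))
        sc.stop()
      }
    }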
diff --git a/core/src/test/scala/spark/storage/BlockManagerSuite.scala b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
similarity index 97%
rename from core/src/test/scala/spark/storage/BlockManagerSuite.scala
rename to core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
index b719d65342de6c25ef2ded153f9294f8a2dc7913..038a9acb8593c07f45c0e660bf0b719e97a8b888 100644
--- a/core/src/test/scala/spark/storage/BlockManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.storage
+package org.apache.spark.storage
 
 import java.nio.ByteBuffer
 
@@ -29,11 +29,8 @@ import org.scalatest.concurrent.Timeouts._
 import org.scalatest.matchers.ShouldMatchers._
 import org.scalatest.time.SpanSugar._
 
-import spark.JavaSerializer
-import spark.KryoSerializer
-import spark.SizeEstimator
-import spark.util.AkkaUtils
-import spark.util.ByteBufferInputStream
+import org.apache.spark.util.{SizeEstimator, Utils, AkkaUtils, ByteBufferInputStream}
+import org.apache.spark.serializer.{JavaSerializer, KryoSerializer}
 
 
 class BlockManagerSuite extends FunSuite with BeforeAndAfter with PrivateMethodTester {
@@ -56,7 +53,7 @@ class BlockManagerSuite extends FunSuite with BeforeAndAfter with PrivateMethodT
     System.setProperty("spark.hostPort", "localhost:" + boundPort)
 
     master = new BlockManagerMaster(
-      actorSystem.actorOf(Props(new spark.storage.BlockManagerMasterActor(true))))
+      actorSystem.actorOf(Props(new BlockManagerMasterActor(true))))
 
     // Set the arch to 64-bit and compressedOops to true to get a deterministic test-case
     oldArch = System.setProperty("os.arch", "amd64")
@@ -65,7 +62,7 @@ class BlockManagerSuite extends FunSuite with BeforeAndAfter with PrivateMethodT
     val initialize = PrivateMethod[Unit]('initialize)
     SizeEstimator invokePrivate initialize()
     // Set some value ...
-    System.setProperty("spark.hostPort", spark.Utils.localHostName() + ":" + 1111)
+    System.setProperty("spark.hostPort", Utils.localHostName() + ":" + 1111)
   }
 
   after {
@@ -105,10 +102,10 @@ class BlockManagerSuite extends FunSuite with BeforeAndAfter with PrivateMethodT
     assert(level2 === level1, "level2 is not same as level1")
     assert(level2.eq(level1), "level2 is not the same object as level1")
     assert(level3 != level1, "level3 is same as level1")
-    val bytes1 = spark.Utils.serialize(level1)
-    val level1_ = spark.Utils.deserialize[StorageLevel](bytes1)
-    val bytes2 = spark.Utils.serialize(level2)
-    val level2_ = spark.Utils.deserialize[StorageLevel](bytes2)
+    val bytes1 = Utils.serialize(level1)
+    val level1_ = Utils.deserialize[StorageLevel](bytes1)
+    val bytes2 = Utils.serialize(level2)
+    val level2_ = Utils.deserialize[StorageLevel](bytes2)
     assert(level1_ === level1, "Deserialized level1 not same as original level1")
     assert(level1_.eq(level1), "Deserialized level1 not the same object as original level2")
     assert(level2_ === level2, "Deserialized level2 not same as original level2")
@@ -122,10 +119,10 @@ class BlockManagerSuite extends FunSuite with BeforeAndAfter with PrivateMethodT
     assert(id2 === id1, "id2 is not same as id1")
     assert(id2.eq(id1), "id2 is not the same object as id1")
     assert(id3 != id1, "id3 is same as id1")
-    val bytes1 = spark.Utils.serialize(id1)
-    val id1_ = spark.Utils.deserialize[BlockManagerId](bytes1)
-    val bytes2 = spark.Utils.serialize(id2)
-    val id2_ = spark.Utils.deserialize[BlockManagerId](bytes2)
+    val bytes1 = Utils.serialize(id1)
+    val id1_ = Utils.deserialize[BlockManagerId](bytes1)
+    val bytes2 = Utils.serialize(id2)
+    val id2_ = Utils.deserialize[BlockManagerId](bytes2)
     assert(id1_ === id1, "Deserialized id1 is not same as original id1")
     assert(id1_.eq(id1), "Deserialized id1 is not the same object as original id1")
     assert(id2_ === id2, "Deserialized id2 is not same as original id2")
diff --git a/core/src/test/scala/spark/ui/UISuite.scala b/core/src/test/scala/org/apache/spark/ui/UISuite.scala
similarity index 98%
rename from core/src/test/scala/spark/ui/UISuite.scala
rename to core/src/test/scala/org/apache/spark/ui/UISuite.scala
index 735a794396fa247fe967b300d39ad3428e6e7556..3321fb5eb71b5315d207c6013324fc67d6f504ac 100644
--- a/core/src/test/scala/spark/ui/UISuite.scala
+++ b/core/src/test/scala/org/apache/spark/ui/UISuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.ui
+package org.apache.spark.ui
 
 import scala.util.{Failure, Success, Try}
 import java.net.ServerSocket
diff --git a/core/src/test/scala/spark/ClosureCleanerSuite.scala b/core/src/test/scala/org/apache/spark/util/ClosureCleanerSuite.scala
similarity index 97%
rename from core/src/test/scala/spark/ClosureCleanerSuite.scala
rename to core/src/test/scala/org/apache/spark/util/ClosureCleanerSuite.scala
index 7d2831e19c9e75c31063cc9533fd25f21ebf58c5..0ed366fb707b7d960e504a240eadc51cd654311a 100644
--- a/core/src/test/scala/spark/ClosureCleanerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/ClosureCleanerSuite.scala
@@ -15,13 +15,14 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark.util
 
 import java.io.NotSerializableException
 
 import org.scalatest.FunSuite
-import spark.LocalSparkContext._
-import SparkContext._
+
+import org.apache.spark.SparkContext
+import org.apache.spark.LocalSparkContext._
 
 class ClosureCleanerSuite extends FunSuite {
   test("closures inside an object") {
diff --git a/core/src/test/scala/spark/util/DistributionSuite.scala b/core/src/test/scala/org/apache/spark/util/DistributionSuite.scala
similarity index 97%
rename from core/src/test/scala/spark/util/DistributionSuite.scala
rename to core/src/test/scala/org/apache/spark/util/DistributionSuite.scala
index 6578b55e8243ded61abba4a5bae94ca538cadaa8..63642461e4465a4d764120eeb5bdab2409295023 100644
--- a/core/src/test/scala/spark/util/DistributionSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/DistributionSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.util
+package org.apache.spark.util
 
 import org.scalatest.FunSuite
 import org.scalatest.matchers.ShouldMatchers
diff --git a/core/src/test/scala/spark/util/FakeClock.scala b/core/src/test/scala/org/apache/spark/util/FakeClock.scala
similarity index 96%
rename from core/src/test/scala/spark/util/FakeClock.scala
rename to core/src/test/scala/org/apache/spark/util/FakeClock.scala
index 236706317e351c7b5fadbf90754473a82cc6e95d..0a45917b08dd250fcd3cafe3456592d5fb0fb152 100644
--- a/core/src/test/scala/spark/util/FakeClock.scala
+++ b/core/src/test/scala/org/apache/spark/util/FakeClock.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.util
+package org.apache.spark.util
 
 class FakeClock extends Clock {
   private var time = 0L
diff --git a/core/src/test/scala/spark/util/NextIteratorSuite.scala b/core/src/test/scala/org/apache/spark/util/NextIteratorSuite.scala
similarity index 98%
rename from core/src/test/scala/spark/util/NextIteratorSuite.scala
rename to core/src/test/scala/org/apache/spark/util/NextIteratorSuite.scala
index fdbd43d941b1e645feb35f7ba70372c031a31643..45867463a5132a7a8a004372912b4b6896c6a0b9 100644
--- a/core/src/test/scala/spark/util/NextIteratorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/NextIteratorSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.util
+package org.apache.spark.util
 
 import org.scalatest.FunSuite
 import org.scalatest.matchers.ShouldMatchers
diff --git a/core/src/test/scala/spark/util/RateLimitedOutputStreamSuite.scala b/core/src/test/scala/org/apache/spark/util/RateLimitedOutputStreamSuite.scala
similarity index 97%
rename from core/src/test/scala/spark/util/RateLimitedOutputStreamSuite.scala
rename to core/src/test/scala/org/apache/spark/util/RateLimitedOutputStreamSuite.scala
index 4c0044202f033d801270f6d47b17e72698ac011a..a9dd0b1a5b61554b8bf8eb0d5f22bfc9156707fa 100644
--- a/core/src/test/scala/spark/util/RateLimitedOutputStreamSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/RateLimitedOutputStreamSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.util
+package org.apache.spark.util
 
 import org.scalatest.FunSuite
 import java.io.ByteArrayOutputStream
diff --git a/core/src/test/scala/spark/SizeEstimatorSuite.scala b/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
similarity index 99%
rename from core/src/test/scala/spark/SizeEstimatorSuite.scala
rename to core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
index 1ef812dfbddae6aeb56f18b2ee02487c74ca744f..4e40dcbdeebe8ca764b620946d74de25050c9df7 100644
--- a/core/src/test/scala/spark/SizeEstimatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark.util
 
 import org.scalatest.FunSuite
 import org.scalatest.BeforeAndAfterAll
diff --git a/core/src/test/scala/spark/UtilsSuite.scala b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
similarity index 99%
rename from core/src/test/scala/spark/UtilsSuite.scala
rename to core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
index 98a6c1a1c9e336189fdca53c27cb59b5b6cb52d6..e2859caf58d5a9fbfbce963cd496e3bf740ad87b 100644
--- a/core/src/test/scala/spark/UtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark
+package org.apache.spark.util
 
 import com.google.common.base.Charsets
 import com.google.common.io.Files
diff --git a/docs/README.md b/docs/README.md
index c2b3497bb374ecc8b811d895b59032a7ab68fcbb..dfcf7535538f06c99ec145d03a18a8a75fda9c8b 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -1,6 +1,6 @@
 Welcome to the Spark documentation!
 
-This readme will walk you through navigating and building the Spark documentation, which is included here with the Spark source code. You can also find documentation specific to release versions of Spark at http://spark-project.org/documentation.html.
+This readme will walk you through navigating and building the Spark documentation, which is included here with the Spark source code. You can also find documentation specific to release versions of Spark at http://spark.incubator.apache.org/documentation.html.
 
 Read on to learn more about viewing documentation in plain text (i.e., markdown) or building the documentation yourself. Why build it yourself? So that you have the docs that correspond to whichever version of Spark you currently have checked out of revision control.
 
diff --git a/docs/bagel-programming-guide.md b/docs/bagel-programming-guide.md
index c526da3ca0bca75d0919f9655fb09e353073b285..583684913d69f0a8a2279b5e33c5ee3afaa6a949 100644
--- a/docs/bagel-programming-guide.md
+++ b/docs/bagel-programming-guide.md
@@ -9,16 +9,15 @@ In the Pregel programming model, jobs run as a sequence of iterations called _su
 
 This guide shows the programming model and features of Bagel by walking through an example implementation of PageRank on Bagel.
 
-## Linking with Bagel
+# Linking with Bagel
 
-To write a Bagel application, you will need to add Spark, its dependencies, and Bagel to your CLASSPATH:
+To use Bagel in your program, add the following SBT or Maven dependency:
 
-1. Run `sbt/sbt update` to fetch Spark's dependencies, if you haven't already done so.
-2. Run `sbt/sbt assembly` to build Spark and its dependencies into one JAR (`core/target/spark-core-assembly-{{site.SPARK_VERSION}}.jar`) 
-3. Run `sbt/sbt package` build the Bagel JAR (`bagel/target/scala_{{site.SCALA_VERSION}}/spark-bagel_{{site.SCALA_VERSION}}-{{site.SPARK_VERSION}}.jar`).
-4. Add these two JARs to your CLASSPATH.
+    groupId = org.apache.spark
+    artifactId = spark-bagel_{{site.SCALA_VERSION}}
+    version = {{site.SPARK_VERSION}}
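
In SBT, for example, these coordinates might be declared as follows (a sketch that simply restates the listing above; the Scala-version suffix on the artifact name mirrors the `artifactId` shown):

    libraryDependencies += "org.apache.spark" % "spark-bagel_{{site.SCALA_VERSION}}" % "{{site.SPARK_VERSION}}"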
 
-## Programming Model
+# Programming Model
 
 Bagel operates on a graph represented as a [distributed dataset](scala-programming-guide.html) of (K, V) pairs, where keys are vertex IDs and values are vertices plus their associated state. In each superstep, Bagel runs a user-specified compute function on each vertex that takes as input the current vertex state and a list of messages sent to that vertex during the previous superstep, and returns the new vertex state and a list of outgoing messages.
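
To make this contract concrete, here is a hypothetical sketch of a compute function in the shape the full-form listing later in this guide describes: current vertex, optional messages from the previous superstep, and superstep number in; new vertex state and outgoing messages out. The `PRVertex`/`PRMessage` fields used here (`rank`, `outEdges`, `value`, `targetId`) are assumptions for illustration, not the guide's exact definitions.

{% highlight scala %}
// Sketch only: runs PageRank updates for ten supersteps, then halts.
def compute(self: PRVertex, msgs: Option[Array[PRMessage]], superstep: Int)
  : (PRVertex, Array[PRMessage]) = {
  val msgSum = msgs.map(_.map(_.value).sum).getOrElse(0.0)
  val newRank = if (superstep > 0) 0.15 + 0.85 * msgSum else self.rank
  val outbox =
    if (superstep < 10)
      self.outEdges.map(e => new PRMessage(e.targetId, newRank / self.outEdges.size))
    else
      Array[PRMessage]()
  (new PRVertex(self.id, newRank, self.outEdges, superstep < 10), outbox)
}
{% endhighlight %}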
 
@@ -29,8 +28,8 @@ representing the current PageRank of the vertex, and similarly extend
 the `Message` and `Edge` classes. Note that these need to be marked `@serializable` to allow Spark to transfer them across machines. We also import the Bagel types and implicit conversions.
 
 {% highlight scala %}
-import spark.bagel._
-import spark.bagel.Bagel._
+import org.apache.spark.bagel._
+import org.apache.spark.bagel.Bagel._
 
 @serializable class PREdge(val targetId: String) extends Edge
 
@@ -89,7 +88,7 @@ Finally, we print the results.
 println(result.map(v => "%s\t%s\n".format(v.id, v.rank)).collect.mkString)
 {% endhighlight %}
 
-### Combiners
+## Combiners
 
 Sending a message to another vertex generally involves expensive communication over the network. For certain algorithms, it's possible to reduce the amount of communication using _combiners_. For example, if the compute function receives integer messages and only uses their sum, it's possible for Bagel to combine multiple messages to the same vertex by summing them.
 
@@ -97,7 +96,7 @@ For combiner support, Bagel can optionally take a set of combiner functions that
 
 _Example: PageRank with combiners_
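
As a hypothetical sketch of such a combiner (assuming the `Combiner[M, C]` trait shown under Types follows the usual create/merge pattern, and a message type with a numeric `value` field):

{% highlight scala %}
// Sum-combiner sketch: collapses all messages bound for one vertex into a Double.
class SumCombiner extends Combiner[PRMessage, Double] with Serializable {
  def createCombiner(msg: PRMessage): Double = msg.value
  def mergeMsg(combiner: Double, msg: PRMessage): Double = combiner + msg.value
  def mergeCombiners(a: Double, b: Double): Double = a + b
}
{% endhighlight %}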
 
-### Aggregators
+## Aggregators
 
 Aggregators perform a reduce across all vertices after each superstep, and provide the result to each vertex in the next superstep.
 
@@ -105,11 +104,11 @@ For aggregator support, Bagel can optionally take an aggregator function that re
 
 _Example_
 
-### Operations
+## Operations
 
-Here are the actions and types in the Bagel API. See [Bagel.scala](https://github.com/mesos/spark/blob/master/bagel/src/main/scala/spark/bagel/Bagel.scala) for details.
+Here are the actions and types in the Bagel API. See [Bagel.scala](https://github.com/apache/incubator-spark/blob/master/bagel/src/main/scala/org/apache/spark/bagel/Bagel.scala) for details.
 
-#### Actions
+### Actions
 
 {% highlight scala %}
 /*** Full form ***/
@@ -133,7 +132,7 @@ Bagel.run(sc, vertices, messages, numSplits)(compute)
 // and returns (newVertex: V, outMessages: Array[M])
 {% endhighlight %}
 
-#### Types
+### Types
 
 {% highlight scala %}
 trait Combiner[M, C] {
@@ -156,6 +155,10 @@ trait Message[K] {
 }
 {% endhighlight %}
 
-## Where to Go from Here
+# Where to Go from Here
 
-Two example jobs, PageRank and shortest path, are included in `examples/src/main/scala/spark/examples/bagel`. You can run them by passing the class name to the `run-example` script included in Spark -- for example, `./run-example spark.examples.bagel.WikipediaPageRank`. Each example program prints usage help when run without any arguments.
+Two example jobs, PageRank and shortest path, are included in `examples/src/main/scala/org/apache/spark/examples/bagel`. You can run them by passing the class name to the `run-example` script included in Spark; e.g.:
+
+    ./run-example org.apache.spark.examples.bagel.WikipediaPageRank
+
+Each example program prints usage help when run without any arguments.
diff --git a/docs/configuration.md b/docs/configuration.md
index 1c0492efb36dbc836be76e4c2a253d7cfc7a9f4a..58e9434bdc086c783ef550d2959bec90eed51016 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -36,13 +36,13 @@ there are at least five properties that you will commonly want to control:
 </tr>
 <tr>
   <td>spark.serializer</td>
-  <td>spark.JavaSerializer</td>
+  <td>org.apache.spark.serializer.<br />JavaSerializer</td>
   <td>
     Class to use for serializing objects that will be sent over the network or need to be cached
     in serialized form. The default of Java serialization works with any Serializable Java object but is
-    quite slow, so we recommend <a href="tuning.html">using <code>spark.KryoSerializer</code>
+    quite slow, so we recommend <a href="tuning.html">using <code>org.apache.spark.serializer.KryoSerializer</code>
     and configuring Kryo serialization</a> when speed is necessary. Can be any subclass of
-    <a href="api/core/index.html#spark.Serializer"><code>spark.Serializer</code></a>.
+    <a href="api/core/index.html#org.apache.spark.serializer.Serializer"><code>org.apache.spark.serializer.Serializer</code></a>.
   </td>
 </tr>
 <tr>
@@ -50,8 +50,8 @@ there are at least five properties that you will commonly want to control:
   <td>(none)</td>
   <td>
     If you use Kryo serialization, set this class to register your custom classes with Kryo.
-    You need to set it to a class that extends
-    <a href="api/core/index.html#spark.KryoRegistrator"><code>spark.KryoRegistrator</code></a>.
+    It should be set to a class that extends
+    <a href="api/core/index.html#org.apache.spark.serializer.KryoRegistrator"><code>KryoRegistrator</code></a>.
     See the <a href="tuning.html#data-serialization">tuning guide</a> for more details.
   </td>
 </tr>
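
A registrator under the new package naming might look like the following sketch (`MyClass` is a placeholder application class, not part of the patch):

{% highlight scala %}
import com.esotericsoftware.kryo.Kryo
import org.apache.spark.serializer.KryoRegistrator

class MyClass(val id: Int, val name: String)  // placeholder

class MyRegistrator extends KryoRegistrator {
  // Register application classes so Kryo can serialize them without
  // writing full class names into every record.
  override def registerClasses(kryo: Kryo) {
    kryo.register(classOf[MyClass])
  }
}
{% endhighlight %}

It would then be selected with `System.setProperty("spark.kryo.registrator", classOf[MyRegistrator].getName)` before the SparkContext is created.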
@@ -147,10 +147,10 @@ Apart from these, the following properties are also available, and may be useful
 </tr>
 <tr>
   <td>spark.io.compression.codec</td>
-  <td>spark.io.SnappyCompressionCodec</td>
+  <td>org.apache.spark.io.<br />SnappyCompressionCodec</td>
   <td>
     The compression codec class to use for various compressions. By default, Spark provides two
-    codecs: <code>spark.io.LZFCompressionCodec</code> and <code>spark.io.SnappyCompressionCodec</code>.
+    codecs: <code>org.apache.spark.io.LZFCompressionCodec</code> and <code>org.apache.spark.io.SnappyCompressionCodec</code>.
   </td>
 </tr>
 <tr>
@@ -171,7 +171,7 @@ Apart from these, the following properties are also available, and may be useful
 </tr>
 <tr>
   <td>spark.closure.serializer</td>
-  <td>spark.JavaSerializer</td>
+  <td>org.apache.spark.serializer.<br />JavaSerializer</td>
   <td>
     Serializer class to use for closures. Generally Java is fine unless your distributed functions
     (e.g. map functions) reference large objects in the driver program.
@@ -198,7 +198,7 @@ Apart from these, the following properties are also available, and may be useful
 </tr>
 <tr>
   <td>spark.broadcast.factory</td>
-  <td>spark.broadcast.HttpBroadcastFactory</td>
+  <td>org.apache.spark.broadcast.<br />HttpBroadcastFactory</td>
   <td>
     Which broadcast implementation to use.
   </td>
diff --git a/docs/css/bootstrap.min.css b/docs/css/bootstrap.min.css
index cf4c519cdc7e6222cacdbd772c68ace87ab1ace9..3444ed79e24d16e83c5abfcb3626faa9655cad81 100644
--- a/docs/css/bootstrap.min.css
+++ b/docs/css/bootstrap.min.css
@@ -6,4 +6,4 @@
  * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Designed and built with all the love in the world @twitter by @mdo and @fat.
- */article,aside,details,figcaption,figure,footer,header,hgroup,nav,section{display:block}audio,canvas,video{display:inline-block;*display:inline;*zoom:1}audio:not([controls]){display:none}html{font-size:100%;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}a:focus{outline:thin dotted #333;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}a:hover,a:active{outline:0}sub,sup{position:relative;font-size:75%;line-height:0;vertical-align:baseline}sup{top:-0.5em}sub{bottom:-0.25em}img{height:auto;max-width:100%;vertical-align:middle;border:0;-ms-interpolation-mode:bicubic}#map_canvas img{max-width:none}button,input,select,textarea{margin:0;font-size:100%;vertical-align:middle}button,input{*overflow:visible;line-height:normal}button::-moz-focus-inner,input::-moz-focus-inner{padding:0;border:0}button,input[type="button"],input[type="reset"],input[type="submit"]{cursor:pointer;-webkit-appearance:button}input[type="search"]{-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;-webkit-appearance:textfield}input[type="search"]::-webkit-search-decoration,input[type="search"]::-webkit-search-cancel-button{-webkit-appearance:none}textarea{overflow:auto;vertical-align:top}.clearfix{*zoom:1}.clearfix:before,.clearfix:after{display:table;line-height:0;content:""}.clearfix:after{clear:both}.hide-text{font:0/0 a;color:transparent;text-shadow:none;background-color:transparent;border:0}.input-block-level{display:block;width:100%;min-height:30px;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}body{margin:0;font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:14px;line-height:20px;color:#333;background-color:#fff}a{color:#08c;text-decoration:none}a:hover{color:#005580;text-decoration:underline}.img-rounded{-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px}.img-polaroid{padding:4px;background-color:#fff;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.2);-webkit-box-shadow:0 1px 3px rgba(0,0,0,0.1);-moz-box-shadow:0 1px 3px rgba(0,0,0,0.1);box-shadow:0 1px 3px rgba(0,0,0,0.1)}.img-circle{-webkit-border-radius:500px;-moz-border-radius:500px;border-radius:500px}.row{margin-left:-20px;*zoom:1}.row:before,.row:after{display:table;line-height:0;content:""}.row:after{clear:both}[class*="span"]{float:left;margin-left:20px}.container,.navbar-static-top .container,.navbar-fixed-top .container,.navbar-fixed-bottom .container{width:940px}.span12{width:940px}.span11{width:860px}.span10{width:780px}.span9{width:700px}.span8{width:620px}.span7{width:540px}.span6{width:460px}.span5{width:380px}.span4{width:300px}.span3{width:220px}.span2{width:140px}.span1{width:60px}.offset12{margin-left:980px}.offset11{margin-left:900px}.offset10{margin-left:820px}.offset9{margin-left:740px}.offset8{margin-left:660px}.offset7{margin-left:580px}.offset6{margin-left:500px}.offset5{margin-left:420px}.offset4{margin-left:340px}.offset3{margin-left:260px}.offset2{margin-left:180px}.offset1{margin-left:100px}.row-fluid{width:100%;*zoom:1}.row-fluid:before,.row-fluid:after{display:table;line-height:0;content:""}.row-fluid:after{clear:both}.row-fluid [class*="span"]{display:block;float:left;width:100%;min-height:30px;margin-left:2.127659574468085%;*margin-left:2.074468085106383%;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.row-fluid [class*="span"]:first-child{margin-left:0}.row-fluid .span12{width:100%;*width:99.94680851063829%}.row-fluid 
.span11{width:91.48936170212765%;*width:91.43617021276594%}.row-fluid .span10{width:82.97872340425532%;*width:82.92553191489361%}.row-fluid .span9{width:74.46808510638297%;*width:74.41489361702126%}.row-fluid .span8{width:65.95744680851064%;*width:65.90425531914893%}.row-fluid .span7{width:57.44680851063829%;*width:57.39361702127659%}.row-fluid .span6{width:48.93617021276595%;*width:48.88297872340425%}.row-fluid .span5{width:40.42553191489362%;*width:40.37234042553192%}.row-fluid .span4{width:31.914893617021278%;*width:31.861702127659576%}.row-fluid .span3{width:23.404255319148934%;*width:23.351063829787233%}.row-fluid .span2{width:14.893617021276595%;*width:14.840425531914894%}.row-fluid .span1{width:6.382978723404255%;*width:6.329787234042553%}.row-fluid .offset12{margin-left:104.25531914893617%;*margin-left:104.14893617021275%}.row-fluid .offset12:first-child{margin-left:102.12765957446808%;*margin-left:102.02127659574467%}.row-fluid .offset11{margin-left:95.74468085106382%;*margin-left:95.6382978723404%}.row-fluid .offset11:first-child{margin-left:93.61702127659574%;*margin-left:93.51063829787232%}.row-fluid .offset10{margin-left:87.23404255319149%;*margin-left:87.12765957446807%}.row-fluid .offset10:first-child{margin-left:85.1063829787234%;*margin-left:84.99999999999999%}.row-fluid .offset9{margin-left:78.72340425531914%;*margin-left:78.61702127659572%}.row-fluid .offset9:first-child{margin-left:76.59574468085106%;*margin-left:76.48936170212764%}.row-fluid .offset8{margin-left:70.2127659574468%;*margin-left:70.10638297872339%}.row-fluid .offset8:first-child{margin-left:68.08510638297872%;*margin-left:67.9787234042553%}.row-fluid .offset7{margin-left:61.70212765957446%;*margin-left:61.59574468085106%}.row-fluid .offset7:first-child{margin-left:59.574468085106375%;*margin-left:59.46808510638297%}.row-fluid .offset6{margin-left:53.191489361702125%;*margin-left:53.085106382978715%}.row-fluid .offset6:first-child{margin-left:51.063829787234035%;*margin-left:50.95744680851063%}.row-fluid .offset5{margin-left:44.68085106382979%;*margin-left:44.57446808510638%}.row-fluid .offset5:first-child{margin-left:42.5531914893617%;*margin-left:42.4468085106383%}.row-fluid .offset4{margin-left:36.170212765957444%;*margin-left:36.06382978723405%}.row-fluid .offset4:first-child{margin-left:34.04255319148936%;*margin-left:33.93617021276596%}.row-fluid .offset3{margin-left:27.659574468085104%;*margin-left:27.5531914893617%}.row-fluid .offset3:first-child{margin-left:25.53191489361702%;*margin-left:25.425531914893618%}.row-fluid .offset2{margin-left:19.148936170212764%;*margin-left:19.04255319148936%}.row-fluid .offset2:first-child{margin-left:17.02127659574468%;*margin-left:16.914893617021278%}.row-fluid .offset1{margin-left:10.638297872340425%;*margin-left:10.53191489361702%}.row-fluid .offset1:first-child{margin-left:8.51063829787234%;*margin-left:8.404255319148938%}[class*="span"].hide,.row-fluid [class*="span"].hide{display:none}[class*="span"].pull-right,.row-fluid [class*="span"].pull-right{float:right}.container{margin-right:auto;margin-left:auto;*zoom:1}.container:before,.container:after{display:table;line-height:0;content:""}.container:after{clear:both}.container-fluid{padding-right:20px;padding-left:20px;*zoom:1}.container-fluid:before,.container-fluid:after{display:table;line-height:0;content:""}.container-fluid:after{clear:both}p{margin:0 0 
10px}.lead{margin-bottom:20px;font-size:20px;font-weight:200;line-height:30px}small{font-size:85%}strong{font-weight:bold}em{font-style:italic}cite{font-style:normal}.muted{color:#999}h1,h2,h3,h4,h5,h6{margin:10px 0;font-family:inherit;font-weight:bold;line-height:1;color:inherit;text-rendering:optimizelegibility}h1 small,h2 small,h3 small,h4 small,h5 small,h6 small{font-weight:normal;line-height:1;color:#999}h1{font-size:36px;line-height:40px}h2{font-size:30px;line-height:40px}h3{font-size:24px;line-height:40px}h4{font-size:18px;line-height:20px}h5{font-size:14px;line-height:20px}h6{font-size:12px;line-height:20px}h1 small{font-size:24px}h2 small{font-size:18px}h3 small{font-size:14px}h4 small{font-size:14px}.page-header{padding-bottom:9px;margin:20px 0 30px;border-bottom:1px solid #eee}ul,ol{padding:0;margin:0 0 10px 25px}ul ul,ul ol,ol ol,ol ul{margin-bottom:0}li{line-height:20px}ul.unstyled,ol.unstyled{margin-left:0;list-style:none}dl{margin-bottom:20px}dt,dd{line-height:20px}dt{font-weight:bold}dd{margin-left:10px}.dl-horizontal dt{float:left;width:120px;overflow:hidden;clear:left;text-align:right;text-overflow:ellipsis;white-space:nowrap}.dl-horizontal dd{margin-left:130px}hr{margin:20px 0;border:0;border-top:1px solid #eee;border-bottom:1px solid #fff}abbr[title]{cursor:help;border-bottom:1px dotted #999}abbr.initialism{font-size:90%;text-transform:uppercase}blockquote{padding:0 0 0 15px;margin:0 0 20px;border-left:5px solid #eee}blockquote p{margin-bottom:0;font-size:16px;font-weight:300;line-height:25px}blockquote small{display:block;line-height:20px;color:#999}blockquote small:before{content:'\2014 \00A0'}blockquote.pull-right{float:right;padding-right:15px;padding-left:0;border-right:5px solid #eee;border-left:0}blockquote.pull-right p,blockquote.pull-right small{text-align:right}blockquote.pull-right small:before{content:''}blockquote.pull-right small:after{content:'\00A0 \2014'}q:before,q:after,blockquote:before,blockquote:after{content:""}address{display:block;margin-bottom:20px;font-style:normal;line-height:20px}code,pre{padding:0 3px 2px;font-family:Monaco,Menlo,Consolas,"Courier New",monospace;font-size:12px;color:#333;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px}code{padding:2px 4px;color:#d14;background-color:#f7f7f9;border:1px solid #e1e1e8}pre{display:block;padding:9.5px;margin:0 0 10px;font-size:13px;line-height:20px;word-break:break-all;word-wrap:break-word;white-space:pre;white-space:pre-wrap;background-color:#f5f5f5;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.15);-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}pre.prettyprint{margin-bottom:20px}pre code{padding:0;color:inherit;background-color:transparent;border:0}.pre-scrollable{max-height:340px;overflow-y:scroll}form{margin:0 0 20px}fieldset{padding:0;margin:0;border:0}legend{display:block;width:100%;padding:0;margin-bottom:20px;font-size:21px;line-height:40px;color:#333;border:0;border-bottom:1px solid #e5e5e5}legend small{font-size:15px;color:#999}label,input,button,select,textarea{font-size:14px;font-weight:normal;line-height:20px}input,button,select,textarea{font-family:"Helvetica 
Neue",Helvetica,Arial,sans-serif}label{display:block;margin-bottom:5px}select,textarea,input[type="text"],input[type="password"],input[type="datetime"],input[type="datetime-local"],input[type="date"],input[type="month"],input[type="time"],input[type="week"],input[type="number"],input[type="email"],input[type="url"],input[type="search"],input[type="tel"],input[type="color"],.uneditable-input{display:inline-block;height:20px;padding:4px 6px;margin-bottom:9px;font-size:14px;line-height:20px;color:#555;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px}input,textarea{width:210px}textarea{height:auto}textarea,input[type="text"],input[type="password"],input[type="datetime"],input[type="datetime-local"],input[type="date"],input[type="month"],input[type="time"],input[type="week"],input[type="number"],input[type="email"],input[type="url"],input[type="search"],input[type="tel"],input[type="color"],.uneditable-input{background-color:#fff;border:1px solid #ccc;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-webkit-transition:border linear .2s,box-shadow linear .2s;-moz-transition:border linear .2s,box-shadow linear .2s;-o-transition:border linear .2s,box-shadow linear .2s;transition:border linear .2s,box-shadow linear .2s}textarea:focus,input[type="text"]:focus,input[type="password"]:focus,input[type="datetime"]:focus,input[type="datetime-local"]:focus,input[type="date"]:focus,input[type="month"]:focus,input[type="time"]:focus,input[type="week"]:focus,input[type="number"]:focus,input[type="email"]:focus,input[type="url"]:focus,input[type="search"]:focus,input[type="tel"]:focus,input[type="color"]:focus,.uneditable-input:focus{border-color:rgba(82,168,236,0.8);outline:0;outline:thin dotted \9;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 8px rgba(82,168,236,0.6);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 8px rgba(82,168,236,0.6);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 8px rgba(82,168,236,0.6)}input[type="radio"],input[type="checkbox"]{margin:4px 0 0;margin-top:1px \9;*margin-top:0;line-height:normal;cursor:pointer}input[type="file"],input[type="image"],input[type="submit"],input[type="reset"],input[type="button"],input[type="radio"],input[type="checkbox"]{width:auto}select,input[type="file"]{height:30px;*margin-top:4px;line-height:30px}select{width:220px;background-color:#fff;border:1px solid #bbb}select[multiple],select[size]{height:auto}select:focus,input[type="file"]:focus,input[type="radio"]:focus,input[type="checkbox"]:focus{outline:thin dotted #333;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}.uneditable-input,.uneditable-textarea{color:#999;cursor:not-allowed;background-color:#fcfcfc;border-color:#ccc;-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,0.025);-moz-box-shadow:inset 0 1px 2px rgba(0,0,0,0.025);box-shadow:inset 0 1px 2px rgba(0,0,0,0.025)}.uneditable-input{overflow:hidden;white-space:nowrap}.uneditable-textarea{width:auto;height:auto}input:-moz-placeholder,textarea:-moz-placeholder{color:#999}input:-ms-input-placeholder,textarea:-ms-input-placeholder{color:#999}input::-webkit-input-placeholder,textarea::-webkit-input-placeholder{color:#999}.radio,.checkbox{min-height:18px;padding-left:18px}.radio input[type="radio"],.checkbox 
input[type="checkbox"]{float:left;margin-left:-18px}.controls>.radio:first-child,.controls>.checkbox:first-child{padding-top:5px}.radio.inline,.checkbox.inline{display:inline-block;padding-top:5px;margin-bottom:0;vertical-align:middle}.radio.inline+.radio.inline,.checkbox.inline+.checkbox.inline{margin-left:10px}.input-mini{width:60px}.input-small{width:90px}.input-medium{width:150px}.input-large{width:210px}.input-xlarge{width:270px}.input-xxlarge{width:530px}input[class*="span"],select[class*="span"],textarea[class*="span"],.uneditable-input[class*="span"],.row-fluid input[class*="span"],.row-fluid select[class*="span"],.row-fluid textarea[class*="span"],.row-fluid .uneditable-input[class*="span"]{float:none;margin-left:0}.input-append input[class*="span"],.input-append .uneditable-input[class*="span"],.input-prepend input[class*="span"],.input-prepend .uneditable-input[class*="span"],.row-fluid input[class*="span"],.row-fluid select[class*="span"],.row-fluid textarea[class*="span"],.row-fluid .uneditable-input[class*="span"],.row-fluid .input-prepend [class*="span"],.row-fluid .input-append [class*="span"]{display:inline-block}input,textarea,.uneditable-input{margin-left:0}.controls-row [class*="span"]+[class*="span"]{margin-left:20px}input.span12,textarea.span12,.uneditable-input.span12{width:926px}input.span11,textarea.span11,.uneditable-input.span11{width:846px}input.span10,textarea.span10,.uneditable-input.span10{width:766px}input.span9,textarea.span9,.uneditable-input.span9{width:686px}input.span8,textarea.span8,.uneditable-input.span8{width:606px}input.span7,textarea.span7,.uneditable-input.span7{width:526px}input.span6,textarea.span6,.uneditable-input.span6{width:446px}input.span5,textarea.span5,.uneditable-input.span5{width:366px}input.span4,textarea.span4,.uneditable-input.span4{width:286px}input.span3,textarea.span3,.uneditable-input.span3{width:206px}input.span2,textarea.span2,.uneditable-input.span2{width:126px}input.span1,textarea.span1,.uneditable-input.span1{width:46px}.controls-row{*zoom:1}.controls-row:before,.controls-row:after{display:table;line-height:0;content:""}.controls-row:after{clear:both}.controls-row [class*="span"]{float:left}input[disabled],select[disabled],textarea[disabled],input[readonly],select[readonly],textarea[readonly]{cursor:not-allowed;background-color:#eee}input[type="radio"][disabled],input[type="checkbox"][disabled],input[type="radio"][readonly],input[type="checkbox"][readonly]{background-color:transparent}.control-group.warning>label,.control-group.warning .help-block,.control-group.warning .help-inline{color:#c09853}.control-group.warning .checkbox,.control-group.warning .radio,.control-group.warning input,.control-group.warning select,.control-group.warning textarea{color:#c09853;border-color:#c09853;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075)}.control-group.warning .checkbox:focus,.control-group.warning .radio:focus,.control-group.warning input:focus,.control-group.warning select:focus,.control-group.warning textarea:focus{border-color:#a47e3c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #dbc59e;-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #dbc59e;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #dbc59e}.control-group.warning .input-prepend .add-on,.control-group.warning .input-append .add-on{color:#c09853;background-color:#fcf8e3;border-color:#c09853}.control-group.error>label,.control-group.error 
.help-block,.control-group.error .help-inline{color:#b94a48}.control-group.error .checkbox,.control-group.error .radio,.control-group.error input,.control-group.error select,.control-group.error textarea{color:#b94a48;border-color:#b94a48;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075)}.control-group.error .checkbox:focus,.control-group.error .radio:focus,.control-group.error input:focus,.control-group.error select:focus,.control-group.error textarea:focus{border-color:#953b39;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #d59392;-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #d59392;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #d59392}.control-group.error .input-prepend .add-on,.control-group.error .input-append .add-on{color:#b94a48;background-color:#f2dede;border-color:#b94a48}.control-group.success>label,.control-group.success .help-block,.control-group.success .help-inline{color:#468847}.control-group.success .checkbox,.control-group.success .radio,.control-group.success input,.control-group.success select,.control-group.success textarea{color:#468847;border-color:#468847;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075)}.control-group.success .checkbox:focus,.control-group.success .radio:focus,.control-group.success input:focus,.control-group.success select:focus,.control-group.success textarea:focus{border-color:#356635;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #7aba7b;-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #7aba7b;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #7aba7b}.control-group.success .input-prepend .add-on,.control-group.success .input-append .add-on{color:#468847;background-color:#dff0d8;border-color:#468847}input:focus:required:invalid,textarea:focus:required:invalid,select:focus:required:invalid{color:#b94a48;border-color:#ee5f5b}input:focus:required:invalid:focus,textarea:focus:required:invalid:focus,select:focus:required:invalid:focus{border-color:#e9322d;-webkit-box-shadow:0 0 6px #f8b9b7;-moz-box-shadow:0 0 6px #f8b9b7;box-shadow:0 0 6px #f8b9b7}.form-actions{padding:19px 20px 20px;margin-top:20px;margin-bottom:20px;background-color:#f5f5f5;border-top:1px solid #e5e5e5;*zoom:1}.form-actions:before,.form-actions:after{display:table;line-height:0;content:""}.form-actions:after{clear:both}.help-block,.help-inline{color:#595959}.help-block{display:block;margin-bottom:10px}.help-inline{display:inline-block;*display:inline;padding-left:5px;vertical-align:middle;*zoom:1}.input-append,.input-prepend{margin-bottom:5px;font-size:0;white-space:nowrap}.input-append input,.input-prepend input,.input-append select,.input-prepend select,.input-append .uneditable-input,.input-prepend .uneditable-input{position:relative;margin-bottom:0;*margin-left:0;font-size:14px;vertical-align:top;-webkit-border-radius:0 3px 3px 0;-moz-border-radius:0 3px 3px 0;border-radius:0 3px 3px 0}.input-append input:focus,.input-prepend input:focus,.input-append select:focus,.input-prepend select:focus,.input-append .uneditable-input:focus,.input-prepend .uneditable-input:focus{z-index:2}.input-append .add-on,.input-prepend .add-on{display:inline-block;width:auto;height:20px;min-width:16px;padding:4px 5px;font-size:14px;font-weight:normal;line-height:20px;text-align:center;text-shadow:0 1px 0 #fff;background-color:#eee;border:1px solid 
#ccc}.input-append .add-on,.input-prepend .add-on,.input-append .btn,.input-prepend .btn{margin-left:-1px;vertical-align:top;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.input-append .active,.input-prepend .active{background-color:#a9dba9;border-color:#46a546}.input-prepend .add-on,.input-prepend .btn{margin-right:-1px}.input-prepend .add-on:first-child,.input-prepend .btn:first-child{-webkit-border-radius:3px 0 0 3px;-moz-border-radius:3px 0 0 3px;border-radius:3px 0 0 3px}.input-append input,.input-append select,.input-append .uneditable-input{-webkit-border-radius:3px 0 0 3px;-moz-border-radius:3px 0 0 3px;border-radius:3px 0 0 3px}.input-append .add-on:last-child,.input-append .btn:last-child{-webkit-border-radius:0 3px 3px 0;-moz-border-radius:0 3px 3px 0;border-radius:0 3px 3px 0}.input-prepend.input-append input,.input-prepend.input-append select,.input-prepend.input-append .uneditable-input{-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.input-prepend.input-append .add-on:first-child,.input-prepend.input-append .btn:first-child{margin-right:-1px;-webkit-border-radius:3px 0 0 3px;-moz-border-radius:3px 0 0 3px;border-radius:3px 0 0 3px}.input-prepend.input-append .add-on:last-child,.input-prepend.input-append .btn:last-child{margin-left:-1px;-webkit-border-radius:0 3px 3px 0;-moz-border-radius:0 3px 3px 0;border-radius:0 3px 3px 0}input.search-query{padding-right:14px;padding-right:4px \9;padding-left:14px;padding-left:4px \9;margin-bottom:0;-webkit-border-radius:15px;-moz-border-radius:15px;border-radius:15px}.form-search .input-append .search-query,.form-search .input-prepend .search-query{-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.form-search .input-append .search-query{-webkit-border-radius:14px 0 0 14px;-moz-border-radius:14px 0 0 14px;border-radius:14px 0 0 14px}.form-search .input-append .btn{-webkit-border-radius:0 14px 14px 0;-moz-border-radius:0 14px 14px 0;border-radius:0 14px 14px 0}.form-search .input-prepend .search-query{-webkit-border-radius:0 14px 14px 0;-moz-border-radius:0 14px 14px 0;border-radius:0 14px 14px 0}.form-search .input-prepend .btn{-webkit-border-radius:14px 0 0 14px;-moz-border-radius:14px 0 0 14px;border-radius:14px 0 0 14px}.form-search input,.form-inline input,.form-horizontal input,.form-search textarea,.form-inline textarea,.form-horizontal textarea,.form-search select,.form-inline select,.form-horizontal select,.form-search .help-inline,.form-inline .help-inline,.form-horizontal .help-inline,.form-search .uneditable-input,.form-inline .uneditable-input,.form-horizontal .uneditable-input,.form-search .input-prepend,.form-inline .input-prepend,.form-horizontal .input-prepend,.form-search .input-append,.form-inline .input-append,.form-horizontal .input-append{display:inline-block;*display:inline;margin-bottom:0;vertical-align:middle;*zoom:1}.form-search .hide,.form-inline .hide,.form-horizontal .hide{display:none}.form-search label,.form-inline label,.form-search .btn-group,.form-inline .btn-group{display:inline-block}.form-search .input-append,.form-inline .input-append,.form-search .input-prepend,.form-inline .input-prepend{margin-bottom:0}.form-search .radio,.form-search .checkbox,.form-inline .radio,.form-inline .checkbox{padding-left:0;margin-bottom:0;vertical-align:middle}.form-search .radio input[type="radio"],.form-search .checkbox input[type="checkbox"],.form-inline .radio input[type="radio"],.form-inline .checkbox 
input[type="checkbox"]{float:left;margin-right:3px;margin-left:0}.control-group{margin-bottom:10px}legend+.control-group{margin-top:20px;-webkit-margin-top-collapse:separate}.form-horizontal .control-group{margin-bottom:20px;*zoom:1}.form-horizontal .control-group:before,.form-horizontal .control-group:after{display:table;line-height:0;content:""}.form-horizontal .control-group:after{clear:both}.form-horizontal .control-label{float:left;width:140px;padding-top:5px;text-align:right}.form-horizontal .controls{*display:inline-block;*padding-left:20px;margin-left:160px;*margin-left:0}.form-horizontal .controls:first-child{*padding-left:160px}.form-horizontal .help-block{margin-top:10px;margin-bottom:0}.form-horizontal .form-actions{padding-left:160px}table{max-width:100%;background-color:transparent;border-collapse:collapse;border-spacing:0}.table{width:100%;margin-bottom:20px}.table th,.table td{padding:8px;line-height:20px;text-align:left;vertical-align:top;border-top:1px solid #ddd}.table th{font-weight:bold}.table thead th{vertical-align:bottom}.table caption+thead tr:first-child th,.table caption+thead tr:first-child td,.table colgroup+thead tr:first-child th,.table colgroup+thead tr:first-child td,.table thead:first-child tr:first-child th,.table thead:first-child tr:first-child td{border-top:0}.table tbody+tbody{border-top:2px solid #ddd}.table-condensed th,.table-condensed td{padding:4px 5px}.table-bordered{border:1px solid #ddd;border-collapse:separate;*border-collapse:collapse;border-left:0;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.table-bordered th,.table-bordered td{border-left:1px solid #ddd}.table-bordered caption+thead tr:first-child th,.table-bordered caption+tbody tr:first-child th,.table-bordered caption+tbody tr:first-child td,.table-bordered colgroup+thead tr:first-child th,.table-bordered colgroup+tbody tr:first-child th,.table-bordered colgroup+tbody tr:first-child td,.table-bordered thead:first-child tr:first-child th,.table-bordered tbody:first-child tr:first-child th,.table-bordered tbody:first-child tr:first-child td{border-top:0}.table-bordered thead:first-child tr:first-child th:first-child,.table-bordered tbody:first-child tr:first-child td:first-child{-webkit-border-top-left-radius:4px;border-top-left-radius:4px;-moz-border-radius-topleft:4px}.table-bordered thead:first-child tr:first-child th:last-child,.table-bordered tbody:first-child tr:first-child td:last-child{-webkit-border-top-right-radius:4px;border-top-right-radius:4px;-moz-border-radius-topright:4px}.table-bordered thead:last-child tr:last-child th:first-child,.table-bordered tbody:last-child tr:last-child td:first-child,.table-bordered tfoot:last-child tr:last-child td:first-child{-webkit-border-radius:0 0 0 4px;-moz-border-radius:0 0 0 4px;border-radius:0 0 0 4px;-webkit-border-bottom-left-radius:4px;border-bottom-left-radius:4px;-moz-border-radius-bottomleft:4px}.table-bordered thead:last-child tr:last-child th:last-child,.table-bordered tbody:last-child tr:last-child td:last-child,.table-bordered tfoot:last-child tr:last-child td:last-child{-webkit-border-bottom-right-radius:4px;border-bottom-right-radius:4px;-moz-border-radius-bottomright:4px}.table-bordered caption+thead tr:first-child th:first-child,.table-bordered caption+tbody tr:first-child td:first-child,.table-bordered colgroup+thead tr:first-child th:first-child,.table-bordered colgroup+tbody tr:first-child 
td:first-child{-webkit-border-top-left-radius:4px;border-top-left-radius:4px;-moz-border-radius-topleft:4px}.table-bordered caption+thead tr:first-child th:last-child,.table-bordered caption+tbody tr:first-child td:last-child,.table-bordered colgroup+thead tr:first-child th:last-child,.table-bordered colgroup+tbody tr:first-child td:last-child{-webkit-border-top-right-radius:4px;border-top-right-radius:4px;-moz-border-radius-topright:4px}.table-striped tbody tr:nth-child(odd) td,.table-striped tbody tr:nth-child(odd) th{background-color:#f9f9f9}.table-hover tbody tr:hover td,.table-hover tbody tr:hover th{background-color:#f5f5f5}table [class*=span],.row-fluid table [class*=span]{display:table-cell;float:none;margin-left:0}table .span1{float:none;width:44px;margin-left:0}table .span2{float:none;width:124px;margin-left:0}table .span3{float:none;width:204px;margin-left:0}table .span4{float:none;width:284px;margin-left:0}table .span5{float:none;width:364px;margin-left:0}table .span6{float:none;width:444px;margin-left:0}table .span7{float:none;width:524px;margin-left:0}table .span8{float:none;width:604px;margin-left:0}table .span9{float:none;width:684px;margin-left:0}table .span10{float:none;width:764px;margin-left:0}table .span11{float:none;width:844px;margin-left:0}table .span12{float:none;width:924px;margin-left:0}table .span13{float:none;width:1004px;margin-left:0}table .span14{float:none;width:1084px;margin-left:0}table .span15{float:none;width:1164px;margin-left:0}table .span16{float:none;width:1244px;margin-left:0}table .span17{float:none;width:1324px;margin-left:0}table .span18{float:none;width:1404px;margin-left:0}table .span19{float:none;width:1484px;margin-left:0}table .span20{float:none;width:1564px;margin-left:0}table .span21{float:none;width:1644px;margin-left:0}table .span22{float:none;width:1724px;margin-left:0}table .span23{float:none;width:1804px;margin-left:0}table .span24{float:none;width:1884px;margin-left:0}.table tbody tr.success td{background-color:#dff0d8}.table tbody tr.error td{background-color:#f2dede}.table tbody tr.info td{background-color:#d9edf7}[class^="icon-"],[class*=" icon-"]{display:inline-block;width:14px;height:14px;margin-top:1px;*margin-right:.3em;line-height:14px;vertical-align:text-top;background-image:url("../img/glyphicons-halflings.png");background-position:14px 14px;background-repeat:no-repeat}.icon-white,.nav>.active>a>[class^="icon-"],.nav>.active>a>[class*=" icon-"],.dropdown-menu>li>a:hover>[class^="icon-"],.dropdown-menu>li>a:hover>[class*=" icon-"],.dropdown-menu>.active>a>[class^="icon-"],.dropdown-menu>.active>a>[class*=" icon-"]{background-image:url("../img/glyphicons-halflings-white.png")}.icon-glass{background-position:0 0}.icon-music{background-position:-24px 0}.icon-search{background-position:-48px 0}.icon-envelope{background-position:-72px 0}.icon-heart{background-position:-96px 0}.icon-star{background-position:-120px 0}.icon-star-empty{background-position:-144px 0}.icon-user{background-position:-168px 0}.icon-film{background-position:-192px 0}.icon-th-large{background-position:-216px 0}.icon-th{background-position:-240px 0}.icon-th-list{background-position:-264px 0}.icon-ok{background-position:-288px 0}.icon-remove{background-position:-312px 0}.icon-zoom-in{background-position:-336px 0}.icon-zoom-out{background-position:-360px 0}.icon-off{background-position:-384px 0}.icon-signal{background-position:-408px 0}.icon-cog{background-position:-432px 0}.icon-trash{background-position:-456px 0}.icon-home{background-position:0 
-24px}.icon-file{background-position:-24px -24px}.icon-time{background-position:-48px -24px}.icon-road{background-position:-72px -24px}.icon-download-alt{background-position:-96px -24px}.icon-download{background-position:-120px -24px}.icon-upload{background-position:-144px -24px}.icon-inbox{background-position:-168px -24px}.icon-play-circle{background-position:-192px -24px}.icon-repeat{background-position:-216px -24px}.icon-refresh{background-position:-240px -24px}.icon-list-alt{background-position:-264px -24px}.icon-lock{background-position:-287px -24px}.icon-flag{background-position:-312px -24px}.icon-headphones{background-position:-336px -24px}.icon-volume-off{background-position:-360px -24px}.icon-volume-down{background-position:-384px -24px}.icon-volume-up{background-position:-408px -24px}.icon-qrcode{background-position:-432px -24px}.icon-barcode{background-position:-456px -24px}.icon-tag{background-position:0 -48px}.icon-tags{background-position:-25px -48px}.icon-book{background-position:-48px -48px}.icon-bookmark{background-position:-72px -48px}.icon-print{background-position:-96px -48px}.icon-camera{background-position:-120px -48px}.icon-font{background-position:-144px -48px}.icon-bold{background-position:-167px -48px}.icon-italic{background-position:-192px -48px}.icon-text-height{background-position:-216px -48px}.icon-text-width{background-position:-240px -48px}.icon-align-left{background-position:-264px -48px}.icon-align-center{background-position:-288px -48px}.icon-align-right{background-position:-312px -48px}.icon-align-justify{background-position:-336px -48px}.icon-list{background-position:-360px -48px}.icon-indent-left{background-position:-384px -48px}.icon-indent-right{background-position:-408px -48px}.icon-facetime-video{background-position:-432px -48px}.icon-picture{background-position:-456px -48px}.icon-pencil{background-position:0 -72px}.icon-map-marker{background-position:-24px -72px}.icon-adjust{background-position:-48px -72px}.icon-tint{background-position:-72px -72px}.icon-edit{background-position:-96px -72px}.icon-share{background-position:-120px -72px}.icon-check{background-position:-144px -72px}.icon-move{background-position:-168px -72px}.icon-step-backward{background-position:-192px -72px}.icon-fast-backward{background-position:-216px -72px}.icon-backward{background-position:-240px -72px}.icon-play{background-position:-264px -72px}.icon-pause{background-position:-288px -72px}.icon-stop{background-position:-312px -72px}.icon-forward{background-position:-336px -72px}.icon-fast-forward{background-position:-360px -72px}.icon-step-forward{background-position:-384px -72px}.icon-eject{background-position:-408px -72px}.icon-chevron-left{background-position:-432px -72px}.icon-chevron-right{background-position:-456px -72px}.icon-plus-sign{background-position:0 -96px}.icon-minus-sign{background-position:-24px -96px}.icon-remove-sign{background-position:-48px -96px}.icon-ok-sign{background-position:-72px -96px}.icon-question-sign{background-position:-96px -96px}.icon-info-sign{background-position:-120px -96px}.icon-screenshot{background-position:-144px -96px}.icon-remove-circle{background-position:-168px -96px}.icon-ok-circle{background-position:-192px -96px}.icon-ban-circle{background-position:-216px -96px}.icon-arrow-left{background-position:-240px -96px}.icon-arrow-right{background-position:-264px -96px}.icon-arrow-up{background-position:-289px -96px}.icon-arrow-down{background-position:-312px -96px}.icon-share-alt{background-position:-336px 
-96px}.icon-resize-full{background-position:-360px -96px}.icon-resize-small{background-position:-384px -96px}.icon-plus{background-position:-408px -96px}.icon-minus{background-position:-433px -96px}.icon-asterisk{background-position:-456px -96px}.icon-exclamation-sign{background-position:0 -120px}.icon-gift{background-position:-24px -120px}.icon-leaf{background-position:-48px -120px}.icon-fire{background-position:-72px -120px}.icon-eye-open{background-position:-96px -120px}.icon-eye-close{background-position:-120px -120px}.icon-warning-sign{background-position:-144px -120px}.icon-plane{background-position:-168px -120px}.icon-calendar{background-position:-192px -120px}.icon-random{width:16px;background-position:-216px -120px}.icon-comment{background-position:-240px -120px}.icon-magnet{background-position:-264px -120px}.icon-chevron-up{background-position:-288px -120px}.icon-chevron-down{background-position:-313px -119px}.icon-retweet{background-position:-336px -120px}.icon-shopping-cart{background-position:-360px -120px}.icon-folder-close{background-position:-384px -120px}.icon-folder-open{width:16px;background-position:-408px -120px}.icon-resize-vertical{background-position:-432px -119px}.icon-resize-horizontal{background-position:-456px -118px}.icon-hdd{background-position:0 -144px}.icon-bullhorn{background-position:-24px -144px}.icon-bell{background-position:-48px -144px}.icon-certificate{background-position:-72px -144px}.icon-thumbs-up{background-position:-96px -144px}.icon-thumbs-down{background-position:-120px -144px}.icon-hand-right{background-position:-144px -144px}.icon-hand-left{background-position:-168px -144px}.icon-hand-up{background-position:-192px -144px}.icon-hand-down{background-position:-216px -144px}.icon-circle-arrow-right{background-position:-240px -144px}.icon-circle-arrow-left{background-position:-264px -144px}.icon-circle-arrow-up{background-position:-288px -144px}.icon-circle-arrow-down{background-position:-312px -144px}.icon-globe{background-position:-336px -144px}.icon-wrench{background-position:-360px -144px}.icon-tasks{background-position:-384px -144px}.icon-filter{background-position:-408px -144px}.icon-briefcase{background-position:-432px -144px}.icon-fullscreen{background-position:-456px -144px}.dropup,.dropdown{position:relative}.dropdown-toggle{*margin-bottom:-3px}.dropdown-toggle:active,.open .dropdown-toggle{outline:0}.caret{display:inline-block;width:0;height:0;vertical-align:top;border-top:4px solid #000;border-right:4px solid transparent;border-left:4px solid transparent;content:""}.dropdown .caret{margin-top:8px;margin-left:2px}.dropdown-menu{position:absolute;top:100%;left:0;z-index:1000;display:none;float:left;min-width:160px;padding:5px 0;margin:2px 0 0;list-style:none;background-color:#fff;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.2);*border-right-width:2px;*border-bottom-width:2px;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px;-webkit-box-shadow:0 5px 10px rgba(0,0,0,0.2);-moz-box-shadow:0 5px 10px rgba(0,0,0,0.2);box-shadow:0 5px 10px rgba(0,0,0,0.2);-webkit-background-clip:padding-box;-moz-background-clip:padding;background-clip:padding-box}.dropdown-menu.pull-right{right:0;left:auto}.dropdown-menu .divider{*width:100%;height:1px;margin:9px 1px;*margin:-5px 0 5px;overflow:hidden;background-color:#e5e5e5;border-bottom:1px solid #fff}.dropdown-menu a{display:block;padding:3px 20px;clear:both;font-weight:normal;line-height:20px;color:#333;white-space:nowrap}.dropdown-menu li>a:hover,.dropdown-menu 
li>a:focus,.dropdown-submenu:hover>a{color:#fff;text-decoration:none;background-color:#0088cc;background-color:#0081c2;background-image:-moz-linear-gradient(top,#0088cc,#0087b3);background-image:-webkit-gradient(linear,0 0,0 100%,from(#0088cc),to(#0087b3));background-image:-webkit-linear-gradient(top,#0088cc,#0087b3);background-image:-o-linear-gradient(top,#0088cc,#0087b3);background-image:linear-gradient(to bottom,#0088cc,#0087b3);background-repeat:repeat-x;filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ff0088cc',endColorstr='#ff0087b3',GradientType=0)}.dropdown-menu .active>a,.dropdown-menu .active>a:hover{color:#fff;text-decoration:none;background-color:#0088cc;background-color:#0081c2;background-image:-moz-linear-gradient(top,#0088cc,#0087b3);background-image:-webkit-gradient(linear,0 0,0 100%,from(#0088cc),to(#0087b3));background-image:-webkit-linear-gradient(top,#0088cc,#0087b3);background-image:-o-linear-gradient(top,#0088cc,#0087b3);background-image:linear-gradient(to bottom,#0088cc,#0087b3);background-repeat:repeat-x;outline:0;filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ff0088cc',endColorstr='#ff0087b3',GradientType=0)}.dropdown-menu .disabled>a,.dropdown-menu .disabled>a:hover{color:#999}.dropdown-menu .disabled>a:hover{text-decoration:none;cursor:default;background-color:transparent}.open{*z-index:1000}.open>.dropdown-menu{display:block}.pull-right>.dropdown-menu{right:0;left:auto}.dropup .caret,.navbar-fixed-bottom .dropdown .caret{border-top:0;border-bottom:4px solid #000;content:"\2191"}.dropup .dropdown-menu,.navbar-fixed-bottom .dropdown .dropdown-menu{top:auto;bottom:100%;margin-bottom:1px}.dropdown-submenu{position:relative}.dropdown-submenu>.dropdown-menu{top:0;left:100%;margin-top:-6px;margin-left:-1px;-webkit-border-radius:0 6px 6px 6px;-moz-border-radius:0 6px 6px 6px;border-radius:0 6px 6px 6px}.dropdown-submenu:hover .dropdown-menu{display:block}.dropdown-submenu>a:after{display:block;float:right;width:0;height:0;margin-top:5px;margin-right:-10px;border-color:transparent;border-left-color:#ccc;border-style:solid;border-width:5px 0 5px 5px;content:" "}.dropdown-submenu:hover>a:after{border-left-color:#fff}.dropdown .dropdown-menu .nav-header{padding-right:20px;padding-left:20px}.typeahead{margin-top:2px;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.well{min-height:20px;padding:19px;margin-bottom:20px;background-color:#f5f5f5;border:1px solid #e3e3e3;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.05);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.05);box-shadow:inset 0 1px 1px rgba(0,0,0,0.05)}.well blockquote{border-color:#ddd;border-color:rgba(0,0,0,0.15)}.well-large{padding:24px;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px}.well-small{padding:9px;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px}.fade{opacity:0;-webkit-transition:opacity .15s linear;-moz-transition:opacity .15s linear;-o-transition:opacity .15s linear;transition:opacity .15s linear}.fade.in{opacity:1}.collapse{position:relative;height:0;overflow:hidden;overflow:visible \9;-webkit-transition:height .35s ease;-moz-transition:height .35s ease;-o-transition:height .35s ease;transition:height .35s ease}.collapse.in{height:auto}.close{float:right;font-size:20px;font-weight:bold;line-height:20px;color:#000;text-shadow:0 1px 0 
#fff;opacity:.2;filter:alpha(opacity=20)}.close:hover{color:#000;text-decoration:none;cursor:pointer;opacity:.4;filter:alpha(opacity=40)}button.close{padding:0;cursor:pointer;background:transparent;border:0;-webkit-appearance:none}.btn{display:inline-block;*display:inline;padding:4px 14px;margin-bottom:0;*margin-left:.3em;font-size:14px;line-height:20px;*line-height:20px;color:#333;text-align:center;text-shadow:0 1px 1px rgba(255,255,255,0.75);vertical-align:middle;cursor:pointer;background-color:#f5f5f5;*background-color:#e6e6e6;background-image:-webkit-gradient(linear,0 0,0 100%,from(#fff),to(#e6e6e6));background-image:-webkit-linear-gradient(top,#fff,#e6e6e6);background-image:-o-linear-gradient(top,#fff,#e6e6e6);background-image:linear-gradient(to bottom,#fff,#e6e6e6);background-image:-moz-linear-gradient(top,#fff,#e6e6e6);background-repeat:repeat-x;border:1px solid #bbb;*border:0;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);border-color:#e6e6e6 #e6e6e6 #bfbfbf;border-bottom-color:#a2a2a2;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ffffffff',endColorstr='#ffe6e6e6',GradientType=0);filter:progid:dximagetransform.microsoft.gradient(enabled=false);*zoom:1;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05);-moz-box-shadow:inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05);box-shadow:inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05)}.btn:hover,.btn:active,.btn.active,.btn.disabled,.btn[disabled]{color:#333;background-color:#e6e6e6;*background-color:#d9d9d9}.btn:active,.btn.active{background-color:#ccc \9}.btn:first-child{*margin-left:0}.btn:hover{color:#333;text-decoration:none;background-color:#e6e6e6;*background-color:#d9d9d9;background-position:0 -15px;-webkit-transition:background-position .1s linear;-moz-transition:background-position .1s linear;-o-transition:background-position .1s linear;transition:background-position .1s linear}.btn:focus{outline:thin dotted #333;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}.btn.active,.btn:active{background-color:#e6e6e6;background-color:#d9d9d9 \9;background-image:none;outline:0;-webkit-box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px rgba(0,0,0,0.05);-moz-box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px rgba(0,0,0,0.05);box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px rgba(0,0,0,0.05)}.btn.disabled,.btn[disabled]{cursor:default;background-color:#e6e6e6;background-image:none;opacity:.65;filter:alpha(opacity=65);-webkit-box-shadow:none;-moz-box-shadow:none;box-shadow:none}.btn-large{padding:9px 14px;font-size:16px;line-height:normal;-webkit-border-radius:5px;-moz-border-radius:5px;border-radius:5px}.btn-large [class^="icon-"]{margin-top:2px}.btn-small{padding:3px 9px;font-size:12px;line-height:18px}.btn-small [class^="icon-"]{margin-top:0}.btn-mini{padding:2px 6px;font-size:11px;line-height:16px}.btn-block{display:block;width:100%;padding-right:0;padding-left:0;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.btn-block+.btn-block{margin-top:5px}.btn-primary.active,.btn-warning.active,.btn-danger.active,.btn-success.active,.btn-info.active,.btn-inverse.active{color:rgba(255,255,255,0.75)}.btn{border-color:#c5c5c5;border-color:rgba(0,0,0,0.15) rgba(0,0,0,0.15) rgba(0,0,0,0.25)}.btn-primary{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#006dcc;*background-color:#04c;background-image:-webkit-gradient(linear,0 0,0 
100%,from(#08c),to(#04c));background-image:-webkit-linear-gradient(top,#08c,#04c);background-image:-o-linear-gradient(top,#08c,#04c);background-image:linear-gradient(to bottom,#08c,#04c);background-image:-moz-linear-gradient(top,#08c,#04c);background-repeat:repeat-x;border-color:#04c #04c #002a80;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ff0088cc',endColorstr='#ff0044cc',GradientType=0);filter:progid:dximagetransform.microsoft.gradient(enabled=false)}.btn-primary:hover,.btn-primary:active,.btn-primary.active,.btn-primary.disabled,.btn-primary[disabled]{color:#fff;background-color:#04c;*background-color:#003bb3}.btn-primary:active,.btn-primary.active{background-color:#039 \9}.btn-warning{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#faa732;*background-color:#f89406;background-image:-webkit-gradient(linear,0 0,0 100%,from(#fbb450),to(#f89406));background-image:-webkit-linear-gradient(top,#fbb450,#f89406);background-image:-o-linear-gradient(top,#fbb450,#f89406);background-image:linear-gradient(to bottom,#fbb450,#f89406);background-image:-moz-linear-gradient(top,#fbb450,#f89406);background-repeat:repeat-x;border-color:#f89406 #f89406 #ad6704;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);filter:progid:dximagetransform.microsoft.gradient(startColorstr='#fffbb450',endColorstr='#fff89406',GradientType=0);filter:progid:dximagetransform.microsoft.gradient(enabled=false)}.btn-warning:hover,.btn-warning:active,.btn-warning.active,.btn-warning.disabled,.btn-warning[disabled]{color:#fff;background-color:#f89406;*background-color:#df8505}.btn-warning:active,.btn-warning.active{background-color:#c67605 \9}.btn-danger{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#da4f49;*background-color:#bd362f;background-image:-webkit-gradient(linear,0 0,0 100%,from(#ee5f5b),to(#bd362f));background-image:-webkit-linear-gradient(top,#ee5f5b,#bd362f);background-image:-o-linear-gradient(top,#ee5f5b,#bd362f);background-image:linear-gradient(to bottom,#ee5f5b,#bd362f);background-image:-moz-linear-gradient(top,#ee5f5b,#bd362f);background-repeat:repeat-x;border-color:#bd362f #bd362f #802420;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ffee5f5b',endColorstr='#ffbd362f',GradientType=0);filter:progid:dximagetransform.microsoft.gradient(enabled=false)}.btn-danger:hover,.btn-danger:active,.btn-danger.active,.btn-danger.disabled,.btn-danger[disabled]{color:#fff;background-color:#bd362f;*background-color:#a9302a}.btn-danger:active,.btn-danger.active{background-color:#942a25 \9}.btn-success{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#5bb75b;*background-color:#51a351;background-image:-webkit-gradient(linear,0 0,0 100%,from(#62c462),to(#51a351));background-image:-webkit-linear-gradient(top,#62c462,#51a351);background-image:-o-linear-gradient(top,#62c462,#51a351);background-image:linear-gradient(to bottom,#62c462,#51a351);background-image:-moz-linear-gradient(top,#62c462,#51a351);background-repeat:repeat-x;border-color:#51a351 #51a351 #387038;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) 
rgba(0,0,0,0.25);filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ff62c462',endColorstr='#ff51a351',GradientType=0);filter:progid:dximagetransform.microsoft.gradient(enabled=false)}.btn-success:hover,.btn-success:active,.btn-success.active,.btn-success.disabled,.btn-success[disabled]{color:#fff;background-color:#51a351;*background-color:#499249}.btn-success:active,.btn-success.active{background-color:#408140 \9}.btn-info{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#49afcd;*background-color:#2f96b4;background-image:-webkit-gradient(linear,0 0,0 100%,from(#5bc0de),to(#2f96b4));background-image:-webkit-linear-gradient(top,#5bc0de,#2f96b4);background-image:-o-linear-gradient(top,#5bc0de,#2f96b4);background-image:linear-gradient(to bottom,#5bc0de,#2f96b4);background-image:-moz-linear-gradient(top,#5bc0de,#2f96b4);background-repeat:repeat-x;border-color:#2f96b4 #2f96b4 #1f6377;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ff5bc0de',endColorstr='#ff2f96b4',GradientType=0);filter:progid:dximagetransform.microsoft.gradient(enabled=false)}.btn-info:hover,.btn-info:active,.btn-info.active,.btn-info.disabled,.btn-info[disabled]{color:#fff;background-color:#2f96b4;*background-color:#2a85a0}.btn-info:active,.btn-info.active{background-color:#24748c \9}.btn-inverse{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#363636;*background-color:#222;background-image:-webkit-gradient(linear,0 0,0 100%,from(#444),to(#222));background-image:-webkit-linear-gradient(top,#444,#222);background-image:-o-linear-gradient(top,#444,#222);background-image:linear-gradient(to bottom,#444,#222);background-image:-moz-linear-gradient(top,#444,#222);background-repeat:repeat-x;border-color:#222 #222 #000;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ff444444',endColorstr='#ff222222',GradientType=0);filter:progid:dximagetransform.microsoft.gradient(enabled=false)}.btn-inverse:hover,.btn-inverse:active,.btn-inverse.active,.btn-inverse.disabled,.btn-inverse[disabled]{color:#fff;background-color:#222;*background-color:#151515}.btn-inverse:active,.btn-inverse.active{background-color:#080808 \9}button.btn,input[type="submit"].btn{*padding-top:3px;*padding-bottom:3px}button.btn::-moz-focus-inner,input[type="submit"].btn::-moz-focus-inner{padding:0;border:0}button.btn.btn-large,input[type="submit"].btn.btn-large{*padding-top:7px;*padding-bottom:7px}button.btn.btn-small,input[type="submit"].btn.btn-small{*padding-top:3px;*padding-bottom:3px}button.btn.btn-mini,input[type="submit"].btn.btn-mini{*padding-top:1px;*padding-bottom:1px}.btn-link,.btn-link:active{background-color:transparent;background-image:none;-webkit-box-shadow:none;-moz-box-shadow:none;box-shadow:none}.btn-link{color:#08c;cursor:pointer;border-color:transparent;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.btn-link:hover{color:#005580;text-decoration:underline;background-color:transparent}.btn-group{position:relative;*margin-left:.3em;font-size:0;white-space:nowrap}.btn-group:first-child{*margin-left:0}.btn-group+.btn-group{margin-left:5px}.btn-toolbar{margin-top:10px;margin-bottom:10px;font-size:0}.btn-toolbar .btn-group{display:inline-block;*display:inline;*zoom:1}.btn-toolbar .btn+.btn,.btn-toolbar .btn-group+.btn,.btn-toolbar 
.btn+.btn-group{margin-left:5px}.btn-group>.btn{position:relative;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.btn-group>.btn+.btn{margin-left:-1px}.btn-group>.btn,.btn-group>.dropdown-menu{font-size:14px}.btn-group>.btn-mini{font-size:11px}.btn-group>.btn-small{font-size:12px}.btn-group>.btn-large{font-size:16px}.btn-group>.btn:first-child{margin-left:0;-webkit-border-bottom-left-radius:4px;border-bottom-left-radius:4px;-webkit-border-top-left-radius:4px;border-top-left-radius:4px;-moz-border-radius-bottomleft:4px;-moz-border-radius-topleft:4px}.btn-group>.btn:last-child,.btn-group>.dropdown-toggle{-webkit-border-top-right-radius:4px;border-top-right-radius:4px;-webkit-border-bottom-right-radius:4px;border-bottom-right-radius:4px;-moz-border-radius-topright:4px;-moz-border-radius-bottomright:4px}.btn-group>.btn.large:first-child{margin-left:0;-webkit-border-bottom-left-radius:6px;border-bottom-left-radius:6px;-webkit-border-top-left-radius:6px;border-top-left-radius:6px;-moz-border-radius-bottomleft:6px;-moz-border-radius-topleft:6px}.btn-group>.btn.large:last-child,.btn-group>.large.dropdown-toggle{-webkit-border-top-right-radius:6px;border-top-right-radius:6px;-webkit-border-bottom-right-radius:6px;border-bottom-right-radius:6px;-moz-border-radius-topright:6px;-moz-border-radius-bottomright:6px}.btn-group>.btn:hover,.btn-group>.btn:focus,.btn-group>.btn:active,.btn-group>.btn.active{z-index:2}.btn-group .dropdown-toggle:active,.btn-group.open .dropdown-toggle{outline:0}.btn-group>.btn+.dropdown-toggle{*padding-top:5px;padding-right:8px;*padding-bottom:5px;padding-left:8px;-webkit-box-shadow:inset 1px 0 0 rgba(255,255,255,0.125),inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05);-moz-box-shadow:inset 1px 0 0 rgba(255,255,255,0.125),inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05);box-shadow:inset 1px 0 0 rgba(255,255,255,0.125),inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05)}.btn-group>.btn-mini+.dropdown-toggle{*padding-top:2px;padding-right:5px;*padding-bottom:2px;padding-left:5px}.btn-group>.btn-small+.dropdown-toggle{*padding-top:5px;*padding-bottom:4px}.btn-group>.btn-large+.dropdown-toggle{*padding-top:7px;padding-right:12px;*padding-bottom:7px;padding-left:12px}.btn-group.open .dropdown-toggle{background-image:none;-webkit-box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px rgba(0,0,0,0.05);-moz-box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px rgba(0,0,0,0.05);box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px rgba(0,0,0,0.05)}.btn-group.open .btn.dropdown-toggle{background-color:#e6e6e6}.btn-group.open .btn-primary.dropdown-toggle{background-color:#04c}.btn-group.open .btn-warning.dropdown-toggle{background-color:#f89406}.btn-group.open .btn-danger.dropdown-toggle{background-color:#bd362f}.btn-group.open .btn-success.dropdown-toggle{background-color:#51a351}.btn-group.open .btn-info.dropdown-toggle{background-color:#2f96b4}.btn-group.open .btn-inverse.dropdown-toggle{background-color:#222}.btn .caret{margin-top:8px;margin-left:0}.btn-mini .caret,.btn-small .caret,.btn-large .caret{margin-top:6px}.btn-large .caret{border-top-width:5px;border-right-width:5px;border-left-width:5px}.dropup .btn-large .caret{border-top:0;border-bottom:5px solid #000}.btn-primary .caret,.btn-warning .caret,.btn-danger .caret,.btn-info .caret,.btn-success .caret,.btn-inverse .caret{border-top-color:#fff;border-bottom-color:#fff}.btn-group-vertical{display:inline-block;*display:inline;*zoom:1}.btn-group-vertical 
.btn{display:block;float:none;width:100%;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.btn-group-vertical .btn+.btn{margin-top:-1px;margin-left:0}.btn-group-vertical .btn:first-child{-webkit-border-radius:4px 4px 0 0;-moz-border-radius:4px 4px 0 0;border-radius:4px 4px 0 0}.btn-group-vertical .btn:last-child{-webkit-border-radius:0 0 4px 4px;-moz-border-radius:0 0 4px 4px;border-radius:0 0 4px 4px}.btn-group-vertical .btn-large:first-child{-webkit-border-radius:6px 6px 0 0;-moz-border-radius:6px 6px 0 0;border-radius:6px 6px 0 0}.btn-group-vertical .btn-large:last-child{-webkit-border-radius:0 0 6px 6px;-moz-border-radius:0 0 6px 6px;border-radius:0 0 6px 6px}.alert{padding:8px 35px 8px 14px;margin-bottom:20px;color:#c09853;text-shadow:0 1px 0 rgba(255,255,255,0.5);background-color:#fcf8e3;border:1px solid #fbeed5;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.alert h4{margin:0}.alert .close{position:relative;top:-2px;right:-21px;line-height:20px}.alert-success{color:#468847;background-color:#dff0d8;border-color:#d6e9c6}.alert-danger,.alert-error{color:#b94a48;background-color:#f2dede;border-color:#eed3d7}.alert-info{color:#3a87ad;background-color:#d9edf7;border-color:#bce8f1}.alert-block{padding-top:14px;padding-bottom:14px}.alert-block>p,.alert-block>ul{margin-bottom:0}.alert-block p+p{margin-top:5px}.nav{margin-bottom:20px;margin-left:0;list-style:none}.nav>li>a{display:block}.nav>li>a:hover{text-decoration:none;background-color:#eee}.nav>.pull-right{float:right}.nav-header{display:block;padding:3px 15px;font-size:11px;font-weight:bold;line-height:20px;color:#999;text-shadow:0 1px 0 rgba(255,255,255,0.5);text-transform:uppercase}.nav li+.nav-header{margin-top:9px}.nav-list{padding-right:15px;padding-left:15px;margin-bottom:0}.nav-list>li>a,.nav-list .nav-header{margin-right:-15px;margin-left:-15px;text-shadow:0 1px 0 rgba(255,255,255,0.5)}.nav-list>li>a{padding:3px 15px}.nav-list>.active>a,.nav-list>.active>a:hover{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.2);background-color:#08c}.nav-list [class^="icon-"]{margin-right:2px}.nav-list .divider{*width:100%;height:1px;margin:9px 1px;*margin:-5px 0 5px;overflow:hidden;background-color:#e5e5e5;border-bottom:1px solid #fff}.nav-tabs,.nav-pills{*zoom:1}.nav-tabs:before,.nav-pills:before,.nav-tabs:after,.nav-pills:after{display:table;line-height:0;content:""}.nav-tabs:after,.nav-pills:after{clear:both}.nav-tabs>li,.nav-pills>li{float:left}.nav-tabs>li>a,.nav-pills>li>a{padding-right:12px;padding-left:12px;margin-right:2px;line-height:14px}.nav-tabs{border-bottom:1px solid #ddd}.nav-tabs>li{margin-bottom:-1px}.nav-tabs>li>a{padding-top:8px;padding-bottom:8px;line-height:20px;border:1px solid transparent;-webkit-border-radius:4px 4px 0 0;-moz-border-radius:4px 4px 0 0;border-radius:4px 4px 0 0}.nav-tabs>li>a:hover{border-color:#eee #eee #ddd}.nav-tabs>.active>a,.nav-tabs>.active>a:hover{color:#555;cursor:default;background-color:#fff;border:1px solid #ddd;border-bottom-color:transparent}.nav-pills>li>a{padding-top:8px;padding-bottom:8px;margin-top:2px;margin-bottom:2px;-webkit-border-radius:5px;-moz-border-radius:5px;border-radius:5px}.nav-pills>.active>a,.nav-pills>.active>a:hover{color:#fff;background-color:#08c}.nav-stacked>li{float:none}.nav-stacked>li>a{margin-right:0}.nav-tabs.nav-stacked{border-bottom:0}.nav-tabs.nav-stacked>li>a{border:1px solid 
#ddd;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.nav-tabs.nav-stacked>li:first-child>a{-webkit-border-top-right-radius:4px;border-top-right-radius:4px;-webkit-border-top-left-radius:4px;border-top-left-radius:4px;-moz-border-radius-topright:4px;-moz-border-radius-topleft:4px}.nav-tabs.nav-stacked>li:last-child>a{-webkit-border-bottom-right-radius:4px;border-bottom-right-radius:4px;-webkit-border-bottom-left-radius:4px;border-bottom-left-radius:4px;-moz-border-radius-bottomright:4px;-moz-border-radius-bottomleft:4px}.nav-tabs.nav-stacked>li>a:hover{z-index:2;border-color:#ddd}.nav-pills.nav-stacked>li>a{margin-bottom:3px}.nav-pills.nav-stacked>li:last-child>a{margin-bottom:1px}.nav-tabs .dropdown-menu{-webkit-border-radius:0 0 6px 6px;-moz-border-radius:0 0 6px 6px;border-radius:0 0 6px 6px}.nav-pills .dropdown-menu{-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px}.nav .dropdown-toggle .caret{margin-top:6px;border-top-color:#08c;border-bottom-color:#08c}.nav .dropdown-toggle:hover .caret{border-top-color:#005580;border-bottom-color:#005580}.nav-tabs .dropdown-toggle .caret{margin-top:8px}.nav .active .dropdown-toggle .caret{border-top-color:#fff;border-bottom-color:#fff}.nav-tabs .active .dropdown-toggle .caret{border-top-color:#555;border-bottom-color:#555}.nav>.dropdown.active>a:hover{cursor:pointer}.nav-tabs .open .dropdown-toggle,.nav-pills .open .dropdown-toggle,.nav>li.dropdown.open.active>a:hover{color:#fff;background-color:#999;border-color:#999}.nav li.dropdown.open .caret,.nav li.dropdown.open.active .caret,.nav li.dropdown.open a:hover .caret{border-top-color:#fff;border-bottom-color:#fff;opacity:1;filter:alpha(opacity=100)}.tabs-stacked .open>a:hover{border-color:#999}.tabbable{*zoom:1}.tabbable:before,.tabbable:after{display:table;line-height:0;content:""}.tabbable:after{clear:both}.tab-content{overflow:auto}.tabs-below>.nav-tabs,.tabs-right>.nav-tabs,.tabs-left>.nav-tabs{border-bottom:0}.tab-content>.tab-pane,.pill-content>.pill-pane{display:none}.tab-content>.active,.pill-content>.active{display:block}.tabs-below>.nav-tabs{border-top:1px solid #ddd}.tabs-below>.nav-tabs>li{margin-top:-1px;margin-bottom:0}.tabs-below>.nav-tabs>li>a{-webkit-border-radius:0 0 4px 4px;-moz-border-radius:0 0 4px 4px;border-radius:0 0 4px 4px}.tabs-below>.nav-tabs>li>a:hover{border-top-color:#ddd;border-bottom-color:transparent}.tabs-below>.nav-tabs>.active>a,.tabs-below>.nav-tabs>.active>a:hover{border-color:transparent #ddd #ddd #ddd}.tabs-left>.nav-tabs>li,.tabs-right>.nav-tabs>li{float:none}.tabs-left>.nav-tabs>li>a,.tabs-right>.nav-tabs>li>a{min-width:74px;margin-right:0;margin-bottom:3px}.tabs-left>.nav-tabs{float:left;margin-right:19px;border-right:1px solid #ddd}.tabs-left>.nav-tabs>li>a{margin-right:-1px;-webkit-border-radius:4px 0 0 4px;-moz-border-radius:4px 0 0 4px;border-radius:4px 0 0 4px}.tabs-left>.nav-tabs>li>a:hover{border-color:#eee #ddd #eee #eee}.tabs-left>.nav-tabs .active>a,.tabs-left>.nav-tabs .active>a:hover{border-color:#ddd transparent #ddd #ddd;*border-right-color:#fff}.tabs-right>.nav-tabs{float:right;margin-left:19px;border-left:1px solid #ddd}.tabs-right>.nav-tabs>li>a{margin-left:-1px;-webkit-border-radius:0 4px 4px 0;-moz-border-radius:0 4px 4px 0;border-radius:0 4px 4px 0}.tabs-right>.nav-tabs>li>a:hover{border-color:#eee #eee #eee #ddd}.tabs-right>.nav-tabs .active>a,.tabs-right>.nav-tabs .active>a:hover{border-color:#ddd #ddd #ddd 
transparent;*border-left-color:#fff}.nav>.disabled>a{color:#999}.nav>.disabled>a:hover{text-decoration:none;cursor:default;background-color:transparent}.navbar{*position:relative;*z-index:2;margin-bottom:20px;overflow:visible;color:#555}.navbar-inner{min-height:40px;padding-right:20px;padding-left:20px;background-color:#fafafa;background-image:-moz-linear-gradient(top,#fff,#d8e8f8);background-image:-webkit-gradient(linear,0 0,0 100%,from(#fff),to(#d8e8f8));background-image:-webkit-linear-gradient(top,#fff,#d8e8f8);background-image:-o-linear-gradient(top,#fff,#d8e8f8);background-image:linear-gradient(to bottom,#fff,#d8e8f8);background-repeat:repeat-x;border:1px solid #d4d4d4;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ffffffff',endColorstr='#ffd8e8f8',GradientType=0);-webkit-box-shadow:0 1px 4px rgba(0,0,0,0.065);-moz-box-shadow:0 1px 4px rgba(0,0,0,0.065);box-shadow:0 1px 4px rgba(0,0,0,0.065)}.navbar .container{width:auto}.nav-collapse.collapse{height:auto}.navbar .brand{display:block;float:left;padding:10px 20px 10px;margin-left:-20px;font-size:20px;font-weight:200;color:#555;text-shadow:0 1px 0 #fff}.navbar .brand:hover{text-decoration:none}.navbar-text{margin-bottom:0;line-height:40px}.navbar-link{color:#555}.navbar-link:hover{color:#333}.navbar .divider-vertical{height:40px;margin:0 9px;border-right:1px solid #fff;border-left:1px solid #f2f2f2}.navbar .btn,.navbar .btn-group{margin-top:6px}.navbar .btn-group .btn{margin:0}.navbar-form{margin-bottom:0;*zoom:1}.navbar-form:before,.navbar-form:after{display:table;line-height:0;content:""}.navbar-form:after{clear:both}.navbar-form input,.navbar-form select,.navbar-form .radio,.navbar-form .checkbox{margin-top:5px}.navbar-form input,.navbar-form select,.navbar-form .btn{display:inline-block;margin-bottom:0}.navbar-form input[type="image"],.navbar-form input[type="checkbox"],.navbar-form input[type="radio"]{margin-top:3px}.navbar-form .input-append,.navbar-form .input-prepend{margin-top:6px;white-space:nowrap}.navbar-form .input-append input,.navbar-form .input-prepend input{margin-top:0}.navbar-search{position:relative;float:left;margin-top:5px;margin-bottom:0}.navbar-search .search-query{padding:4px 14px;margin-bottom:0;font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:13px;font-weight:normal;line-height:1;-webkit-border-radius:15px;-moz-border-radius:15px;border-radius:15px}.navbar-static-top{position:static;width:100%;margin-bottom:0}.navbar-static-top .navbar-inner{-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.navbar-fixed-top,.navbar-fixed-bottom{position:fixed;right:0;left:0;z-index:1030;margin-bottom:0}.navbar-fixed-top .navbar-inner,.navbar-fixed-bottom .navbar-inner,.navbar-static-top .navbar-inner{border:0}.navbar-fixed-top .navbar-inner,.navbar-fixed-bottom .navbar-inner{padding-right:0;padding-left:0;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.navbar-static-top .container,.navbar-fixed-top .container,.navbar-fixed-bottom .container{width:940px}.navbar-fixed-top{top:0}.navbar-fixed-top .navbar-inner,.navbar-static-top .navbar-inner{-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,0.1),0 1px 10px rgba(0,0,0,0.1);-moz-box-shadow:inset 0 -1px 0 rgba(0,0,0,0.1),0 1px 10px rgba(0,0,0,0.1);box-shadow:inset 0 -1px 0 rgba(0,0,0,0.1),0 1px 10px rgba(0,0,0,0.1)}.navbar-fixed-bottom{bottom:0}.navbar-fixed-bottom .navbar-inner{-webkit-box-shadow:inset 0 1px 0 rgba(0,0,0,0.1),0 -1px 10px 
rgba(0,0,0,0.1);-moz-box-shadow:inset 0 1px 0 rgba(0,0,0,0.1),0 -1px 10px rgba(0,0,0,0.1);box-shadow:inset 0 1px 0 rgba(0,0,0,0.1),0 -1px 10px rgba(0,0,0,0.1)}.navbar .nav{position:relative;left:0;display:block;float:left;margin:0 10px 0 0}.navbar .nav.pull-right{float:right}.navbar .nav>li{float:left}.navbar .nav>li>a{float:none;padding:10px 15px 10px;color:#555;text-decoration:none;text-shadow:0 1px 0 #fff}.navbar .nav .dropdown-toggle .caret{margin-top:8px}.navbar .nav>li>a:focus,.navbar .nav>li>a:hover{color:#333;text-decoration:none;background-color:transparent}.navbar .nav>.active>a,.navbar .nav>.active>a:hover,.navbar .nav>.active>a:focus{color:#555;text-decoration:none;background-color:#e5e5e5;-webkit-box-shadow:inset 0 3px 8px rgba(0,0,0,0.125);-moz-box-shadow:inset 0 3px 8px rgba(0,0,0,0.125);box-shadow:inset 0 3px 8px rgba(0,0,0,0.125)}.navbar .btn-navbar{display:none;float:right;padding:7px 10px;margin-right:5px;margin-left:5px;color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#ededed;*background-color:#e5e5e5;background-image:-webkit-gradient(linear,0 0,0 100%,from(#f2f2f2),to(#e5e5e5));background-image:-webkit-linear-gradient(top,#f2f2f2,#e5e5e5);background-image:-o-linear-gradient(top,#f2f2f2,#e5e5e5);background-image:linear-gradient(to bottom,#f2f2f2,#e5e5e5);background-image:-moz-linear-gradient(top,#f2f2f2,#e5e5e5);background-repeat:repeat-x;border-color:#e5e5e5 #e5e5e5 #bfbfbf;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);filter:progid:dximagetransform.microsoft.gradient(startColorstr='#fff2f2f2',endColorstr='#ffe5e5e5',GradientType=0);filter:progid:dximagetransform.microsoft.gradient(enabled=false);-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,0.1),0 1px 0 rgba(255,255,255,0.075);-moz-box-shadow:inset 0 1px 0 rgba(255,255,255,0.1),0 1px 0 rgba(255,255,255,0.075);box-shadow:inset 0 1px 0 rgba(255,255,255,0.1),0 1px 0 rgba(255,255,255,0.075)}.navbar .btn-navbar:hover,.navbar .btn-navbar:active,.navbar .btn-navbar.active,.navbar .btn-navbar.disabled,.navbar .btn-navbar[disabled]{color:#fff;background-color:#e5e5e5;*background-color:#d9d9d9}.navbar .btn-navbar:active,.navbar .btn-navbar.active{background-color:#ccc \9}.navbar .btn-navbar .icon-bar{display:block;width:18px;height:2px;background-color:#f5f5f5;-webkit-border-radius:1px;-moz-border-radius:1px;border-radius:1px;-webkit-box-shadow:0 1px 0 rgba(0,0,0,0.25);-moz-box-shadow:0 1px 0 rgba(0,0,0,0.25);box-shadow:0 1px 0 rgba(0,0,0,0.25)}.btn-navbar .icon-bar+.icon-bar{margin-top:3px}.navbar .nav>li>.dropdown-menu:before{position:absolute;top:-7px;left:9px;display:inline-block;border-right:7px solid transparent;border-bottom:7px solid #ccc;border-left:7px solid transparent;border-bottom-color:rgba(0,0,0,0.2);content:''}.navbar .nav>li>.dropdown-menu:after{position:absolute;top:-6px;left:10px;display:inline-block;border-right:6px solid transparent;border-bottom:6px solid #fff;border-left:6px solid transparent;content:''}.navbar-fixed-bottom .nav>li>.dropdown-menu:before{top:auto;bottom:-7px;border-top:7px solid #ccc;border-bottom:0;border-top-color:rgba(0,0,0,0.2)}.navbar-fixed-bottom .nav>li>.dropdown-menu:after{top:auto;bottom:-6px;border-top:6px solid #fff;border-bottom:0}.navbar .nav li.dropdown.open>.dropdown-toggle,.navbar .nav li.dropdown.active>.dropdown-toggle,.navbar .nav li.dropdown.open.active>.dropdown-toggle{color:#555;background-color:#e5e5e5}.navbar .nav li.dropdown>.dropdown-toggle .caret{border-top-color:#555;border-bottom-color:#555}.navbar .nav 
li.dropdown.open>.dropdown-toggle .caret,.navbar .nav li.dropdown.active>.dropdown-toggle .caret,.navbar .nav li.dropdown.open.active>.dropdown-toggle .caret{border-top-color:#555;border-bottom-color:#555}.navbar .pull-right>li>.dropdown-menu,.navbar .nav>li>.dropdown-menu.pull-right{right:0;left:auto}.navbar .pull-right>li>.dropdown-menu:before,.navbar .nav>li>.dropdown-menu.pull-right:before{right:12px;left:auto}.navbar .pull-right>li>.dropdown-menu:after,.navbar .nav>li>.dropdown-menu.pull-right:after{right:13px;left:auto}.navbar .pull-right>li>.dropdown-menu .dropdown-menu,.navbar .nav>li>.dropdown-menu.pull-right .dropdown-menu{right:100%;left:auto;margin-right:-1px;margin-left:0;-webkit-border-radius:6px 0 6px 6px;-moz-border-radius:6px 0 6px 6px;border-radius:6px 0 6px 6px}.navbar-inverse{color:#999}.navbar-inverse .navbar-inner{background-color:#1b1b1b;background-image:-moz-linear-gradient(top,#222,#111);background-image:-webkit-gradient(linear,0 0,0 100%,from(#222),to(#111));background-image:-webkit-linear-gradient(top,#222,#111);background-image:-o-linear-gradient(top,#222,#111);background-image:linear-gradient(to bottom,#222,#111);background-repeat:repeat-x;border-color:#252525;filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ff222222',endColorstr='#ff111111',GradientType=0)}.navbar-inverse .brand,.navbar-inverse .nav>li>a{color:#999;text-shadow:0 -1px 0 rgba(0,0,0,0.25)}.navbar-inverse .brand:hover,.navbar-inverse .nav>li>a:hover{color:#fff}.navbar-inverse .nav>li>a:focus,.navbar-inverse .nav>li>a:hover{color:#fff;background-color:transparent}.navbar-inverse .nav .active>a,.navbar-inverse .nav .active>a:hover,.navbar-inverse .nav .active>a:focus{color:#fff;background-color:#111}.navbar-inverse .navbar-link{color:#999}.navbar-inverse .navbar-link:hover{color:#fff}.navbar-inverse .divider-vertical{border-right-color:#222;border-left-color:#111}.navbar-inverse .nav li.dropdown.open>.dropdown-toggle,.navbar-inverse .nav li.dropdown.active>.dropdown-toggle,.navbar-inverse .nav li.dropdown.open.active>.dropdown-toggle{color:#fff;background-color:#111}.navbar-inverse .nav li.dropdown>.dropdown-toggle .caret{border-top-color:#999;border-bottom-color:#999}.navbar-inverse .nav li.dropdown.open>.dropdown-toggle .caret,.navbar-inverse .nav li.dropdown.active>.dropdown-toggle .caret,.navbar-inverse .nav li.dropdown.open.active>.dropdown-toggle .caret{border-top-color:#fff;border-bottom-color:#fff}.navbar-inverse .navbar-search .search-query{color:#fff;background-color:#515151;border-color:#111;-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,0.1),0 1px 0 rgba(255,255,255,0.15);-moz-box-shadow:inset 0 1px 2px rgba(0,0,0,0.1),0 1px 0 rgba(255,255,255,0.15);box-shadow:inset 0 1px 2px rgba(0,0,0,0.1),0 1px 0 rgba(255,255,255,0.15);-webkit-transition:none;-moz-transition:none;-o-transition:none;transition:none}.navbar-inverse .navbar-search .search-query:-moz-placeholder{color:#ccc}.navbar-inverse .navbar-search .search-query:-ms-input-placeholder{color:#ccc}.navbar-inverse .navbar-search .search-query::-webkit-input-placeholder{color:#ccc}.navbar-inverse .navbar-search .search-query:focus,.navbar-inverse .navbar-search .search-query.focused{padding:5px 15px;color:#333;text-shadow:0 1px 0 #fff;background-color:#fff;border:0;outline:0;-webkit-box-shadow:0 0 3px rgba(0,0,0,0.15);-moz-box-shadow:0 0 3px rgba(0,0,0,0.15);box-shadow:0 0 3px rgba(0,0,0,0.15)}.navbar-inverse .btn-navbar{color:#fff;text-shadow:0 -1px 0 
rgba(0,0,0,0.25);background-color:#0e0e0e;*background-color:#040404;background-image:-webkit-gradient(linear,0 0,0 100%,from(#151515),to(#040404));background-image:-webkit-linear-gradient(top,#151515,#040404);background-image:-o-linear-gradient(top,#151515,#040404);background-image:linear-gradient(to bottom,#151515,#040404);background-image:-moz-linear-gradient(top,#151515,#040404);background-repeat:repeat-x;border-color:#040404 #040404 #000;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ff151515',endColorstr='#ff040404',GradientType=0);filter:progid:dximagetransform.microsoft.gradient(enabled=false)}.navbar-inverse .btn-navbar:hover,.navbar-inverse .btn-navbar:active,.navbar-inverse .btn-navbar.active,.navbar-inverse .btn-navbar.disabled,.navbar-inverse .btn-navbar[disabled]{color:#fff;background-color:#040404;*background-color:#000}.navbar-inverse .btn-navbar:active,.navbar-inverse .btn-navbar.active{background-color:#000 \9}.breadcrumb{padding:8px 15px;margin:0 0 20px;list-style:none;background-color:#f5f5f5;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.breadcrumb li{display:inline-block;*display:inline;text-shadow:0 1px 0 #fff;*zoom:1}.breadcrumb .divider{padding:0 5px;color:#ccc}.breadcrumb .active{color:#999}.pagination{height:40px;margin:20px 0}.pagination ul{display:inline-block;*display:inline;margin-bottom:0;margin-left:0;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px;*zoom:1;-webkit-box-shadow:0 1px 2px rgba(0,0,0,0.05);-moz-box-shadow:0 1px 2px rgba(0,0,0,0.05);box-shadow:0 1px 2px rgba(0,0,0,0.05)}.pagination li{display:inline}.pagination a,.pagination span{float:left;padding:0 14px;line-height:38px;text-decoration:none;background-color:#fff;border:1px solid #ddd;border-left-width:0}.pagination a:hover,.pagination .active a,.pagination .active span{background-color:#f5f5f5}.pagination .active a,.pagination .active span{color:#999;cursor:default}.pagination .disabled span,.pagination .disabled a,.pagination .disabled a:hover{color:#999;cursor:default;background-color:transparent}.pagination li:first-child a,.pagination li:first-child span{border-left-width:1px;-webkit-border-radius:3px 0 0 3px;-moz-border-radius:3px 0 0 3px;border-radius:3px 0 0 3px}.pagination li:last-child a,.pagination li:last-child span{-webkit-border-radius:0 3px 3px 0;-moz-border-radius:0 3px 3px 0;border-radius:0 3px 3px 0}.pagination-centered{text-align:center}.pagination-right{text-align:right}.pager{margin:20px 0;text-align:center;list-style:none;*zoom:1}.pager:before,.pager:after{display:table;line-height:0;content:""}.pager:after{clear:both}.pager li{display:inline}.pager a{display:inline-block;padding:5px 14px;background-color:#fff;border:1px solid #ddd;-webkit-border-radius:15px;-moz-border-radius:15px;border-radius:15px}.pager a:hover{text-decoration:none;background-color:#f5f5f5}.pager .next a{float:right}.pager .previous a{float:left}.pager .disabled a,.pager .disabled a:hover{color:#999;cursor:default;background-color:#fff}.modal-open .dropdown-menu{z-index:2050}.modal-open .dropdown.open{*z-index:2050}.modal-open .popover{z-index:2060}.modal-open .tooltip{z-index:2080}.modal-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040;background-color:#000}.modal-backdrop.fade{opacity:0}.modal-backdrop,.modal-backdrop.fade.in{opacity:.8;filter:alpha(opacity=80)}.modal{position:fixed;top:50%;left:50%;z-index:1050;width:560px;margin:-250px 0 0 
-280px;overflow:auto;background-color:#fff;border:1px solid #999;border:1px solid rgba(0,0,0,0.3);*border:1px solid #999;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px;-webkit-box-shadow:0 3px 7px rgba(0,0,0,0.3);-moz-box-shadow:0 3px 7px rgba(0,0,0,0.3);box-shadow:0 3px 7px rgba(0,0,0,0.3);-webkit-background-clip:padding-box;-moz-background-clip:padding-box;background-clip:padding-box}.modal.fade{top:-25%;-webkit-transition:opacity .3s linear,top .3s ease-out;-moz-transition:opacity .3s linear,top .3s ease-out;-o-transition:opacity .3s linear,top .3s ease-out;transition:opacity .3s linear,top .3s ease-out}.modal.fade.in{top:50%}.modal-header{padding:9px 15px;border-bottom:1px solid #eee}.modal-header .close{margin-top:2px}.modal-header h3{margin:0;line-height:30px}.modal-body{max-height:400px;padding:15px;overflow-y:auto}.modal-form{margin-bottom:0}.modal-footer{padding:14px 15px 15px;margin-bottom:0;text-align:right;background-color:#f5f5f5;border-top:1px solid #ddd;-webkit-border-radius:0 0 6px 6px;-moz-border-radius:0 0 6px 6px;border-radius:0 0 6px 6px;*zoom:1;-webkit-box-shadow:inset 0 1px 0 #fff;-moz-box-shadow:inset 0 1px 0 #fff;box-shadow:inset 0 1px 0 #fff}.modal-footer:before,.modal-footer:after{display:table;line-height:0;content:""}.modal-footer:after{clear:both}.modal-footer .btn+.btn{margin-bottom:0;margin-left:5px}.modal-footer .btn-group .btn+.btn{margin-left:-1px}.tooltip{position:absolute;z-index:1030;display:block;padding:5px;font-size:11px;opacity:0;filter:alpha(opacity=0);visibility:visible}.tooltip.in{opacity:.8;filter:alpha(opacity=80)}.tooltip.top{margin-top:-3px}.tooltip.right{margin-left:3px}.tooltip.bottom{margin-top:3px}.tooltip.left{margin-left:-3px}.tooltip-inner{max-width:200px;padding:3px 8px;color:#fff;text-align:center;text-decoration:none;background-color:#000;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.tooltip-arrow{position:absolute;width:0;height:0;border-color:transparent;border-style:solid}.tooltip.top .tooltip-arrow{bottom:0;left:50%;margin-left:-5px;border-top-color:#000;border-width:5px 5px 0}.tooltip.right .tooltip-arrow{top:50%;left:0;margin-top:-5px;border-right-color:#000;border-width:5px 5px 5px 0}.tooltip.left .tooltip-arrow{top:50%;right:0;margin-top:-5px;border-left-color:#000;border-width:5px 0 5px 5px}.tooltip.bottom .tooltip-arrow{top:0;left:50%;margin-left:-5px;border-bottom-color:#000;border-width:0 5px 5px}.popover{position:absolute;top:0;left:0;z-index:1010;display:none;width:236px;padding:1px;background-color:#fff;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.2);-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px;-webkit-box-shadow:0 5px 10px rgba(0,0,0,0.2);-moz-box-shadow:0 5px 10px rgba(0,0,0,0.2);box-shadow:0 5px 10px rgba(0,0,0,0.2);-webkit-background-clip:padding-box;-moz-background-clip:padding;background-clip:padding-box}.popover.top{margin-bottom:10px}.popover.right{margin-left:10px}.popover.bottom{margin-top:10px}.popover.left{margin-right:10px}.popover-title{padding:8px 14px;margin:0;font-size:14px;font-weight:normal;line-height:18px;background-color:#f7f7f7;border-bottom:1px solid #ebebeb;-webkit-border-radius:5px 5px 0 0;-moz-border-radius:5px 5px 0 0;border-radius:5px 5px 0 0}.popover-content{padding:9px 14px}.popover-content p,.popover-content ul,.popover-content ol{margin-bottom:0}.popover .arrow,.popover .arrow:after{position:absolute;display:inline-block;width:0;height:0;border-color:transparent;border-style:solid}.popover 
.arrow:after{z-index:-1;content:""}.popover.top .arrow{bottom:-10px;left:50%;margin-left:-10px;border-top-color:#fff;border-width:10px 10px 0}.popover.top .arrow:after{bottom:-1px;left:-11px;border-top-color:rgba(0,0,0,0.25);border-width:11px 11px 0}.popover.right .arrow{top:50%;left:-10px;margin-top:-10px;border-right-color:#fff;border-width:10px 10px 10px 0}.popover.right .arrow:after{bottom:-11px;left:-1px;border-right-color:rgba(0,0,0,0.25);border-width:11px 11px 11px 0}.popover.bottom .arrow{top:-10px;left:50%;margin-left:-10px;border-bottom-color:#fff;border-width:0 10px 10px}.popover.bottom .arrow:after{top:-1px;left:-11px;border-bottom-color:rgba(0,0,0,0.25);border-width:0 11px 11px}.popover.left .arrow{top:50%;right:-10px;margin-top:-10px;border-left-color:#fff;border-width:10px 0 10px 10px}.popover.left .arrow:after{right:-1px;bottom:-11px;border-left-color:rgba(0,0,0,0.25);border-width:11px 0 11px 11px}.thumbnails{margin-left:-20px;list-style:none;*zoom:1}.thumbnails:before,.thumbnails:after{display:table;line-height:0;content:""}.thumbnails:after{clear:both}.row-fluid .thumbnails{margin-left:0}.thumbnails>li{float:left;margin-bottom:20px;margin-left:20px}.thumbnail{display:block;padding:4px;line-height:20px;border:1px solid #ddd;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;-webkit-box-shadow:0 1px 3px rgba(0,0,0,0.055);-moz-box-shadow:0 1px 3px rgba(0,0,0,0.055);box-shadow:0 1px 3px rgba(0,0,0,0.055);-webkit-transition:all .2s ease-in-out;-moz-transition:all .2s ease-in-out;-o-transition:all .2s ease-in-out;transition:all .2s ease-in-out}a.thumbnail:hover{border-color:#08c;-webkit-box-shadow:0 1px 4px rgba(0,105,214,0.25);-moz-box-shadow:0 1px 4px rgba(0,105,214,0.25);box-shadow:0 1px 4px rgba(0,105,214,0.25)}.thumbnail>img{display:block;max-width:100%;margin-right:auto;margin-left:auto}.thumbnail .caption{padding:9px;color:#555}.label,.badge{font-size:11.844px;font-weight:bold;line-height:14px;color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);white-space:nowrap;vertical-align:baseline;background-color:#999}.label{padding:1px 4px 2px;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px}.badge{padding:1px 9px 2px;-webkit-border-radius:9px;-moz-border-radius:9px;border-radius:9px}a.label:hover,a.badge:hover{color:#fff;text-decoration:none;cursor:pointer}.label-important,.badge-important{background-color:#b94a48}.label-important[href],.badge-important[href]{background-color:#953b39}.label-warning,.badge-warning{background-color:#f89406}.label-warning[href],.badge-warning[href]{background-color:#c67605}.label-success,.badge-success{background-color:#468847}.label-success[href],.badge-success[href]{background-color:#356635}.label-info,.badge-info{background-color:#3a87ad}.label-info[href],.badge-info[href]{background-color:#2d6987}.label-inverse,.badge-inverse{background-color:#333}.label-inverse[href],.badge-inverse[href]{background-color:#1a1a1a}.btn .label,.btn .badge{position:relative;top:-1px}.btn-mini .label,.btn-mini .badge{top:0}@-webkit-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@-moz-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@-ms-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@-o-keyframes progress-bar-stripes{from{background-position:0 0}to{background-position:40px 0}}@keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 
0}}.progress{height:20px;margin-bottom:20px;overflow:hidden;background-color:#f7f7f7;background-image:-moz-linear-gradient(top,#f5f5f5,#f9f9f9);background-image:-webkit-gradient(linear,0 0,0 100%,from(#f5f5f5),to(#f9f9f9));background-image:-webkit-linear-gradient(top,#f5f5f5,#f9f9f9);background-image:-o-linear-gradient(top,#f5f5f5,#f9f9f9);background-image:linear-gradient(to bottom,#f5f5f5,#f9f9f9);background-repeat:repeat-x;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;filter:progid:dximagetransform.microsoft.gradient(startColorstr='#fff5f5f5',endColorstr='#fff9f9f9',GradientType=0);-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,0.1);-moz-box-shadow:inset 0 1px 2px rgba(0,0,0,0.1);box-shadow:inset 0 1px 2px rgba(0,0,0,0.1)}.progress .bar{float:left;width:0;height:100%;font-size:12px;color:#fff;text-align:center;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#0e90d2;background-image:-moz-linear-gradient(top,#149bdf,#0480be);background-image:-webkit-gradient(linear,0 0,0 100%,from(#149bdf),to(#0480be));background-image:-webkit-linear-gradient(top,#149bdf,#0480be);background-image:-o-linear-gradient(top,#149bdf,#0480be);background-image:linear-gradient(to bottom,#149bdf,#0480be);background-repeat:repeat-x;filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ff149bdf',endColorstr='#ff0480be',GradientType=0);-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,0.15);-moz-box-shadow:inset 0 -1px 0 rgba(0,0,0,0.15);box-shadow:inset 0 -1px 0 rgba(0,0,0,0.15);-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;-webkit-transition:width .6s ease;-moz-transition:width .6s ease;-o-transition:width .6s ease;transition:width .6s ease}.progress .bar+.bar{-webkit-box-shadow:inset 1px 0 0 rgba(0,0,0,0.15),inset 0 -1px 0 rgba(0,0,0,0.15);-moz-box-shadow:inset 1px 0 0 rgba(0,0,0,0.15),inset 0 -1px 0 rgba(0,0,0,0.15);box-shadow:inset 1px 0 0 rgba(0,0,0,0.15),inset 0 -1px 0 rgba(0,0,0,0.15)}.progress-striped .bar{background-color:#149bdf;background-image:-webkit-gradient(linear,0 100%,100% 0,color-stop(0.25,rgba(255,255,255,0.15)),color-stop(0.25,transparent),color-stop(0.5,transparent),color-stop(0.5,rgba(255,255,255,0.15)),color-stop(0.75,rgba(255,255,255,0.15)),color-stop(0.75,transparent),to(transparent));background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-moz-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);-webkit-background-size:40px 40px;-moz-background-size:40px 40px;-o-background-size:40px 40px;background-size:40px 40px}.progress.active .bar{-webkit-animation:progress-bar-stripes 2s linear infinite;-moz-animation:progress-bar-stripes 2s linear infinite;-ms-animation:progress-bar-stripes 2s linear infinite;-o-animation:progress-bar-stripes 2s linear infinite;animation:progress-bar-stripes 2s linear infinite}.progress-danger .bar,.progress 
.bar-danger{background-color:#dd514c;background-image:-moz-linear-gradient(top,#ee5f5b,#c43c35);background-image:-webkit-gradient(linear,0 0,0 100%,from(#ee5f5b),to(#c43c35));background-image:-webkit-linear-gradient(top,#ee5f5b,#c43c35);background-image:-o-linear-gradient(top,#ee5f5b,#c43c35);background-image:linear-gradient(to bottom,#ee5f5b,#c43c35);background-repeat:repeat-x;filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ffee5f5b',endColorstr='#ffc43c35',GradientType=0)}.progress-danger.progress-striped .bar,.progress-striped .bar-danger{background-color:#ee5f5b;background-image:-webkit-gradient(linear,0 100%,100% 0,color-stop(0.25,rgba(255,255,255,0.15)),color-stop(0.25,transparent),color-stop(0.5,transparent),color-stop(0.5,rgba(255,255,255,0.15)),color-stop(0.75,rgba(255,255,255,0.15)),color-stop(0.75,transparent),to(transparent));background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-moz-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent)}.progress-success .bar,.progress .bar-success{background-color:#5eb95e;background-image:-moz-linear-gradient(top,#62c462,#57a957);background-image:-webkit-gradient(linear,0 0,0 100%,from(#62c462),to(#57a957));background-image:-webkit-linear-gradient(top,#62c462,#57a957);background-image:-o-linear-gradient(top,#62c462,#57a957);background-image:linear-gradient(to bottom,#62c462,#57a957);background-repeat:repeat-x;filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ff62c462',endColorstr='#ff57a957',GradientType=0)}.progress-success.progress-striped .bar,.progress-striped .bar-success{background-color:#62c462;background-image:-webkit-gradient(linear,0 100%,100% 0,color-stop(0.25,rgba(255,255,255,0.15)),color-stop(0.25,transparent),color-stop(0.5,transparent),color-stop(0.5,rgba(255,255,255,0.15)),color-stop(0.75,rgba(255,255,255,0.15)),color-stop(0.75,transparent),to(transparent));background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-moz-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent)}.progress-info .bar,.progress .bar-info{background-color:#4bb1cf;background-image:-moz-linear-gradient(top,#5bc0de,#339bb9);background-image:-webkit-gradient(linear,0 0,0 
100%,from(#5bc0de),to(#339bb9));background-image:-webkit-linear-gradient(top,#5bc0de,#339bb9);background-image:-o-linear-gradient(top,#5bc0de,#339bb9);background-image:linear-gradient(to bottom,#5bc0de,#339bb9);background-repeat:repeat-x;filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ff5bc0de',endColorstr='#ff339bb9',GradientType=0)}.progress-info.progress-striped .bar,.progress-striped .bar-info{background-color:#5bc0de;background-image:-webkit-gradient(linear,0 100%,100% 0,color-stop(0.25,rgba(255,255,255,0.15)),color-stop(0.25,transparent),color-stop(0.5,transparent),color-stop(0.5,rgba(255,255,255,0.15)),color-stop(0.75,rgba(255,255,255,0.15)),color-stop(0.75,transparent),to(transparent));background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-moz-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent)}.progress-warning .bar,.progress .bar-warning{background-color:#faa732;background-image:-moz-linear-gradient(top,#fbb450,#f89406);background-image:-webkit-gradient(linear,0 0,0 100%,from(#fbb450),to(#f89406));background-image:-webkit-linear-gradient(top,#fbb450,#f89406);background-image:-o-linear-gradient(top,#fbb450,#f89406);background-image:linear-gradient(to bottom,#fbb450,#f89406);background-repeat:repeat-x;filter:progid:dximagetransform.microsoft.gradient(startColorstr='#fffbb450',endColorstr='#fff89406',GradientType=0)}.progress-warning.progress-striped .bar,.progress-striped .bar-warning{background-color:#fbb450;background-image:-webkit-gradient(linear,0 100%,100% 0,color-stop(0.25,rgba(255,255,255,0.15)),color-stop(0.25,transparent),color-stop(0.5,transparent),color-stop(0.5,rgba(255,255,255,0.15)),color-stop(0.75,rgba(255,255,255,0.15)),color-stop(0.75,transparent),to(transparent));background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-moz-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent)}.accordion{margin-bottom:20px}.accordion-group{margin-bottom:2px;border:1px solid #e5e5e5;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.accordion-heading{border-bottom:0}.accordion-heading .accordion-toggle{display:block;padding:8px 15px}.accordion-toggle{cursor:pointer}.accordion-inner{padding:9px 15px;border-top:1px solid 
#e5e5e5}.carousel{position:relative;margin-bottom:20px;line-height:1}.carousel-inner{position:relative;width:100%;overflow:hidden}.carousel .item{position:relative;display:none;-webkit-transition:.6s ease-in-out left;-moz-transition:.6s ease-in-out left;-o-transition:.6s ease-in-out left;transition:.6s ease-in-out left}.carousel .item>img{display:block;line-height:1}.carousel .active,.carousel .next,.carousel .prev{display:block}.carousel .active{left:0}.carousel .next,.carousel .prev{position:absolute;top:0;width:100%}.carousel .next{left:100%}.carousel .prev{left:-100%}.carousel .next.left,.carousel .prev.right{left:0}.carousel .active.left{left:-100%}.carousel .active.right{left:100%}.carousel-control{position:absolute;top:40%;left:15px;width:40px;height:40px;margin-top:-20px;font-size:60px;font-weight:100;line-height:30px;color:#fff;text-align:center;background:#222;border:3px solid #fff;-webkit-border-radius:23px;-moz-border-radius:23px;border-radius:23px;opacity:.5;filter:alpha(opacity=50)}.carousel-control.right{right:15px;left:auto}.carousel-control:hover{color:#fff;text-decoration:none;opacity:.9;filter:alpha(opacity=90)}.carousel-caption{position:absolute;right:0;bottom:0;left:0;padding:15px;background:#333;background:rgba(0,0,0,0.75)}.carousel-caption h4,.carousel-caption p{line-height:20px;color:#fff}.carousel-caption h4{margin:0 0 5px}.carousel-caption p{margin-bottom:0}.hero-unit{padding:60px;margin-bottom:30px;background-color:#eee;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px}.hero-unit h1{margin-bottom:0;font-size:60px;line-height:1;letter-spacing:-1px;color:inherit}.hero-unit p{font-size:18px;font-weight:200;line-height:30px;color:inherit}.pull-right{float:right}.pull-left{float:left}.hide{display:none}.show{display:block}.invisible{visibility:hidden}.affix{position:fixed}
+ */article,aside,details,figcaption,figure,footer,header,hgroup,nav,section{display:block}audio,canvas,video{display:inline-block;*display:inline;*zoom:1}audio:not([controls]){display:none}html{font-size:100%;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}a:focus{outline:thin dotted #333;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}a:hover,a:active{outline:0}sub,sup{position:relative;font-size:75%;line-height:0;vertical-align:baseline}sup{top:-0.5em}sub{bottom:-0.25em}img{height:auto;max-width:100%;vertical-align:middle;border:0;-ms-interpolation-mode:bicubic}#map_canvas img{max-width:none}button,input,select,textarea{margin:0;font-size:100%;vertical-align:middle}button,input{*overflow:visible;line-height:normal}button::-moz-focus-inner,input::-moz-focus-inner{padding:0;border:0}button,input[type="button"],input[type="reset"],input[type="submit"]{cursor:pointer;-webkit-appearance:button}input[type="search"]{-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;-webkit-appearance:textfield}input[type="search"]::-webkit-search-decoration,input[type="search"]::-webkit-search-cancel-button{-webkit-appearance:none}textarea{overflow:auto;vertical-align:top}.clearfix{*zoom:1}.clearfix:before,.clearfix:after{display:table;line-height:0;content:""}.clearfix:after{clear:both}.hide-text{font:0/0 a;color:transparent;text-shadow:none;background-color:transparent;border:0}.input-block-level{display:block;width:100%;min-height:30px;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}body{margin:0;font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:14px;line-height:20px;color:#333;background-color:#fff}a{color:#08c;text-decoration:none}a:hover{color:#005580;text-decoration:underline}.img-rounded{-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px}.img-polaroid{padding:4px;background-color:#fff;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.2);-webkit-box-shadow:0 1px 3px rgba(0,0,0,0.1);-moz-box-shadow:0 1px 3px rgba(0,0,0,0.1);box-shadow:0 1px 3px rgba(0,0,0,0.1)}.img-circle{-webkit-border-radius:500px;-moz-border-radius:500px;border-radius:500px}.row{margin-left:-20px;*zoom:1}.row:before,.row:after{display:table;line-height:0;content:""}.row:after{clear:both}[class*="span"]{float:left;margin-left:20px}.container,.navbar-static-top .container,.navbar-fixed-top .container,.navbar-fixed-bottom .container{width:940px}.span12{width:940px}.span11{width:860px}.span10{width:780px}.span9{width:700px}.span8{width:620px}.span7{width:540px}.span6{width:460px}.span5{width:380px}.span4{width:300px}.span3{width:220px}.span2{width:140px}.span1{width:60px}.offset12{margin-left:980px}.offset11{margin-left:900px}.offset10{margin-left:820px}.offset9{margin-left:740px}.offset8{margin-left:660px}.offset7{margin-left:580px}.offset6{margin-left:500px}.offset5{margin-left:420px}.offset4{margin-left:340px}.offset3{margin-left:260px}.offset2{margin-left:180px}.offset1{margin-left:100px}.row-fluid{width:100%;*zoom:1}.row-fluid:before,.row-fluid:after{display:table;line-height:0;content:""}.row-fluid:after{clear:both}.row-fluid [class*="span"]{display:block;float:left;width:100%;min-height:30px;margin-left:2.127659574468085%;*margin-left:2.074468085106383%;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.row-fluid [class*="span"]:first-child{margin-left:0}.row-fluid .span12{width:100%;*width:99.94680851063829%}.row-fluid 
.span11{width:91.48936170212765%;*width:91.43617021276594%}.row-fluid .span10{width:82.97872340425532%;*width:82.92553191489361%}.row-fluid .span9{width:74.46808510638297%;*width:74.41489361702126%}.row-fluid .span8{width:65.95744680851064%;*width:65.90425531914893%}.row-fluid .span7{width:57.44680851063829%;*width:57.39361702127659%}.row-fluid .span6{width:48.93617021276595%;*width:48.88297872340425%}.row-fluid .span5{width:40.42553191489362%;*width:40.37234042553192%}.row-fluid .span4{width:31.914893617021278%;*width:31.861702127659576%}.row-fluid .span3{width:23.404255319148934%;*width:23.351063829787233%}.row-fluid .span2{width:14.893617021276595%;*width:14.840425531914894%}.row-fluid .span1{width:6.382978723404255%;*width:6.329787234042553%}.row-fluid .offset12{margin-left:104.25531914893617%;*margin-left:104.14893617021275%}.row-fluid .offset12:first-child{margin-left:102.12765957446808%;*margin-left:102.02127659574467%}.row-fluid .offset11{margin-left:95.74468085106382%;*margin-left:95.6382978723404%}.row-fluid .offset11:first-child{margin-left:93.61702127659574%;*margin-left:93.51063829787232%}.row-fluid .offset10{margin-left:87.23404255319149%;*margin-left:87.12765957446807%}.row-fluid .offset10:first-child{margin-left:85.1063829787234%;*margin-left:84.99999999999999%}.row-fluid .offset9{margin-left:78.72340425531914%;*margin-left:78.61702127659572%}.row-fluid .offset9:first-child{margin-left:76.59574468085106%;*margin-left:76.48936170212764%}.row-fluid .offset8{margin-left:70.2127659574468%;*margin-left:70.10638297872339%}.row-fluid .offset8:first-child{margin-left:68.08510638297872%;*margin-left:67.9787234042553%}.row-fluid .offset7{margin-left:61.70212765957446%;*margin-left:61.59574468085106%}.row-fluid .offset7:first-child{margin-left:59.574468085106375%;*margin-left:59.46808510638297%}.row-fluid .offset6{margin-left:53.191489361702125%;*margin-left:53.085106382978715%}.row-fluid .offset6:first-child{margin-left:51.063829787234035%;*margin-left:50.95744680851063%}.row-fluid .offset5{margin-left:44.68085106382979%;*margin-left:44.57446808510638%}.row-fluid .offset5:first-child{margin-left:42.5531914893617%;*margin-left:42.4468085106383%}.row-fluid .offset4{margin-left:36.170212765957444%;*margin-left:36.06382978723405%}.row-fluid .offset4:first-child{margin-left:34.04255319148936%;*margin-left:33.93617021276596%}.row-fluid .offset3{margin-left:27.659574468085104%;*margin-left:27.5531914893617%}.row-fluid .offset3:first-child{margin-left:25.53191489361702%;*margin-left:25.425531914893618%}.row-fluid .offset2{margin-left:19.148936170212764%;*margin-left:19.04255319148936%}.row-fluid .offset2:first-child{margin-left:17.02127659574468%;*margin-left:16.914893617021278%}.row-fluid .offset1{margin-left:10.638297872340425%;*margin-left:10.53191489361702%}.row-fluid .offset1:first-child{margin-left:8.51063829787234%;*margin-left:8.404255319148938%}[class*="span"].hide,.row-fluid [class*="span"].hide{display:none}[class*="span"].pull-right,.row-fluid [class*="span"].pull-right{float:right}.container{margin-right:auto;margin-left:auto;*zoom:1}.container:before,.container:after{display:table;line-height:0;content:""}.container:after{clear:both}.container-fluid{padding-right:20px;padding-left:20px;*zoom:1}.container-fluid:before,.container-fluid:after{display:table;line-height:0;content:""}.container-fluid:after{clear:both}p{margin:0 0 
10px}.lead{margin-bottom:20px;font-size:20px;font-weight:200;line-height:30px}small{font-size:85%}strong{font-weight:bold}em{font-style:italic}cite{font-style:normal}.muted{color:#999}h1,h2,h3,h4,h5,h6{margin:10px 0;font-family:inherit;font-weight:bold;line-height:1;color:inherit;text-rendering:optimizelegibility}h1 small,h2 small,h3 small,h4 small,h5 small,h6 small{font-weight:normal;line-height:1;color:#999}h1{font-size:36px;line-height:40px}h2{font-size:30px;line-height:40px}h3{font-size:24px;line-height:40px}h4{font-size:18px;line-height:20px}h5{font-size:14px;line-height:20px}h6{font-size:12px;line-height:20px}h1 small{font-size:24px}h2 small{font-size:18px}h3 small{font-size:14px}h4 small{font-size:14px}.page-header{padding-bottom:9px;margin:20px 0 30px;border-bottom:1px solid #eee}ul,ol{padding:0;margin:0 0 10px 25px}ul ul,ul ol,ol ol,ol ul{margin-bottom:0}li{line-height:20px}ul.unstyled,ol.unstyled{margin-left:0;list-style:none}dl{margin-bottom:20px}dt,dd{line-height:20px}dt{font-weight:bold}dd{margin-left:10px}.dl-horizontal dt{float:left;width:120px;overflow:hidden;clear:left;text-align:right;text-overflow:ellipsis;white-space:nowrap}.dl-horizontal dd{margin-left:130px}hr{margin:20px 0;border:0;border-top:1px solid #eee;border-bottom:1px solid #fff}abbr[title]{cursor:help;border-bottom:1px dotted #999}abbr.initialism{font-size:90%;text-transform:uppercase}blockquote{padding:0 0 0 15px;margin:0 0 20px;border-left:5px solid #eee}blockquote p{margin-bottom:0;font-size:16px;font-weight:300;line-height:25px}blockquote small{display:block;line-height:20px;color:#999}blockquote small:before{content:'\2014 \00A0'}blockquote.pull-right{float:right;padding-right:15px;padding-left:0;border-right:5px solid #eee;border-left:0}blockquote.pull-right p,blockquote.pull-right small{text-align:right}blockquote.pull-right small:before{content:''}blockquote.pull-right small:after{content:'\00A0 \2014'}q:before,q:after,blockquote:before,blockquote:after{content:""}address{display:block;margin-bottom:20px;font-style:normal;line-height:20px}code,pre{padding:0 3px 2px;font-family:Monaco,Menlo,Consolas,"Courier New",monospace;font-size:12px;color:#333;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px}code{padding:2px 4px;color:#d14;background-color:#f7f7f9;border:1px solid #e1e1e8}pre{display:block;padding:9.5px;margin:0 0 10px;font-size:13px;line-height:20px;word-break:break-all;word-wrap:break-word;white-space:pre;white-space:pre-wrap;background-color:#f5f5f5;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.15);-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}pre.prettyprint{margin-bottom:20px}pre code{padding:0;color:inherit;background-color:transparent;border:0}.pre-scrollable{max-height:340px;overflow-y:scroll}form{margin:0 0 20px}fieldset{padding:0;margin:0;border:0}legend{display:block;width:100%;padding:0;margin-bottom:20px;font-size:21px;line-height:40px;color:#333;border:0;border-bottom:1px solid #e5e5e5}legend small{font-size:15px;color:#999}label,input,button,select,textarea{font-size:14px;font-weight:normal;line-height:20px}input,button,select,textarea{font-family:"Helvetica 
Neue",Helvetica,Arial,sans-serif}label{display:block;margin-bottom:5px}select,textarea,input[type="text"],input[type="password"],input[type="datetime"],input[type="datetime-local"],input[type="date"],input[type="month"],input[type="time"],input[type="week"],input[type="number"],input[type="email"],input[type="url"],input[type="search"],input[type="tel"],input[type="color"],.uneditable-input{display:inline-block;height:20px;padding:4px 6px;margin-bottom:9px;font-size:14px;line-height:20px;color:#555;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px}input,textarea{width:210px}textarea{height:auto}textarea,input[type="text"],input[type="password"],input[type="datetime"],input[type="datetime-local"],input[type="date"],input[type="month"],input[type="time"],input[type="week"],input[type="number"],input[type="email"],input[type="url"],input[type="search"],input[type="tel"],input[type="color"],.uneditable-input{background-color:#fff;border:1px solid #ccc;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-webkit-transition:border linear .2s,box-shadow linear .2s;-moz-transition:border linear .2s,box-shadow linear .2s;-o-transition:border linear .2s,box-shadow linear .2s;transition:border linear .2s,box-shadow linear .2s}textarea:focus,input[type="text"]:focus,input[type="password"]:focus,input[type="datetime"]:focus,input[type="datetime-local"]:focus,input[type="date"]:focus,input[type="month"]:focus,input[type="time"]:focus,input[type="week"]:focus,input[type="number"]:focus,input[type="email"]:focus,input[type="url"]:focus,input[type="search"]:focus,input[type="tel"]:focus,input[type="color"]:focus,.uneditable-input:focus{border-color:rgba(82,168,236,0.8);outline:0;outline:thin dotted \9;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 8px rgba(82,168,236,0.6);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 8px rgba(82,168,236,0.6);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 8px rgba(82,168,236,0.6)}input[type="radio"],input[type="checkbox"]{margin:4px 0 0;margin-top:1px \9;*margin-top:0;line-height:normal;cursor:pointer}input[type="file"],input[type="image"],input[type="submit"],input[type="reset"],input[type="button"],input[type="radio"],input[type="checkbox"]{width:auto}select,input[type="file"]{height:30px;*margin-top:4px;line-height:30px}select{width:220px;background-color:#fff;border:1px solid #bbb}select[multiple],select[size]{height:auto}select:focus,input[type="file"]:focus,input[type="radio"]:focus,input[type="checkbox"]:focus{outline:thin dotted #333;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}.uneditable-input,.uneditable-textarea{color:#999;cursor:not-allowed;background-color:#fcfcfc;border-color:#ccc;-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,0.025);-moz-box-shadow:inset 0 1px 2px rgba(0,0,0,0.025);box-shadow:inset 0 1px 2px rgba(0,0,0,0.025)}.uneditable-input{overflow:hidden;white-space:nowrap}.uneditable-textarea{width:auto;height:auto}input:-moz-placeholder,textarea:-moz-placeholder{color:#999}input:-ms-input-placeholder,textarea:-ms-input-placeholder{color:#999}input::-webkit-input-placeholder,textarea::-webkit-input-placeholder{color:#999}.radio,.checkbox{min-height:18px;padding-left:18px}.radio input[type="radio"],.checkbox 
input[type="checkbox"]{float:left;margin-left:-18px}.controls>.radio:first-child,.controls>.checkbox:first-child{padding-top:5px}.radio.inline,.checkbox.inline{display:inline-block;padding-top:5px;margin-bottom:0;vertical-align:middle}.radio.inline+.radio.inline,.checkbox.inline+.checkbox.inline{margin-left:10px}.input-mini{width:60px}.input-small{width:90px}.input-medium{width:150px}.input-large{width:210px}.input-xlarge{width:270px}.input-xxlarge{width:530px}input[class*="span"],select[class*="span"],textarea[class*="span"],.uneditable-input[class*="span"],.row-fluid input[class*="span"],.row-fluid select[class*="span"],.row-fluid textarea[class*="span"],.row-fluid .uneditable-input[class*="span"]{float:none;margin-left:0}.input-append input[class*="span"],.input-append .uneditable-input[class*="span"],.input-prepend input[class*="span"],.input-prepend .uneditable-input[class*="span"],.row-fluid input[class*="span"],.row-fluid select[class*="span"],.row-fluid textarea[class*="span"],.row-fluid .uneditable-input[class*="span"],.row-fluid .input-prepend [class*="span"],.row-fluid .input-append [class*="span"]{display:inline-block}input,textarea,.uneditable-input{margin-left:0}.controls-row [class*="span"]+[class*="span"]{margin-left:20px}input.span12,textarea.span12,.uneditable-input.span12{width:926px}input.span11,textarea.span11,.uneditable-input.span11{width:846px}input.span10,textarea.span10,.uneditable-input.span10{width:766px}input.span9,textarea.span9,.uneditable-input.span9{width:686px}input.span8,textarea.span8,.uneditable-input.span8{width:606px}input.span7,textarea.span7,.uneditable-input.span7{width:526px}input.span6,textarea.span6,.uneditable-input.span6{width:446px}input.span5,textarea.span5,.uneditable-input.span5{width:366px}input.span4,textarea.span4,.uneditable-input.span4{width:286px}input.span3,textarea.span3,.uneditable-input.span3{width:206px}input.span2,textarea.span2,.uneditable-input.span2{width:126px}input.span1,textarea.span1,.uneditable-input.span1{width:46px}.controls-row{*zoom:1}.controls-row:before,.controls-row:after{display:table;line-height:0;content:""}.controls-row:after{clear:both}.controls-row [class*="span"]{float:left}input[disabled],select[disabled],textarea[disabled],input[readonly],select[readonly],textarea[readonly]{cursor:not-allowed;background-color:#eee}input[type="radio"][disabled],input[type="checkbox"][disabled],input[type="radio"][readonly],input[type="checkbox"][readonly]{background-color:transparent}.control-group.warning>label,.control-group.warning .help-block,.control-group.warning .help-inline{color:#c09853}.control-group.warning .checkbox,.control-group.warning .radio,.control-group.warning input,.control-group.warning select,.control-group.warning textarea{color:#c09853;border-color:#c09853;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075)}.control-group.warning .checkbox:focus,.control-group.warning .radio:focus,.control-group.warning input:focus,.control-group.warning select:focus,.control-group.warning textarea:focus{border-color:#a47e3c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #dbc59e;-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #dbc59e;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #dbc59e}.control-group.warning .input-prepend .add-on,.control-group.warning .input-append .add-on{color:#c09853;background-color:#fcf8e3;border-color:#c09853}.control-group.error>label,.control-group.error 
.help-block,.control-group.error .help-inline{color:#b94a48}.control-group.error .checkbox,.control-group.error .radio,.control-group.error input,.control-group.error select,.control-group.error textarea{color:#b94a48;border-color:#b94a48;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075)}.control-group.error .checkbox:focus,.control-group.error .radio:focus,.control-group.error input:focus,.control-group.error select:focus,.control-group.error textarea:focus{border-color:#953b39;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #d59392;-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #d59392;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #d59392}.control-group.error .input-prepend .add-on,.control-group.error .input-append .add-on{color:#b94a48;background-color:#f2dede;border-color:#b94a48}.control-group.success>label,.control-group.success .help-block,.control-group.success .help-inline{color:#468847}.control-group.success .checkbox,.control-group.success .radio,.control-group.success input,.control-group.success select,.control-group.success textarea{color:#468847;border-color:#468847;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075)}.control-group.success .checkbox:focus,.control-group.success .radio:focus,.control-group.success input:focus,.control-group.success select:focus,.control-group.success textarea:focus{border-color:#356635;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #7aba7b;-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #7aba7b;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #7aba7b}.control-group.success .input-prepend .add-on,.control-group.success .input-append .add-on{color:#468847;background-color:#dff0d8;border-color:#468847}input:focus:required:invalid,textarea:focus:required:invalid,select:focus:required:invalid{color:#b94a48;border-color:#ee5f5b}input:focus:required:invalid:focus,textarea:focus:required:invalid:focus,select:focus:required:invalid:focus{border-color:#e9322d;-webkit-box-shadow:0 0 6px #f8b9b7;-moz-box-shadow:0 0 6px #f8b9b7;box-shadow:0 0 6px #f8b9b7}.form-actions{padding:19px 20px 20px;margin-top:20px;margin-bottom:20px;background-color:#f5f5f5;border-top:1px solid #e5e5e5;*zoom:1}.form-actions:before,.form-actions:after{display:table;line-height:0;content:""}.form-actions:after{clear:both}.help-block,.help-inline{color:#595959}.help-block{display:block;margin-bottom:10px}.help-inline{display:inline-block;*display:inline;padding-left:5px;vertical-align:middle;*zoom:1}.input-append,.input-prepend{margin-bottom:5px;font-size:0;white-space:nowrap}.input-append input,.input-prepend input,.input-append select,.input-prepend select,.input-append .uneditable-input,.input-prepend .uneditable-input{position:relative;margin-bottom:0;*margin-left:0;font-size:14px;vertical-align:top;-webkit-border-radius:0 3px 3px 0;-moz-border-radius:0 3px 3px 0;border-radius:0 3px 3px 0}.input-append input:focus,.input-prepend input:focus,.input-append select:focus,.input-prepend select:focus,.input-append .uneditable-input:focus,.input-prepend .uneditable-input:focus{z-index:2}.input-append .add-on,.input-prepend .add-on{display:inline-block;width:auto;height:20px;min-width:16px;padding:4px 5px;font-size:14px;font-weight:normal;line-height:20px;text-align:center;text-shadow:0 1px 0 #fff;background-color:#eee;border:1px solid 
#ccc}.input-append .add-on,.input-prepend .add-on,.input-append .btn,.input-prepend .btn{margin-left:-1px;vertical-align:top;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.input-append .active,.input-prepend .active{background-color:#a9dba9;border-color:#46a546}.input-prepend .add-on,.input-prepend .btn{margin-right:-1px}.input-prepend .add-on:first-child,.input-prepend .btn:first-child{-webkit-border-radius:3px 0 0 3px;-moz-border-radius:3px 0 0 3px;border-radius:3px 0 0 3px}.input-append input,.input-append select,.input-append .uneditable-input{-webkit-border-radius:3px 0 0 3px;-moz-border-radius:3px 0 0 3px;border-radius:3px 0 0 3px}.input-append .add-on:last-child,.input-append .btn:last-child{-webkit-border-radius:0 3px 3px 0;-moz-border-radius:0 3px 3px 0;border-radius:0 3px 3px 0}.input-prepend.input-append input,.input-prepend.input-append select,.input-prepend.input-append .uneditable-input{-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.input-prepend.input-append .add-on:first-child,.input-prepend.input-append .btn:first-child{margin-right:-1px;-webkit-border-radius:3px 0 0 3px;-moz-border-radius:3px 0 0 3px;border-radius:3px 0 0 3px}.input-prepend.input-append .add-on:last-child,.input-prepend.input-append .btn:last-child{margin-left:-1px;-webkit-border-radius:0 3px 3px 0;-moz-border-radius:0 3px 3px 0;border-radius:0 3px 3px 0}input.search-query{padding-right:14px;padding-right:4px \9;padding-left:14px;padding-left:4px \9;margin-bottom:0;-webkit-border-radius:15px;-moz-border-radius:15px;border-radius:15px}.form-search .input-append .search-query,.form-search .input-prepend .search-query{-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.form-search .input-append .search-query{-webkit-border-radius:14px 0 0 14px;-moz-border-radius:14px 0 0 14px;border-radius:14px 0 0 14px}.form-search .input-append .btn{-webkit-border-radius:0 14px 14px 0;-moz-border-radius:0 14px 14px 0;border-radius:0 14px 14px 0}.form-search .input-prepend .search-query{-webkit-border-radius:0 14px 14px 0;-moz-border-radius:0 14px 14px 0;border-radius:0 14px 14px 0}.form-search .input-prepend .btn{-webkit-border-radius:14px 0 0 14px;-moz-border-radius:14px 0 0 14px;border-radius:14px 0 0 14px}.form-search input,.form-inline input,.form-horizontal input,.form-search textarea,.form-inline textarea,.form-horizontal textarea,.form-search select,.form-inline select,.form-horizontal select,.form-search .help-inline,.form-inline .help-inline,.form-horizontal .help-inline,.form-search .uneditable-input,.form-inline .uneditable-input,.form-horizontal .uneditable-input,.form-search .input-prepend,.form-inline .input-prepend,.form-horizontal .input-prepend,.form-search .input-append,.form-inline .input-append,.form-horizontal .input-append{display:inline-block;*display:inline;margin-bottom:0;vertical-align:middle;*zoom:1}.form-search .hide,.form-inline .hide,.form-horizontal .hide{display:none}.form-search label,.form-inline label,.form-search .btn-group,.form-inline .btn-group{display:inline-block}.form-search .input-append,.form-inline .input-append,.form-search .input-prepend,.form-inline .input-prepend{margin-bottom:0}.form-search .radio,.form-search .checkbox,.form-inline .radio,.form-inline .checkbox{padding-left:0;margin-bottom:0;vertical-align:middle}.form-search .radio input[type="radio"],.form-search .checkbox input[type="checkbox"],.form-inline .radio input[type="radio"],.form-inline .checkbox 
input[type="checkbox"]{float:left;margin-right:3px;margin-left:0}.control-group{margin-bottom:10px}legend+.control-group{margin-top:20px;-webkit-margin-top-collapse:separate}.form-horizontal .control-group{margin-bottom:20px;*zoom:1}.form-horizontal .control-group:before,.form-horizontal .control-group:after{display:table;line-height:0;content:""}.form-horizontal .control-group:after{clear:both}.form-horizontal .control-label{float:left;width:140px;padding-top:5px;text-align:right}.form-horizontal .controls{*display:inline-block;*padding-left:20px;margin-left:160px;*margin-left:0}.form-horizontal .controls:first-child{*padding-left:160px}.form-horizontal .help-block{margin-top:10px;margin-bottom:0}.form-horizontal .form-actions{padding-left:160px}table{max-width:100%;background-color:transparent;border-collapse:collapse;border-spacing:0}.table{width:100%;margin-bottom:20px}.table th,.table td{padding:8px;line-height:20px;text-align:left;vertical-align:top;border-top:1px solid #ddd}.table th{font-weight:bold}.table thead th{vertical-align:bottom}.table caption+thead tr:first-child th,.table caption+thead tr:first-child td,.table colgroup+thead tr:first-child th,.table colgroup+thead tr:first-child td,.table thead:first-child tr:first-child th,.table thead:first-child tr:first-child td{border-top:0}.table tbody+tbody{border-top:2px solid #ddd}.table-condensed th,.table-condensed td{padding:4px 5px}.table-bordered{border:1px solid #ddd;border-collapse:separate;*border-collapse:collapse;border-left:0;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.table-bordered th,.table-bordered td{border-left:1px solid #ddd}.table-bordered caption+thead tr:first-child th,.table-bordered caption+tbody tr:first-child th,.table-bordered caption+tbody tr:first-child td,.table-bordered colgroup+thead tr:first-child th,.table-bordered colgroup+tbody tr:first-child th,.table-bordered colgroup+tbody tr:first-child td,.table-bordered thead:first-child tr:first-child th,.table-bordered tbody:first-child tr:first-child th,.table-bordered tbody:first-child tr:first-child td{border-top:0}.table-bordered thead:first-child tr:first-child th:first-child,.table-bordered tbody:first-child tr:first-child td:first-child{-webkit-border-top-left-radius:4px;border-top-left-radius:4px;-moz-border-radius-topleft:4px}.table-bordered thead:first-child tr:first-child th:last-child,.table-bordered tbody:first-child tr:first-child td:last-child{-webkit-border-top-right-radius:4px;border-top-right-radius:4px;-moz-border-radius-topright:4px}.table-bordered thead:last-child tr:last-child th:first-child,.table-bordered tbody:last-child tr:last-child td:first-child,.table-bordered tfoot:last-child tr:last-child td:first-child{-webkit-border-radius:0 0 0 4px;-moz-border-radius:0 0 0 4px;border-radius:0 0 0 4px;-webkit-border-bottom-left-radius:4px;border-bottom-left-radius:4px;-moz-border-radius-bottomleft:4px}.table-bordered thead:last-child tr:last-child th:last-child,.table-bordered tbody:last-child tr:last-child td:last-child,.table-bordered tfoot:last-child tr:last-child td:last-child{-webkit-border-bottom-right-radius:4px;border-bottom-right-radius:4px;-moz-border-radius-bottomright:4px}.table-bordered caption+thead tr:first-child th:first-child,.table-bordered caption+tbody tr:first-child td:first-child,.table-bordered colgroup+thead tr:first-child th:first-child,.table-bordered colgroup+tbody tr:first-child 
td:first-child{-webkit-border-top-left-radius:4px;border-top-left-radius:4px;-moz-border-radius-topleft:4px}.table-bordered caption+thead tr:first-child th:last-child,.table-bordered caption+tbody tr:first-child td:last-child,.table-bordered colgroup+thead tr:first-child th:last-child,.table-bordered colgroup+tbody tr:first-child td:last-child{-webkit-border-top-right-radius:4px;border-top-right-radius:4px;-moz-border-radius-topright:4px}.table-striped tbody tr:nth-child(odd) td,.table-striped tbody tr:nth-child(odd) th{background-color:#f9f9f9}.table-hover tbody tr:hover td,.table-hover tbody tr:hover th{background-color:#f5f5f5}table [class*=span],.row-fluid table [class*=span]{display:table-cell;float:none;margin-left:0}table .span1{float:none;width:44px;margin-left:0}table .span2{float:none;width:124px;margin-left:0}table .span3{float:none;width:204px;margin-left:0}table .span4{float:none;width:284px;margin-left:0}table .span5{float:none;width:364px;margin-left:0}table .span6{float:none;width:444px;margin-left:0}table .span7{float:none;width:524px;margin-left:0}table .span8{float:none;width:604px;margin-left:0}table .span9{float:none;width:684px;margin-left:0}table .span10{float:none;width:764px;margin-left:0}table .span11{float:none;width:844px;margin-left:0}table .span12{float:none;width:924px;margin-left:0}table .span13{float:none;width:1004px;margin-left:0}table .span14{float:none;width:1084px;margin-left:0}table .span15{float:none;width:1164px;margin-left:0}table .span16{float:none;width:1244px;margin-left:0}table .span17{float:none;width:1324px;margin-left:0}table .span18{float:none;width:1404px;margin-left:0}table .span19{float:none;width:1484px;margin-left:0}table .span20{float:none;width:1564px;margin-left:0}table .span21{float:none;width:1644px;margin-left:0}table .span22{float:none;width:1724px;margin-left:0}table .span23{float:none;width:1804px;margin-left:0}table .span24{float:none;width:1884px;margin-left:0}.table tbody tr.success td{background-color:#dff0d8}.table tbody tr.error td{background-color:#f2dede}.table tbody tr.info td{background-color:#d9edf7}[class^="icon-"],[class*=" icon-"]{display:inline-block;width:14px;height:14px;margin-top:1px;*margin-right:.3em;line-height:14px;vertical-align:text-top;background-image:url("../img/glyphicons-halflings.png");background-position:14px 14px;background-repeat:no-repeat}.icon-white,.nav>.active>a>[class^="icon-"],.nav>.active>a>[class*=" icon-"],.dropdown-menu>li>a:hover>[class^="icon-"],.dropdown-menu>li>a:hover>[class*=" icon-"],.dropdown-menu>.active>a>[class^="icon-"],.dropdown-menu>.active>a>[class*=" icon-"]{background-image:url("../img/glyphicons-halflings-white.png")}.icon-glass{background-position:0 0}.icon-music{background-position:-24px 0}.icon-search{background-position:-48px 0}.icon-envelope{background-position:-72px 0}.icon-heart{background-position:-96px 0}.icon-star{background-position:-120px 0}.icon-star-empty{background-position:-144px 0}.icon-user{background-position:-168px 0}.icon-film{background-position:-192px 0}.icon-th-large{background-position:-216px 0}.icon-th{background-position:-240px 0}.icon-th-list{background-position:-264px 0}.icon-ok{background-position:-288px 0}.icon-remove{background-position:-312px 0}.icon-zoom-in{background-position:-336px 0}.icon-zoom-out{background-position:-360px 0}.icon-off{background-position:-384px 0}.icon-signal{background-position:-408px 0}.icon-cog{background-position:-432px 0}.icon-trash{background-position:-456px 0}.icon-home{background-position:0
-24px}.icon-file{background-position:-24px -24px}.icon-time{background-position:-48px -24px}.icon-road{background-position:-72px -24px}.icon-download-alt{background-position:-96px -24px}.icon-download{background-position:-120px -24px}.icon-upload{background-position:-144px -24px}.icon-inbox{background-position:-168px -24px}.icon-play-circle{background-position:-192px -24px}.icon-repeat{background-position:-216px -24px}.icon-refresh{background-position:-240px -24px}.icon-list-alt{background-position:-264px -24px}.icon-lock{background-position:-287px -24px}.icon-flag{background-position:-312px -24px}.icon-headphones{background-position:-336px -24px}.icon-volume-off{background-position:-360px -24px}.icon-volume-down{background-position:-384px -24px}.icon-volume-up{background-position:-408px -24px}.icon-qrcode{background-position:-432px -24px}.icon-barcode{background-position:-456px -24px}.icon-tag{background-position:0 -48px}.icon-tags{background-position:-25px -48px}.icon-book{background-position:-48px -48px}.icon-bookmark{background-position:-72px -48px}.icon-print{background-position:-96px -48px}.icon-camera{background-position:-120px -48px}.icon-font{background-position:-144px -48px}.icon-bold{background-position:-167px -48px}.icon-italic{background-position:-192px -48px}.icon-text-height{background-position:-216px -48px}.icon-text-width{background-position:-240px -48px}.icon-align-left{background-position:-264px -48px}.icon-align-center{background-position:-288px -48px}.icon-align-right{background-position:-312px -48px}.icon-align-justify{background-position:-336px -48px}.icon-list{background-position:-360px -48px}.icon-indent-left{background-position:-384px -48px}.icon-indent-right{background-position:-408px -48px}.icon-facetime-video{background-position:-432px -48px}.icon-picture{background-position:-456px -48px}.icon-pencil{background-position:0 -72px}.icon-map-marker{background-position:-24px -72px}.icon-adjust{background-position:-48px -72px}.icon-tint{background-position:-72px -72px}.icon-edit{background-position:-96px -72px}.icon-share{background-position:-120px -72px}.icon-check{background-position:-144px -72px}.icon-move{background-position:-168px -72px}.icon-step-backward{background-position:-192px -72px}.icon-fast-backward{background-position:-216px -72px}.icon-backward{background-position:-240px -72px}.icon-play{background-position:-264px -72px}.icon-pause{background-position:-288px -72px}.icon-stop{background-position:-312px -72px}.icon-forward{background-position:-336px -72px}.icon-fast-forward{background-position:-360px -72px}.icon-step-forward{background-position:-384px -72px}.icon-eject{background-position:-408px -72px}.icon-chevron-left{background-position:-432px -72px}.icon-chevron-right{background-position:-456px -72px}.icon-plus-sign{background-position:0 -96px}.icon-minus-sign{background-position:-24px -96px}.icon-remove-sign{background-position:-48px -96px}.icon-ok-sign{background-position:-72px -96px}.icon-question-sign{background-position:-96px -96px}.icon-info-sign{background-position:-120px -96px}.icon-screenshot{background-position:-144px -96px}.icon-remove-circle{background-position:-168px -96px}.icon-ok-circle{background-position:-192px -96px}.icon-ban-circle{background-position:-216px -96px}.icon-arrow-left{background-position:-240px -96px}.icon-arrow-right{background-position:-264px -96px}.icon-arrow-up{background-position:-289px -96px}.icon-arrow-down{background-position:-312px -96px}.icon-share-alt{background-position:-336px 
-96px}.icon-resize-full{background-position:-360px -96px}.icon-resize-small{background-position:-384px -96px}.icon-plus{background-position:-408px -96px}.icon-minus{background-position:-433px -96px}.icon-asterisk{background-position:-456px -96px}.icon-exclamation-sign{background-position:0 -120px}.icon-gift{background-position:-24px -120px}.icon-leaf{background-position:-48px -120px}.icon-fire{background-position:-72px -120px}.icon-eye-open{background-position:-96px -120px}.icon-eye-close{background-position:-120px -120px}.icon-warning-sign{background-position:-144px -120px}.icon-plane{background-position:-168px -120px}.icon-calendar{background-position:-192px -120px}.icon-random{width:16px;background-position:-216px -120px}.icon-comment{background-position:-240px -120px}.icon-magnet{background-position:-264px -120px}.icon-chevron-up{background-position:-288px -120px}.icon-chevron-down{background-position:-313px -119px}.icon-retweet{background-position:-336px -120px}.icon-shopping-cart{background-position:-360px -120px}.icon-folder-close{background-position:-384px -120px}.icon-folder-open{width:16px;background-position:-408px -120px}.icon-resize-vertical{background-position:-432px -119px}.icon-resize-horizontal{background-position:-456px -118px}.icon-hdd{background-position:0 -144px}.icon-bullhorn{background-position:-24px -144px}.icon-bell{background-position:-48px -144px}.icon-certificate{background-position:-72px -144px}.icon-thumbs-up{background-position:-96px -144px}.icon-thumbs-down{background-position:-120px -144px}.icon-hand-right{background-position:-144px -144px}.icon-hand-left{background-position:-168px -144px}.icon-hand-up{background-position:-192px -144px}.icon-hand-down{background-position:-216px -144px}.icon-circle-arrow-right{background-position:-240px -144px}.icon-circle-arrow-left{background-position:-264px -144px}.icon-circle-arrow-up{background-position:-288px -144px}.icon-circle-arrow-down{background-position:-312px -144px}.icon-globe{background-position:-336px -144px}.icon-wrench{background-position:-360px -144px}.icon-tasks{background-position:-384px -144px}.icon-filter{background-position:-408px -144px}.icon-briefcase{background-position:-432px -144px}.icon-fullscreen{background-position:-456px -144px}.dropup,.dropdown{position:relative}.dropdown-toggle{*margin-bottom:-3px}.dropdown-toggle:active,.open .dropdown-toggle{outline:0}.caret{display:inline-block;width:0;height:0;vertical-align:top;border-top:4px solid #000;border-right:4px solid transparent;border-left:4px solid transparent;content:""}.dropdown .caret{margin-top:8px;margin-left:2px}.dropdown-menu{position:absolute;top:100%;left:0;z-index:1000;display:none;float:left;min-width:160px;padding:5px 0;margin:2px 0 0;list-style:none;background-color:#fff;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.2);*border-right-width:2px;*border-bottom-width:2px;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px;-webkit-box-shadow:0 5px 10px rgba(0,0,0,0.2);-moz-box-shadow:0 5px 10px rgba(0,0,0,0.2);box-shadow:0 5px 10px rgba(0,0,0,0.2);-webkit-background-clip:padding-box;-moz-background-clip:padding;background-clip:padding-box}.dropdown-menu.pull-right{right:0;left:auto}.dropdown-menu .divider{*width:100%;height:1px;margin:9px 1px;*margin:-5px 0 5px;overflow:hidden;background-color:#e5e5e5;border-bottom:1px solid #fff}.dropdown-menu a{display:block;padding:3px 20px;clear:both;font-weight:normal;line-height:20px;color:#333;white-space:nowrap}.dropdown-menu li>a:hover,.dropdown-menu 
li>a:focus,.dropdown-submenu:hover>a{color:#fff;text-decoration:none;background-color:#0088cc;background-color:#0088cc;background-image:-moz-linear-gradient(top,#0088cc,#0087b3);background-image:-webkit-gradient(linear,0 0,0 100%,from(#0088cc),to(#0087b3));background-image:-webkit-linear-gradient(top,#0088cc,#0087b3);background-image:-o-linear-gradient(top,#0088cc,#0087b3);background-image:linear-gradient(to bottom,#0088cc,#0087b3);background-repeat:repeat-x;filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ff0088cc',endColorstr='#ff0087b3',GradientType=0)}.dropdown-menu .active>a,.dropdown-menu .active>a:hover{color:#fff;text-decoration:none;background-color:#0088cc;background-color:#0081c2;background-image:linear-gradient(to bottom,#0088cc,#0087b3);background-image:-moz-linear-gradient(top,#0088cc,#0087b3);background-image:-webkit-gradient(linear,0 0,0 100%,from(#0088cc),to(#0087b3));background-image:-webkit-linear-gradient(top,#0088cc,#0087b3);background-image:-o-linear-gradient(top,#0088cc,#0087b3);background-repeat:repeat-x;outline:0;filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ff0088cc',endColorstr='#ff0087b3',GradientType=0)}.dropdown-menu .disabled>a,.dropdown-menu .disabled>a:hover{color:#999}.dropdown-menu .disabled>a:hover{text-decoration:none;cursor:default;background-color:transparent}.open{*z-index:1000}.open>.dropdown-menu{display:block}.pull-right>.dropdown-menu{right:0;left:auto}.dropup .caret,.navbar-fixed-bottom .dropdown .caret{border-top:0;border-bottom:4px solid #000;content:"\2191"}.dropup .dropdown-menu,.navbar-fixed-bottom .dropdown .dropdown-menu{top:auto;bottom:100%;margin-bottom:1px}.dropdown-submenu{position:relative}.dropdown-submenu>.dropdown-menu{top:0;left:100%;margin-top:-6px;margin-left:-1px;-webkit-border-radius:0 6px 6px 6px;-moz-border-radius:0 6px 6px 6px;border-radius:0 6px 6px 6px}.dropdown-submenu:hover .dropdown-menu{display:block}.dropdown-submenu>a:after{display:block;float:right;width:0;height:0;margin-top:5px;margin-right:-10px;border-color:transparent;border-left-color:#ccc;border-style:solid;border-width:5px 0 5px 5px;content:" "}.dropdown-submenu:hover>a:after{border-left-color:#fff}.dropdown .dropdown-menu .nav-header{padding-right:20px;padding-left:20px}.typeahead{margin-top:2px;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.well{min-height:20px;padding:19px;margin-bottom:20px;background-color:#f5f5f5;border:1px solid #e3e3e3;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.05);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.05);box-shadow:inset 0 1px 1px rgba(0,0,0,0.05)}.well blockquote{border-color:#ddd;border-color:rgba(0,0,0,0.15)}.well-large{padding:24px;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px}.well-small{padding:9px;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px}.fade{opacity:0;-webkit-transition:opacity .15s linear;-moz-transition:opacity .15s linear;-o-transition:opacity .15s linear;transition:opacity .15s linear}.fade.in{opacity:1}.collapse{position:relative;height:0;overflow:hidden;overflow:visible \9;-webkit-transition:height .35s ease;-moz-transition:height .35s ease;-o-transition:height .35s ease;transition:height .35s ease}.collapse.in{height:auto}.close{float:right;font-size:20px;font-weight:bold;line-height:20px;color:#000;text-shadow:0 1px 0 
#fff;opacity:.2;filter:alpha(opacity=20)}.close:hover{color:#000;text-decoration:none;cursor:pointer;opacity:.4;filter:alpha(opacity=40)}button.close{padding:0;cursor:pointer;background:transparent;border:0;-webkit-appearance:none}.btn{display:inline-block;*display:inline;padding:4px 14px;margin-bottom:0;*margin-left:.3em;font-size:14px;line-height:20px;*line-height:20px;color:#333;text-align:center;text-shadow:0 1px 1px rgba(255,255,255,0.75);vertical-align:middle;cursor:pointer;background-color:#f5f5f5;*background-color:#e6e6e6;background-image:-webkit-gradient(linear,0 0,0 100%,from(#fff),to(#e6e6e6));background-image:-webkit-linear-gradient(top,#fff,#e6e6e6);background-image:-o-linear-gradient(top,#fff,#e6e6e6);background-image:linear-gradient(to bottom,#fff,#e6e6e6);background-image:-moz-linear-gradient(top,#fff,#e6e6e6);background-repeat:repeat-x;border:1px solid #bbb;*border:0;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);border-color:#e6e6e6 #e6e6e6 #bfbfbf;border-bottom-color:#a2a2a2;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ffffffff',endColorstr='#ffe6e6e6',GradientType=0);filter:progid:dximagetransform.microsoft.gradient(enabled=false);*zoom:1;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05);-moz-box-shadow:inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05);box-shadow:inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05)}.btn:hover,.btn:active,.btn.active,.btn.disabled,.btn[disabled]{color:#333;background-color:#e6e6e6;*background-color:#d9d9d9}.btn:active,.btn.active{background-color:#ccc \9}.btn:first-child{*margin-left:0}.btn:hover{color:#333;text-decoration:none;background-color:#e6e6e6;*background-color:#d9d9d9;background-position:0 -15px;-webkit-transition:background-position .1s linear;-moz-transition:background-position .1s linear;-o-transition:background-position .1s linear;transition:background-position .1s linear}.btn:focus{outline:thin dotted #333;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}.btn.active,.btn:active{background-color:#e6e6e6;background-color:#d9d9d9 \9;background-image:none;outline:0;-webkit-box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px rgba(0,0,0,0.05);-moz-box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px rgba(0,0,0,0.05);box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px rgba(0,0,0,0.05)}.btn.disabled,.btn[disabled]{cursor:default;background-color:#e6e6e6;background-image:none;opacity:.65;filter:alpha(opacity=65);-webkit-box-shadow:none;-moz-box-shadow:none;box-shadow:none}.btn-large{padding:9px 14px;font-size:16px;line-height:normal;-webkit-border-radius:5px;-moz-border-radius:5px;border-radius:5px}.btn-large [class^="icon-"]{margin-top:2px}.btn-small{padding:3px 9px;font-size:12px;line-height:18px}.btn-small [class^="icon-"]{margin-top:0}.btn-mini{padding:2px 6px;font-size:11px;line-height:16px}.btn-block{display:block;width:100%;padding-right:0;padding-left:0;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.btn-block+.btn-block{margin-top:5px}.btn-primary.active,.btn-warning.active,.btn-danger.active,.btn-success.active,.btn-info.active,.btn-inverse.active{color:rgba(255,255,255,0.75)}.btn{border-color:#c5c5c5;border-color:rgba(0,0,0,0.15) rgba(0,0,0,0.15) rgba(0,0,0,0.25)}.btn-primary{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#006dcc;*background-color:#04c;background-image:-webkit-gradient(linear,0 0,0 
100%,from(#08c),to(#04c));background-image:-webkit-linear-gradient(top,#08c,#04c);background-image:-o-linear-gradient(top,#08c,#04c);background-image:linear-gradient(to bottom,#08c,#04c);background-image:-moz-linear-gradient(top,#08c,#04c);background-repeat:repeat-x;border-color:#04c #04c #002a80;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ff0088cc',endColorstr='#ff0044cc',GradientType=0);filter:progid:dximagetransform.microsoft.gradient(enabled=false)}.btn-primary:hover,.btn-primary:active,.btn-primary.active,.btn-primary.disabled,.btn-primary[disabled]{color:#fff;background-color:#04c;*background-color:#003bb3}.btn-primary:active,.btn-primary.active{background-color:#039 \9}.btn-warning{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#faa732;*background-color:#f89406;background-image:-webkit-gradient(linear,0 0,0 100%,from(#fbb450),to(#f89406));background-image:-webkit-linear-gradient(top,#fbb450,#f89406);background-image:-o-linear-gradient(top,#fbb450,#f89406);background-image:linear-gradient(to bottom,#fbb450,#f89406);background-image:-moz-linear-gradient(top,#fbb450,#f89406);background-repeat:repeat-x;border-color:#f89406 #f89406 #ad6704;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);filter:progid:dximagetransform.microsoft.gradient(startColorstr='#fffbb450',endColorstr='#fff89406',GradientType=0);filter:progid:dximagetransform.microsoft.gradient(enabled=false)}.btn-warning:hover,.btn-warning:active,.btn-warning.active,.btn-warning.disabled,.btn-warning[disabled]{color:#fff;background-color:#f89406;*background-color:#df8505}.btn-warning:active,.btn-warning.active{background-color:#c67605 \9}.btn-danger{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#da4f49;*background-color:#bd362f;background-image:-webkit-gradient(linear,0 0,0 100%,from(#ee5f5b),to(#bd362f));background-image:-webkit-linear-gradient(top,#ee5f5b,#bd362f);background-image:-o-linear-gradient(top,#ee5f5b,#bd362f);background-image:linear-gradient(to bottom,#ee5f5b,#bd362f);background-image:-moz-linear-gradient(top,#ee5f5b,#bd362f);background-repeat:repeat-x;border-color:#bd362f #bd362f #802420;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ffee5f5b',endColorstr='#ffbd362f',GradientType=0);filter:progid:dximagetransform.microsoft.gradient(enabled=false)}.btn-danger:hover,.btn-danger:active,.btn-danger.active,.btn-danger.disabled,.btn-danger[disabled]{color:#fff;background-color:#bd362f;*background-color:#a9302a}.btn-danger:active,.btn-danger.active{background-color:#942a25 \9}.btn-success{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#5bb75b;*background-color:#51a351;background-image:-webkit-gradient(linear,0 0,0 100%,from(#62c462),to(#51a351));background-image:-webkit-linear-gradient(top,#62c462,#51a351);background-image:-o-linear-gradient(top,#62c462,#51a351);background-image:linear-gradient(to bottom,#62c462,#51a351);background-image:-moz-linear-gradient(top,#62c462,#51a351);background-repeat:repeat-x;border-color:#51a351 #51a351 #387038;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) 
rgba(0,0,0,0.25);filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ff62c462',endColorstr='#ff51a351',GradientType=0);filter:progid:dximagetransform.microsoft.gradient(enabled=false)}.btn-success:hover,.btn-success:active,.btn-success.active,.btn-success.disabled,.btn-success[disabled]{color:#fff;background-color:#51a351;*background-color:#499249}.btn-success:active,.btn-success.active{background-color:#408140 \9}.btn-info{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#49afcd;*background-color:#2f96b4;background-image:-webkit-gradient(linear,0 0,0 100%,from(#5bc0de),to(#2f96b4));background-image:-webkit-linear-gradient(top,#5bc0de,#2f96b4);background-image:-o-linear-gradient(top,#5bc0de,#2f96b4);background-image:linear-gradient(to bottom,#5bc0de,#2f96b4);background-image:-moz-linear-gradient(top,#5bc0de,#2f96b4);background-repeat:repeat-x;border-color:#2f96b4 #2f96b4 #1f6377;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ff5bc0de',endColorstr='#ff2f96b4',GradientType=0);filter:progid:dximagetransform.microsoft.gradient(enabled=false)}.btn-info:hover,.btn-info:active,.btn-info.active,.btn-info.disabled,.btn-info[disabled]{color:#fff;background-color:#2f96b4;*background-color:#2a85a0}.btn-info:active,.btn-info.active{background-color:#24748c \9}.btn-inverse{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#363636;*background-color:#222;background-image:-webkit-gradient(linear,0 0,0 100%,from(#444),to(#222));background-image:-webkit-linear-gradient(top,#444,#222);background-image:-o-linear-gradient(top,#444,#222);background-image:linear-gradient(to bottom,#444,#222);background-image:-moz-linear-gradient(top,#444,#222);background-repeat:repeat-x;border-color:#222 #222 #000;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ff444444',endColorstr='#ff222222',GradientType=0);filter:progid:dximagetransform.microsoft.gradient(enabled=false)}.btn-inverse:hover,.btn-inverse:active,.btn-inverse.active,.btn-inverse.disabled,.btn-inverse[disabled]{color:#fff;background-color:#222;*background-color:#151515}.btn-inverse:active,.btn-inverse.active{background-color:#080808 \9}button.btn,input[type="submit"].btn{*padding-top:3px;*padding-bottom:3px}button.btn::-moz-focus-inner,input[type="submit"].btn::-moz-focus-inner{padding:0;border:0}button.btn.btn-large,input[type="submit"].btn.btn-large{*padding-top:7px;*padding-bottom:7px}button.btn.btn-small,input[type="submit"].btn.btn-small{*padding-top:3px;*padding-bottom:3px}button.btn.btn-mini,input[type="submit"].btn.btn-mini{*padding-top:1px;*padding-bottom:1px}.btn-link,.btn-link:active{background-color:transparent;background-image:none;-webkit-box-shadow:none;-moz-box-shadow:none;box-shadow:none}.btn-link{color:#08c;cursor:pointer;border-color:transparent;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.btn-link:hover{color:#005580;text-decoration:underline;background-color:transparent}.btn-group{position:relative;*margin-left:.3em;font-size:0;white-space:nowrap}.btn-group:first-child{*margin-left:0}.btn-group+.btn-group{margin-left:5px}.btn-toolbar{margin-top:10px;margin-bottom:10px;font-size:0}.btn-toolbar .btn-group{display:inline-block;*display:inline;*zoom:1}.btn-toolbar .btn+.btn,.btn-toolbar .btn-group+.btn,.btn-toolbar 
.btn+.btn-group{margin-left:5px}.btn-group>.btn{position:relative;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.btn-group>.btn+.btn{margin-left:-1px}.btn-group>.btn,.btn-group>.dropdown-menu{font-size:14px}.btn-group>.btn-mini{font-size:11px}.btn-group>.btn-small{font-size:12px}.btn-group>.btn-large{font-size:16px}.btn-group>.btn:first-child{margin-left:0;-webkit-border-bottom-left-radius:4px;border-bottom-left-radius:4px;-webkit-border-top-left-radius:4px;border-top-left-radius:4px;-moz-border-radius-bottomleft:4px;-moz-border-radius-topleft:4px}.btn-group>.btn:last-child,.btn-group>.dropdown-toggle{-webkit-border-top-right-radius:4px;border-top-right-radius:4px;-webkit-border-bottom-right-radius:4px;border-bottom-right-radius:4px;-moz-border-radius-topright:4px;-moz-border-radius-bottomright:4px}.btn-group>.btn.large:first-child{margin-left:0;-webkit-border-bottom-left-radius:6px;border-bottom-left-radius:6px;-webkit-border-top-left-radius:6px;border-top-left-radius:6px;-moz-border-radius-bottomleft:6px;-moz-border-radius-topleft:6px}.btn-group>.btn.large:last-child,.btn-group>.large.dropdown-toggle{-webkit-border-top-right-radius:6px;border-top-right-radius:6px;-webkit-border-bottom-right-radius:6px;border-bottom-right-radius:6px;-moz-border-radius-topright:6px;-moz-border-radius-bottomright:6px}.btn-group>.btn:hover,.btn-group>.btn:focus,.btn-group>.btn:active,.btn-group>.btn.active{z-index:2}.btn-group .dropdown-toggle:active,.btn-group.open .dropdown-toggle{outline:0}.btn-group>.btn+.dropdown-toggle{*padding-top:5px;padding-right:8px;*padding-bottom:5px;padding-left:8px;-webkit-box-shadow:inset 1px 0 0 rgba(255,255,255,0.125),inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05);-moz-box-shadow:inset 1px 0 0 rgba(255,255,255,0.125),inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05);box-shadow:inset 1px 0 0 rgba(255,255,255,0.125),inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05)}.btn-group>.btn-mini+.dropdown-toggle{*padding-top:2px;padding-right:5px;*padding-bottom:2px;padding-left:5px}.btn-group>.btn-small+.dropdown-toggle{*padding-top:5px;*padding-bottom:4px}.btn-group>.btn-large+.dropdown-toggle{*padding-top:7px;padding-right:12px;*padding-bottom:7px;padding-left:12px}.btn-group.open .dropdown-toggle{background-image:none;-webkit-box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px rgba(0,0,0,0.05);-moz-box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px rgba(0,0,0,0.05);box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px rgba(0,0,0,0.05)}.btn-group.open .btn.dropdown-toggle{background-color:#e6e6e6}.btn-group.open .btn-primary.dropdown-toggle{background-color:#04c}.btn-group.open .btn-warning.dropdown-toggle{background-color:#f89406}.btn-group.open .btn-danger.dropdown-toggle{background-color:#bd362f}.btn-group.open .btn-success.dropdown-toggle{background-color:#51a351}.btn-group.open .btn-info.dropdown-toggle{background-color:#2f96b4}.btn-group.open .btn-inverse.dropdown-toggle{background-color:#222}.btn .caret{margin-top:8px;margin-left:0}.btn-mini .caret,.btn-small .caret,.btn-large .caret{margin-top:6px}.btn-large .caret{border-top-width:5px;border-right-width:5px;border-left-width:5px}.dropup .btn-large .caret{border-top:0;border-bottom:5px solid #000}.btn-primary .caret,.btn-warning .caret,.btn-danger .caret,.btn-info .caret,.btn-success .caret,.btn-inverse .caret{border-top-color:#fff;border-bottom-color:#fff}.btn-group-vertical{display:inline-block;*display:inline;*zoom:1}.btn-group-vertical 
.btn{display:block;float:none;width:100%;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.btn-group-vertical .btn+.btn{margin-top:-1px;margin-left:0}.btn-group-vertical .btn:first-child{-webkit-border-radius:4px 4px 0 0;-moz-border-radius:4px 4px 0 0;border-radius:4px 4px 0 0}.btn-group-vertical .btn:last-child{-webkit-border-radius:0 0 4px 4px;-moz-border-radius:0 0 4px 4px;border-radius:0 0 4px 4px}.btn-group-vertical .btn-large:first-child{-webkit-border-radius:6px 6px 0 0;-moz-border-radius:6px 6px 0 0;border-radius:6px 6px 0 0}.btn-group-vertical .btn-large:last-child{-webkit-border-radius:0 0 6px 6px;-moz-border-radius:0 0 6px 6px;border-radius:0 0 6px 6px}.alert{padding:8px 35px 8px 14px;margin-bottom:20px;color:#c09853;text-shadow:0 1px 0 rgba(255,255,255,0.5);background-color:#fcf8e3;border:1px solid #fbeed5;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.alert h4{margin:0}.alert .close{position:relative;top:-2px;right:-21px;line-height:20px}.alert-success{color:#468847;background-color:#dff0d8;border-color:#d6e9c6}.alert-danger,.alert-error{color:#b94a48;background-color:#f2dede;border-color:#eed3d7}.alert-info{color:#3a87ad;background-color:#d9edf7;border-color:#bce8f1}.alert-block{padding-top:14px;padding-bottom:14px}.alert-block>p,.alert-block>ul{margin-bottom:0}.alert-block p+p{margin-top:5px}.nav{margin-bottom:20px;margin-left:0;list-style:none}.nav>li>a{display:block}.nav>li>a:hover{text-decoration:none;background-color:#eee}.nav>.pull-right{float:right}.nav-header{display:block;padding:3px 15px;font-size:11px;font-weight:bold;line-height:20px;color:#999;text-shadow:0 1px 0 rgba(255,255,255,0.5);text-transform:uppercase}.nav li+.nav-header{margin-top:9px}.nav-list{padding-right:15px;padding-left:15px;margin-bottom:0}.nav-list>li>a,.nav-list .nav-header{margin-right:-15px;margin-left:-15px;text-shadow:0 1px 0 rgba(255,255,255,0.5)}.nav-list>li>a{padding:3px 15px}.nav-list>.active>a,.nav-list>.active>a:hover{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.2);background-color:#08c}.nav-list [class^="icon-"]{margin-right:2px}.nav-list .divider{*width:100%;height:1px;margin:9px 1px;*margin:-5px 0 5px;overflow:hidden;background-color:#e5e5e5;border-bottom:1px solid #fff}.nav-tabs,.nav-pills{*zoom:1}.nav-tabs:before,.nav-pills:before,.nav-tabs:after,.nav-pills:after{display:table;line-height:0;content:""}.nav-tabs:after,.nav-pills:after{clear:both}.nav-tabs>li,.nav-pills>li{float:left}.nav-tabs>li>a,.nav-pills>li>a{padding-right:12px;padding-left:12px;margin-right:2px;line-height:14px}.nav-tabs{border-bottom:1px solid #ddd}.nav-tabs>li{margin-bottom:-1px}.nav-tabs>li>a{padding-top:8px;padding-bottom:8px;line-height:20px;border:1px solid transparent;-webkit-border-radius:4px 4px 0 0;-moz-border-radius:4px 4px 0 0;border-radius:4px 4px 0 0}.nav-tabs>li>a:hover{border-color:#eee #eee #ddd}.nav-tabs>.active>a,.nav-tabs>.active>a:hover{color:#555;cursor:default;background-color:#fff;border:1px solid #ddd;border-bottom-color:transparent}.nav-pills>li>a{padding-top:8px;padding-bottom:8px;margin-top:2px;margin-bottom:2px;-webkit-border-radius:5px;-moz-border-radius:5px;border-radius:5px}.nav-pills>.active>a,.nav-pills>.active>a:hover{color:#fff;background-color:#08c}.nav-stacked>li{float:none}.nav-stacked>li>a{margin-right:0}.nav-tabs.nav-stacked{border-bottom:0}.nav-tabs.nav-stacked>li>a{border:1px solid 
#ddd;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.nav-tabs.nav-stacked>li:first-child>a{-webkit-border-top-right-radius:4px;border-top-right-radius:4px;-webkit-border-top-left-radius:4px;border-top-left-radius:4px;-moz-border-radius-topright:4px;-moz-border-radius-topleft:4px}.nav-tabs.nav-stacked>li:last-child>a{-webkit-border-bottom-right-radius:4px;border-bottom-right-radius:4px;-webkit-border-bottom-left-radius:4px;border-bottom-left-radius:4px;-moz-border-radius-bottomright:4px;-moz-border-radius-bottomleft:4px}.nav-tabs.nav-stacked>li>a:hover{z-index:2;border-color:#ddd}.nav-pills.nav-stacked>li>a{margin-bottom:3px}.nav-pills.nav-stacked>li:last-child>a{margin-bottom:1px}.nav-tabs .dropdown-menu{-webkit-border-radius:0 0 6px 6px;-moz-border-radius:0 0 6px 6px;border-radius:0 0 6px 6px}.nav-pills .dropdown-menu{-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px}.nav .dropdown-toggle .caret{margin-top:6px;border-top-color:#08c;border-bottom-color:#08c}.nav .dropdown-toggle:hover .caret{border-top-color:#005580;border-bottom-color:#005580}.nav-tabs .dropdown-toggle .caret{margin-top:8px}.nav .active .dropdown-toggle .caret{border-top-color:#fff;border-bottom-color:#fff}.nav-tabs .active .dropdown-toggle .caret{border-top-color:#555;border-bottom-color:#555}.nav>.dropdown.active>a:hover{cursor:pointer}.nav-tabs .open .dropdown-toggle,.nav-pills .open .dropdown-toggle,.nav>li.dropdown.open.active>a:hover{color:#fff;background-color:#999;border-color:#999}.nav li.dropdown.open .caret,.nav li.dropdown.open.active .caret,.nav li.dropdown.open a:hover .caret{border-top-color:#fff;border-bottom-color:#fff;opacity:1;filter:alpha(opacity=100)}.tabs-stacked .open>a:hover{border-color:#999}.tabbable{*zoom:1}.tabbable:before,.tabbable:after{display:table;line-height:0;content:""}.tabbable:after{clear:both}.tab-content{overflow:auto}.tabs-below>.nav-tabs,.tabs-right>.nav-tabs,.tabs-left>.nav-tabs{border-bottom:0}.tab-content>.tab-pane,.pill-content>.pill-pane{display:none}.tab-content>.active,.pill-content>.active{display:block}.tabs-below>.nav-tabs{border-top:1px solid #ddd}.tabs-below>.nav-tabs>li{margin-top:-1px;margin-bottom:0}.tabs-below>.nav-tabs>li>a{-webkit-border-radius:0 0 4px 4px;-moz-border-radius:0 0 4px 4px;border-radius:0 0 4px 4px}.tabs-below>.nav-tabs>li>a:hover{border-top-color:#ddd;border-bottom-color:transparent}.tabs-below>.nav-tabs>.active>a,.tabs-below>.nav-tabs>.active>a:hover{border-color:transparent #ddd #ddd #ddd}.tabs-left>.nav-tabs>li,.tabs-right>.nav-tabs>li{float:none}.tabs-left>.nav-tabs>li>a,.tabs-right>.nav-tabs>li>a{min-width:74px;margin-right:0;margin-bottom:3px}.tabs-left>.nav-tabs{float:left;margin-right:19px;border-right:1px solid #ddd}.tabs-left>.nav-tabs>li>a{margin-right:-1px;-webkit-border-radius:4px 0 0 4px;-moz-border-radius:4px 0 0 4px;border-radius:4px 0 0 4px}.tabs-left>.nav-tabs>li>a:hover{border-color:#eee #ddd #eee #eee}.tabs-left>.nav-tabs .active>a,.tabs-left>.nav-tabs .active>a:hover{border-color:#ddd transparent #ddd #ddd;*border-right-color:#fff}.tabs-right>.nav-tabs{float:right;margin-left:19px;border-left:1px solid #ddd}.tabs-right>.nav-tabs>li>a{margin-left:-1px;-webkit-border-radius:0 4px 4px 0;-moz-border-radius:0 4px 4px 0;border-radius:0 4px 4px 0}.tabs-right>.nav-tabs>li>a:hover{border-color:#eee #eee #eee #ddd}.tabs-right>.nav-tabs .active>a,.tabs-right>.nav-tabs .active>a:hover{border-color:#ddd #ddd #ddd 
transparent;*border-left-color:#fff}.nav>.disabled>a{color:#999}.nav>.disabled>a:hover{text-decoration:none;cursor:default;background-color:transparent}.navbar{*position:relative;*z-index:2;margin-bottom:20px;overflow:visible;color:#555}.navbar-inner{min-height:40px;padding-right:20px;padding-left:20px;background-color:#fafafa;background-image:-moz-linear-gradient(top,#fff,#d2e2f2);background-image:-webkit-gradient(linear,0 0,0 100%,from(#fff),to(#d2e2f2));background-image:-webkit-linear-gradient(top,#fff,#d2e2f2);background-image:-o-linear-gradient(top,#fff,#d2e2f2);background-image:linear-gradient(to bottom,#fff,#d2e2f2);background-repeat:repeat-x;border:1px solid #d4d4d4;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ffffffff',endColorstr='#ffd2e2f2',GradientType=0);-webkit-box-shadow:0 1px 4px rgba(0,0,0,0.065);-moz-box-shadow:0 1px 4px rgba(0,0,0,0.065);box-shadow:0 1px 4px rgba(0,0,0,0.065)}.navbar .container{width:auto}.nav-collapse.collapse{height:auto}.navbar .brand{display:block;float:left;padding:10px 20px 10px;margin-left:-20px;font-size:20px;font-weight:200;color:#555;text-shadow:0 1px 0 #fff}.navbar .brand:hover{text-decoration:none}.navbar-text{margin-bottom:0;line-height:40px}.navbar-link{color:#555}.navbar-link:hover{color:#333}.navbar .divider-vertical{height:40px;margin:0 9px;border-right:1px solid #fff;border-left:1px solid #f2f2f2}.navbar .btn,.navbar .btn-group{margin-top:6px}.navbar .btn-group .btn{margin:0}.navbar-form{margin-bottom:0;*zoom:1}.navbar-form:before,.navbar-form:after{display:table;line-height:0;content:""}.navbar-form:after{clear:both}.navbar-form input,.navbar-form select,.navbar-form .radio,.navbar-form .checkbox{margin-top:5px}.navbar-form input,.navbar-form select,.navbar-form .btn{display:inline-block;margin-bottom:0}.navbar-form input[type="image"],.navbar-form input[type="checkbox"],.navbar-form input[type="radio"]{margin-top:3px}.navbar-form .input-append,.navbar-form .input-prepend{margin-top:6px;white-space:nowrap}.navbar-form .input-append input,.navbar-form .input-prepend input{margin-top:0}.navbar-search{position:relative;float:left;margin-top:5px;margin-bottom:0}.navbar-search .search-query{padding:4px 14px;margin-bottom:0;font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:13px;font-weight:normal;line-height:1;-webkit-border-radius:15px;-moz-border-radius:15px;border-radius:15px}.navbar-static-top{position:static;width:100%;margin-bottom:0}.navbar-static-top .navbar-inner{-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.navbar-fixed-top,.navbar-fixed-bottom{position:fixed;right:0;left:0;z-index:1030;margin-bottom:0}.navbar-fixed-top .navbar-inner,.navbar-fixed-bottom .navbar-inner,.navbar-static-top .navbar-inner{border:0}.navbar-fixed-top .navbar-inner,.navbar-fixed-bottom .navbar-inner{padding-right:0;padding-left:0;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.navbar-static-top .container,.navbar-fixed-top .container,.navbar-fixed-bottom .container{width:940px}.navbar-fixed-top{top:0}.navbar-fixed-top .navbar-inner,.navbar-static-top .navbar-inner{-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,0.1),0 1px 10px rgba(0,0,0,0.1);-moz-box-shadow:inset 0 -1px 0 rgba(0,0,0,0.1),0 1px 10px rgba(0,0,0,0.1);box-shadow:inset 0 -1px 0 rgba(0,0,0,0.1),0 1px 10px rgba(0,0,0,0.1)}.navbar-fixed-bottom{bottom:0}.navbar-fixed-bottom .navbar-inner{-webkit-box-shadow:inset 0 1px 0 rgba(0,0,0,0.1),0 -1px 10px 
rgba(0,0,0,0.1);-moz-box-shadow:inset 0 1px 0 rgba(0,0,0,0.1),0 -1px 10px rgba(0,0,0,0.1);box-shadow:inset 0 1px 0 rgba(0,0,0,0.1),0 -1px 10px rgba(0,0,0,0.1)}.navbar .nav{position:relative;left:0;display:block;float:left;margin:0 10px 0 0}.navbar .nav.pull-right{float:right}.navbar .nav>li{float:left}.navbar .nav>li>a{float:none;padding:10px 15px 10px;color:#555;text-decoration:none;text-shadow:0 1px 0 #fff}.navbar .nav .dropdown-toggle .caret{margin-top:8px}.navbar .nav>li>a:focus,.navbar .nav>li>a:hover{color:#333;text-decoration:none;background-color:transparent}.navbar .nav>.active>a,.navbar .nav>.active>a:hover,.navbar .nav>.active>a:focus{color:#555;text-decoration:none;background-color:#e5e5e5;-webkit-box-shadow:inset 0 3px 8px rgba(0,0,0,0.125);-moz-box-shadow:inset 0 3px 8px rgba(0,0,0,0.125);box-shadow:inset 0 3px 8px rgba(0,0,0,0.125)}.navbar .btn-navbar{display:none;float:right;padding:7px 10px;margin-right:5px;margin-left:5px;color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#ededed;*background-color:#e5e5e5;background-image:-webkit-gradient(linear,0 0,0 100%,from(#f2f2f2),to(#e5e5e5));background-image:-webkit-linear-gradient(top,#f2f2f2,#e5e5e5);background-image:-o-linear-gradient(top,#f2f2f2,#e5e5e5);background-image:linear-gradient(to bottom,#f2f2f2,#e5e5e5);background-image:-moz-linear-gradient(top,#f2f2f2,#e5e5e5);background-repeat:repeat-x;border-color:#e5e5e5 #e5e5e5 #bfbfbf;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);filter:progid:dximagetransform.microsoft.gradient(startColorstr='#fff2f2f2',endColorstr='#ffe5e5e5',GradientType=0);filter:progid:dximagetransform.microsoft.gradient(enabled=false);-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,0.1),0 1px 0 rgba(255,255,255,0.075);-moz-box-shadow:inset 0 1px 0 rgba(255,255,255,0.1),0 1px 0 rgba(255,255,255,0.075);box-shadow:inset 0 1px 0 rgba(255,255,255,0.1),0 1px 0 rgba(255,255,255,0.075)}.navbar .btn-navbar:hover,.navbar .btn-navbar:active,.navbar .btn-navbar.active,.navbar .btn-navbar.disabled,.navbar .btn-navbar[disabled]{color:#fff;background-color:#e5e5e5;*background-color:#d9d9d9}.navbar .btn-navbar:active,.navbar .btn-navbar.active{background-color:#ccc \9}.navbar .btn-navbar .icon-bar{display:block;width:18px;height:2px;background-color:#f5f5f5;-webkit-border-radius:1px;-moz-border-radius:1px;border-radius:1px;-webkit-box-shadow:0 1px 0 rgba(0,0,0,0.25);-moz-box-shadow:0 1px 0 rgba(0,0,0,0.25);box-shadow:0 1px 0 rgba(0,0,0,0.25)}.btn-navbar .icon-bar+.icon-bar{margin-top:3px}.navbar .nav>li>.dropdown-menu:before{position:absolute;top:-7px;left:9px;display:inline-block;border-right:7px solid transparent;border-bottom:7px solid #ccc;border-left:7px solid transparent;border-bottom-color:rgba(0,0,0,0.2);content:''}.navbar .nav>li>.dropdown-menu:after{position:absolute;top:-6px;left:10px;display:inline-block;border-right:6px solid transparent;border-bottom:6px solid #fff;border-left:6px solid transparent;content:''}.navbar-fixed-bottom .nav>li>.dropdown-menu:before{top:auto;bottom:-7px;border-top:7px solid #ccc;border-bottom:0;border-top-color:rgba(0,0,0,0.2)}.navbar-fixed-bottom .nav>li>.dropdown-menu:after{top:auto;bottom:-6px;border-top:6px solid #fff;border-bottom:0}.navbar .nav li.dropdown.open>.dropdown-toggle,.navbar .nav li.dropdown.active>.dropdown-toggle,.navbar .nav li.dropdown.open.active>.dropdown-toggle{color:#555;background-color:#e5e5e5}.navbar .nav li.dropdown>.dropdown-toggle .caret{border-top-color:#555;border-bottom-color:#555}.navbar .nav 
li.dropdown.open>.dropdown-toggle .caret,.navbar .nav li.dropdown.active>.dropdown-toggle .caret,.navbar .nav li.dropdown.open.active>.dropdown-toggle .caret{border-top-color:#555;border-bottom-color:#555}.navbar .pull-right>li>.dropdown-menu,.navbar .nav>li>.dropdown-menu.pull-right{right:0;left:auto}.navbar .pull-right>li>.dropdown-menu:before,.navbar .nav>li>.dropdown-menu.pull-right:before{right:12px;left:auto}.navbar .pull-right>li>.dropdown-menu:after,.navbar .nav>li>.dropdown-menu.pull-right:after{right:13px;left:auto}.navbar .pull-right>li>.dropdown-menu .dropdown-menu,.navbar .nav>li>.dropdown-menu.pull-right .dropdown-menu{right:100%;left:auto;margin-right:-1px;margin-left:0;-webkit-border-radius:6px 0 6px 6px;-moz-border-radius:6px 0 6px 6px;border-radius:6px 0 6px 6px}.navbar-inverse{color:#999}.navbar-inverse .navbar-inner{background-color:#1b1b1b;background-image:-moz-linear-gradient(top,#222,#111);background-image:-webkit-gradient(linear,0 0,0 100%,from(#222),to(#111));background-image:-webkit-linear-gradient(top,#222,#111);background-image:-o-linear-gradient(top,#222,#111);background-image:linear-gradient(to bottom,#222,#111);background-repeat:repeat-x;border-color:#252525;filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ff222222',endColorstr='#ff111111',GradientType=0)}.navbar-inverse .brand,.navbar-inverse .nav>li>a{color:#999;text-shadow:0 -1px 0 rgba(0,0,0,0.25)}.navbar-inverse .brand:hover,.navbar-inverse .nav>li>a:hover{color:#fff}.navbar-inverse .nav>li>a:focus,.navbar-inverse .nav>li>a:hover{color:#fff;background-color:transparent}.navbar-inverse .nav .active>a,.navbar-inverse .nav .active>a:hover,.navbar-inverse .nav .active>a:focus{color:#fff;background-color:#111}.navbar-inverse .navbar-link{color:#999}.navbar-inverse .navbar-link:hover{color:#fff}.navbar-inverse .divider-vertical{border-right-color:#222;border-left-color:#111}.navbar-inverse .nav li.dropdown.open>.dropdown-toggle,.navbar-inverse .nav li.dropdown.active>.dropdown-toggle,.navbar-inverse .nav li.dropdown.open.active>.dropdown-toggle{color:#fff;background-color:#111}.navbar-inverse .nav li.dropdown>.dropdown-toggle .caret{border-top-color:#999;border-bottom-color:#999}.navbar-inverse .nav li.dropdown.open>.dropdown-toggle .caret,.navbar-inverse .nav li.dropdown.active>.dropdown-toggle .caret,.navbar-inverse .nav li.dropdown.open.active>.dropdown-toggle .caret{border-top-color:#fff;border-bottom-color:#fff}.navbar-inverse .navbar-search .search-query{color:#fff;background-color:#515151;border-color:#111;-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,0.1),0 1px 0 rgba(255,255,255,0.15);-moz-box-shadow:inset 0 1px 2px rgba(0,0,0,0.1),0 1px 0 rgba(255,255,255,0.15);box-shadow:inset 0 1px 2px rgba(0,0,0,0.1),0 1px 0 rgba(255,255,255,0.15);-webkit-transition:none;-moz-transition:none;-o-transition:none;transition:none}.navbar-inverse .navbar-search .search-query:-moz-placeholder{color:#ccc}.navbar-inverse .navbar-search .search-query:-ms-input-placeholder{color:#ccc}.navbar-inverse .navbar-search .search-query::-webkit-input-placeholder{color:#ccc}.navbar-inverse .navbar-search .search-query:focus,.navbar-inverse .navbar-search .search-query.focused{padding:5px 15px;color:#333;text-shadow:0 1px 0 #fff;background-color:#fff;border:0;outline:0;-webkit-box-shadow:0 0 3px rgba(0,0,0,0.15);-moz-box-shadow:0 0 3px rgba(0,0,0,0.15);box-shadow:0 0 3px rgba(0,0,0,0.15)}.navbar-inverse .btn-navbar{color:#fff;text-shadow:0 -1px 0 
rgba(0,0,0,0.25);background-color:#0e0e0e;*background-color:#040404;background-image:-webkit-gradient(linear,0 0,0 100%,from(#151515),to(#040404));background-image:-webkit-linear-gradient(top,#151515,#040404);background-image:-o-linear-gradient(top,#151515,#040404);background-image:linear-gradient(to bottom,#151515,#040404);background-image:-moz-linear-gradient(top,#151515,#040404);background-repeat:repeat-x;border-color:#040404 #040404 #000;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ff151515',endColorstr='#ff040404',GradientType=0);filter:progid:dximagetransform.microsoft.gradient(enabled=false)}.navbar-inverse .btn-navbar:hover,.navbar-inverse .btn-navbar:active,.navbar-inverse .btn-navbar.active,.navbar-inverse .btn-navbar.disabled,.navbar-inverse .btn-navbar[disabled]{color:#fff;background-color:#040404;*background-color:#000}.navbar-inverse .btn-navbar:active,.navbar-inverse .btn-navbar.active{background-color:#000 \9}.breadcrumb{padding:8px 15px;margin:0 0 20px;list-style:none;background-color:#f5f5f5;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.breadcrumb li{display:inline-block;*display:inline;text-shadow:0 1px 0 #fff;*zoom:1}.breadcrumb .divider{padding:0 5px;color:#ccc}.breadcrumb .active{color:#999}.pagination{height:40px;margin:20px 0}.pagination ul{display:inline-block;*display:inline;margin-bottom:0;margin-left:0;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px;*zoom:1;-webkit-box-shadow:0 1px 2px rgba(0,0,0,0.05);-moz-box-shadow:0 1px 2px rgba(0,0,0,0.05);box-shadow:0 1px 2px rgba(0,0,0,0.05)}.pagination li{display:inline}.pagination a,.pagination span{float:left;padding:0 14px;line-height:38px;text-decoration:none;background-color:#fff;border:1px solid #ddd;border-left-width:0}.pagination a:hover,.pagination .active a,.pagination .active span{background-color:#f5f5f5}.pagination .active a,.pagination .active span{color:#999;cursor:default}.pagination .disabled span,.pagination .disabled a,.pagination .disabled a:hover{color:#999;cursor:default;background-color:transparent}.pagination li:first-child a,.pagination li:first-child span{border-left-width:1px;-webkit-border-radius:3px 0 0 3px;-moz-border-radius:3px 0 0 3px;border-radius:3px 0 0 3px}.pagination li:last-child a,.pagination li:last-child span{-webkit-border-radius:0 3px 3px 0;-moz-border-radius:0 3px 3px 0;border-radius:0 3px 3px 0}.pagination-centered{text-align:center}.pagination-right{text-align:right}.pager{margin:20px 0;text-align:center;list-style:none;*zoom:1}.pager:before,.pager:after{display:table;line-height:0;content:""}.pager:after{clear:both}.pager li{display:inline}.pager a{display:inline-block;padding:5px 14px;background-color:#fff;border:1px solid #ddd;-webkit-border-radius:15px;-moz-border-radius:15px;border-radius:15px}.pager a:hover{text-decoration:none;background-color:#f5f5f5}.pager .next a{float:right}.pager .previous a{float:left}.pager .disabled a,.pager .disabled a:hover{color:#999;cursor:default;background-color:#fff}.modal-open .dropdown-menu{z-index:2050}.modal-open .dropdown.open{*z-index:2050}.modal-open .popover{z-index:2060}.modal-open .tooltip{z-index:2080}.modal-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040;background-color:#000}.modal-backdrop.fade{opacity:0}.modal-backdrop,.modal-backdrop.fade.in{opacity:.8;filter:alpha(opacity=80)}.modal{position:fixed;top:50%;left:50%;z-index:1050;width:560px;margin:-250px 0 0 
-280px;overflow:auto;background-color:#fff;border:1px solid #999;border:1px solid rgba(0,0,0,0.3);*border:1px solid #999;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px;-webkit-box-shadow:0 3px 7px rgba(0,0,0,0.3);-moz-box-shadow:0 3px 7px rgba(0,0,0,0.3);box-shadow:0 3px 7px rgba(0,0,0,0.3);-webkit-background-clip:padding-box;-moz-background-clip:padding-box;background-clip:padding-box}.modal.fade{top:-25%;-webkit-transition:opacity .3s linear,top .3s ease-out;-moz-transition:opacity .3s linear,top .3s ease-out;-o-transition:opacity .3s linear,top .3s ease-out;transition:opacity .3s linear,top .3s ease-out}.modal.fade.in{top:50%}.modal-header{padding:9px 15px;border-bottom:1px solid #eee}.modal-header .close{margin-top:2px}.modal-header h3{margin:0;line-height:30px}.modal-body{max-height:400px;padding:15px;overflow-y:auto}.modal-form{margin-bottom:0}.modal-footer{padding:14px 15px 15px;margin-bottom:0;text-align:right;background-color:#f5f5f5;border-top:1px solid #ddd;-webkit-border-radius:0 0 6px 6px;-moz-border-radius:0 0 6px 6px;border-radius:0 0 6px 6px;*zoom:1;-webkit-box-shadow:inset 0 1px 0 #fff;-moz-box-shadow:inset 0 1px 0 #fff;box-shadow:inset 0 1px 0 #fff}.modal-footer:before,.modal-footer:after{display:table;line-height:0;content:""}.modal-footer:after{clear:both}.modal-footer .btn+.btn{margin-bottom:0;margin-left:5px}.modal-footer .btn-group .btn+.btn{margin-left:-1px}.tooltip{position:absolute;z-index:1030;display:block;padding:5px;font-size:11px;opacity:0;filter:alpha(opacity=0);visibility:visible}.tooltip.in{opacity:.8;filter:alpha(opacity=80)}.tooltip.top{margin-top:-3px}.tooltip.right{margin-left:3px}.tooltip.bottom{margin-top:3px}.tooltip.left{margin-left:-3px}.tooltip-inner{max-width:200px;padding:3px 8px;color:#fff;text-align:center;text-decoration:none;background-color:#000;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.tooltip-arrow{position:absolute;width:0;height:0;border-color:transparent;border-style:solid}.tooltip.top .tooltip-arrow{bottom:0;left:50%;margin-left:-5px;border-top-color:#000;border-width:5px 5px 0}.tooltip.right .tooltip-arrow{top:50%;left:0;margin-top:-5px;border-right-color:#000;border-width:5px 5px 5px 0}.tooltip.left .tooltip-arrow{top:50%;right:0;margin-top:-5px;border-left-color:#000;border-width:5px 0 5px 5px}.tooltip.bottom .tooltip-arrow{top:0;left:50%;margin-left:-5px;border-bottom-color:#000;border-width:0 5px 5px}.popover{position:absolute;top:0;left:0;z-index:1010;display:none;width:236px;padding:1px;background-color:#fff;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.2);-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px;-webkit-box-shadow:0 5px 10px rgba(0,0,0,0.2);-moz-box-shadow:0 5px 10px rgba(0,0,0,0.2);box-shadow:0 5px 10px rgba(0,0,0,0.2);-webkit-background-clip:padding-box;-moz-background-clip:padding;background-clip:padding-box}.popover.top{margin-bottom:10px}.popover.right{margin-left:10px}.popover.bottom{margin-top:10px}.popover.left{margin-right:10px}.popover-title{padding:8px 14px;margin:0;font-size:14px;font-weight:normal;line-height:18px;background-color:#f7f7f7;border-bottom:1px solid #ebebeb;-webkit-border-radius:5px 5px 0 0;-moz-border-radius:5px 5px 0 0;border-radius:5px 5px 0 0}.popover-content{padding:9px 14px}.popover-content p,.popover-content ul,.popover-content ol{margin-bottom:0}.popover .arrow,.popover .arrow:after{position:absolute;display:inline-block;width:0;height:0;border-color:transparent;border-style:solid}.popover 
.arrow:after{z-index:-1;content:""}.popover.top .arrow{bottom:-10px;left:50%;margin-left:-10px;border-top-color:#fff;border-width:10px 10px 0}.popover.top .arrow:after{bottom:-1px;left:-11px;border-top-color:rgba(0,0,0,0.25);border-width:11px 11px 0}.popover.right .arrow{top:50%;left:-10px;margin-top:-10px;border-right-color:#fff;border-width:10px 10px 10px 0}.popover.right .arrow:after{bottom:-11px;left:-1px;border-right-color:rgba(0,0,0,0.25);border-width:11px 11px 11px 0}.popover.bottom .arrow{top:-10px;left:50%;margin-left:-10px;border-bottom-color:#fff;border-width:0 10px 10px}.popover.bottom .arrow:after{top:-1px;left:-11px;border-bottom-color:rgba(0,0,0,0.25);border-width:0 11px 11px}.popover.left .arrow{top:50%;right:-10px;margin-top:-10px;border-left-color:#fff;border-width:10px 0 10px 10px}.popover.left .arrow:after{right:-1px;bottom:-11px;border-left-color:rgba(0,0,0,0.25);border-width:11px 0 11px 11px}.thumbnails{margin-left:-20px;list-style:none;*zoom:1}.thumbnails:before,.thumbnails:after{display:table;line-height:0;content:""}.thumbnails:after{clear:both}.row-fluid .thumbnails{margin-left:0}.thumbnails>li{float:left;margin-bottom:20px;margin-left:20px}.thumbnail{display:block;padding:4px;line-height:20px;border:1px solid #ddd;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;-webkit-box-shadow:0 1px 3px rgba(0,0,0,0.055);-moz-box-shadow:0 1px 3px rgba(0,0,0,0.055);box-shadow:0 1px 3px rgba(0,0,0,0.055);-webkit-transition:all .2s ease-in-out;-moz-transition:all .2s ease-in-out;-o-transition:all .2s ease-in-out;transition:all .2s ease-in-out}a.thumbnail:hover{border-color:#08c;-webkit-box-shadow:0 1px 4px rgba(0,105,214,0.25);-moz-box-shadow:0 1px 4px rgba(0,105,214,0.25);box-shadow:0 1px 4px rgba(0,105,214,0.25)}.thumbnail>img{display:block;max-width:100%;margin-right:auto;margin-left:auto}.thumbnail .caption{padding:9px;color:#555}.label,.badge{font-size:11.844px;font-weight:bold;line-height:14px;color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);white-space:nowrap;vertical-align:baseline;background-color:#999}.label{padding:1px 4px 2px;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px}.badge{padding:1px 9px 2px;-webkit-border-radius:9px;-moz-border-radius:9px;border-radius:9px}a.label:hover,a.badge:hover{color:#fff;text-decoration:none;cursor:pointer}.label-important,.badge-important{background-color:#b94a48}.label-important[href],.badge-important[href]{background-color:#953b39}.label-warning,.badge-warning{background-color:#f89406}.label-warning[href],.badge-warning[href]{background-color:#c67605}.label-success,.badge-success{background-color:#468847}.label-success[href],.badge-success[href]{background-color:#356635}.label-info,.badge-info{background-color:#3a87ad}.label-info[href],.badge-info[href]{background-color:#2d6987}.label-inverse,.badge-inverse{background-color:#333}.label-inverse[href],.badge-inverse[href]{background-color:#1a1a1a}.btn .label,.btn .badge{position:relative;top:-1px}.btn-mini .label,.btn-mini .badge{top:0}@-webkit-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@-moz-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@-ms-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@-o-keyframes progress-bar-stripes{from{background-position:0 0}to{background-position:40px 0}}@keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 
0}}.progress{height:20px;margin-bottom:20px;overflow:hidden;background-color:#f7f7f7;background-image:-moz-linear-gradient(top,#f5f5f5,#f9f9f9);background-image:-webkit-gradient(linear,0 0,0 100%,from(#f5f5f5),to(#f9f9f9));background-image:-webkit-linear-gradient(top,#f5f5f5,#f9f9f9);background-image:-o-linear-gradient(top,#f5f5f5,#f9f9f9);background-image:linear-gradient(to bottom,#f5f5f5,#f9f9f9);background-repeat:repeat-x;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;filter:progid:dximagetransform.microsoft.gradient(startColorstr='#fff5f5f5',endColorstr='#fff9f9f9',GradientType=0);-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,0.1);-moz-box-shadow:inset 0 1px 2px rgba(0,0,0,0.1);box-shadow:inset 0 1px 2px rgba(0,0,0,0.1)}.progress .bar{float:left;width:0;height:100%;font-size:12px;color:#fff;text-align:center;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#0e90d2;background-image:-moz-linear-gradient(top,#149bdf,#0480be);background-image:-webkit-gradient(linear,0 0,0 100%,from(#149bdf),to(#0480be));background-image:-webkit-linear-gradient(top,#149bdf,#0480be);background-image:-o-linear-gradient(top,#149bdf,#0480be);background-image:linear-gradient(to bottom,#149bdf,#0480be);background-repeat:repeat-x;filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ff149bdf',endColorstr='#ff0480be',GradientType=0);-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,0.15);-moz-box-shadow:inset 0 -1px 0 rgba(0,0,0,0.15);box-shadow:inset 0 -1px 0 rgba(0,0,0,0.15);-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;-webkit-transition:width .6s ease;-moz-transition:width .6s ease;-o-transition:width .6s ease;transition:width .6s ease}.progress .bar+.bar{-webkit-box-shadow:inset 1px 0 0 rgba(0,0,0,0.15),inset 0 -1px 0 rgba(0,0,0,0.15);-moz-box-shadow:inset 1px 0 0 rgba(0,0,0,0.15),inset 0 -1px 0 rgba(0,0,0,0.15);box-shadow:inset 1px 0 0 rgba(0,0,0,0.15),inset 0 -1px 0 rgba(0,0,0,0.15)}.progress-striped .bar{background-color:#149bdf;background-image:-webkit-gradient(linear,0 100%,100% 0,color-stop(0.25,rgba(255,255,255,0.15)),color-stop(0.25,transparent),color-stop(0.5,transparent),color-stop(0.5,rgba(255,255,255,0.15)),color-stop(0.75,rgba(255,255,255,0.15)),color-stop(0.75,transparent),to(transparent));background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-moz-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);-webkit-background-size:40px 40px;-moz-background-size:40px 40px;-o-background-size:40px 40px;background-size:40px 40px}.progress.active .bar{-webkit-animation:progress-bar-stripes 2s linear infinite;-moz-animation:progress-bar-stripes 2s linear infinite;-ms-animation:progress-bar-stripes 2s linear infinite;-o-animation:progress-bar-stripes 2s linear infinite;animation:progress-bar-stripes 2s linear infinite}.progress-danger .bar,.progress 
.bar-danger{background-color:#dd514c;background-image:-moz-linear-gradient(top,#ee5f5b,#c43c35);background-image:-webkit-gradient(linear,0 0,0 100%,from(#ee5f5b),to(#c43c35));background-image:-webkit-linear-gradient(top,#ee5f5b,#c43c35);background-image:-o-linear-gradient(top,#ee5f5b,#c43c35);background-image:linear-gradient(to bottom,#ee5f5b,#c43c35);background-repeat:repeat-x;filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ffee5f5b',endColorstr='#ffc43c35',GradientType=0)}.progress-danger.progress-striped .bar,.progress-striped .bar-danger{background-color:#ee5f5b;background-image:-webkit-gradient(linear,0 100%,100% 0,color-stop(0.25,rgba(255,255,255,0.15)),color-stop(0.25,transparent),color-stop(0.5,transparent),color-stop(0.5,rgba(255,255,255,0.15)),color-stop(0.75,rgba(255,255,255,0.15)),color-stop(0.75,transparent),to(transparent));background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-moz-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent)}.progress-success .bar,.progress .bar-success{background-color:#5eb95e;background-image:-moz-linear-gradient(top,#62c462,#57a957);background-image:-webkit-gradient(linear,0 0,0 100%,from(#62c462),to(#57a957));background-image:-webkit-linear-gradient(top,#62c462,#57a957);background-image:-o-linear-gradient(top,#62c462,#57a957);background-image:linear-gradient(to bottom,#62c462,#57a957);background-repeat:repeat-x;filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ff62c462',endColorstr='#ff57a957',GradientType=0)}.progress-success.progress-striped .bar,.progress-striped .bar-success{background-color:#62c462;background-image:-webkit-gradient(linear,0 100%,100% 0,color-stop(0.25,rgba(255,255,255,0.15)),color-stop(0.25,transparent),color-stop(0.5,transparent),color-stop(0.5,rgba(255,255,255,0.15)),color-stop(0.75,rgba(255,255,255,0.15)),color-stop(0.75,transparent),to(transparent));background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-moz-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent)}.progress-info .bar,.progress .bar-info{background-color:#4bb1cf;background-image:-moz-linear-gradient(top,#5bc0de,#339bb9);background-image:-webkit-gradient(linear,0 0,0 
100%,from(#5bc0de),to(#339bb9));background-image:-webkit-linear-gradient(top,#5bc0de,#339bb9);background-image:-o-linear-gradient(top,#5bc0de,#339bb9);background-image:linear-gradient(to bottom,#5bc0de,#339bb9);background-repeat:repeat-x;filter:progid:dximagetransform.microsoft.gradient(startColorstr='#ff5bc0de',endColorstr='#ff339bb9',GradientType=0)}.progress-info.progress-striped .bar,.progress-striped .bar-info{background-color:#5bc0de;background-image:-webkit-gradient(linear,0 100%,100% 0,color-stop(0.25,rgba(255,255,255,0.15)),color-stop(0.25,transparent),color-stop(0.5,transparent),color-stop(0.5,rgba(255,255,255,0.15)),color-stop(0.75,rgba(255,255,255,0.15)),color-stop(0.75,transparent),to(transparent));background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-moz-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent)}.progress-warning .bar,.progress .bar-warning{background-color:#faa732;background-image:-moz-linear-gradient(top,#fbb450,#f89406);background-image:-webkit-gradient(linear,0 0,0 100%,from(#fbb450),to(#f89406));background-image:-webkit-linear-gradient(top,#fbb450,#f89406);background-image:-o-linear-gradient(top,#fbb450,#f89406);background-image:linear-gradient(to bottom,#fbb450,#f89406);background-repeat:repeat-x;filter:progid:dximagetransform.microsoft.gradient(startColorstr='#fffbb450',endColorstr='#fff89406',GradientType=0)}.progress-warning.progress-striped .bar,.progress-striped .bar-warning{background-color:#fbb450;background-image:-webkit-gradient(linear,0 100%,100% 0,color-stop(0.25,rgba(255,255,255,0.15)),color-stop(0.25,transparent),color-stop(0.5,transparent),color-stop(0.5,rgba(255,255,255,0.15)),color-stop(0.75,rgba(255,255,255,0.15)),color-stop(0.75,transparent),to(transparent));background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-moz-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent)}.accordion{margin-bottom:20px}.accordion-group{margin-bottom:2px;border:1px solid #e5e5e5;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.accordion-heading{border-bottom:0}.accordion-heading .accordion-toggle{display:block;padding:8px 15px}.accordion-toggle{cursor:pointer}.accordion-inner{padding:9px 15px;border-top:1px solid 
#e5e5e5}.carousel{position:relative;margin-bottom:20px;line-height:1}.carousel-inner{position:relative;width:100%;overflow:hidden}.carousel .item{position:relative;display:none;-webkit-transition:.6s ease-in-out left;-moz-transition:.6s ease-in-out left;-o-transition:.6s ease-in-out left;transition:.6s ease-in-out left}.carousel .item>img{display:block;line-height:1}.carousel .active,.carousel .next,.carousel .prev{display:block}.carousel .active{left:0}.carousel .next,.carousel .prev{position:absolute;top:0;width:100%}.carousel .next{left:100%}.carousel .prev{left:-100%}.carousel .next.left,.carousel .prev.right{left:0}.carousel .active.left{left:-100%}.carousel .active.right{left:100%}.carousel-control{position:absolute;top:40%;left:15px;width:40px;height:40px;margin-top:-20px;font-size:60px;font-weight:100;line-height:30px;color:#fff;text-align:center;background:#222;border:3px solid #fff;-webkit-border-radius:23px;-moz-border-radius:23px;border-radius:23px;opacity:.5;filter:alpha(opacity=50)}.carousel-control.right{right:15px;left:auto}.carousel-control:hover{color:#fff;text-decoration:none;opacity:.9;filter:alpha(opacity=90)}.carousel-caption{position:absolute;right:0;bottom:0;left:0;padding:15px;background:#333;background:rgba(0,0,0,0.75)}.carousel-caption h4,.carousel-caption p{line-height:20px;color:#fff}.carousel-caption h4{margin:0 0 5px}.carousel-caption p{margin-bottom:0}.hero-unit{padding:60px;margin-bottom:30px;background-color:#eee;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px}.hero-unit h1{margin-bottom:0;font-size:60px;line-height:1;letter-spacing:-1px;color:inherit}.hero-unit p{font-size:18px;font-weight:200;line-height:30px;color:inherit}.pull-right{float:right}.pull-left{float:left}.hide{display:none}.show{display:block}.invisible{visibility:hidden}.affix{position:fixed}
diff --git a/docs/index.md b/docs/index.md
index 0ea0e103e43c08ad18eba03afa7aba994d2ed3ce..3cf9cc1c6412f662f281ad3c47ba82756702a25e 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -3,8 +3,8 @@ layout: global
 title: Spark Overview
 ---
 
-Apache Spark is a cluster computing system that aims to make data analytics faster to run and faster to write.
-It provides high-level APIs in [Scala](scala-programming-guide.html), [Java](java-programming-guide.html), and [Python](python-programming-guide.html), and a general execution engine that supports rich operator graphs.
+Apache Spark is a fast and general-purpose cluster computing system.
+It provides high-level APIs in [Scala](scala-programming-guide.html), [Java](java-programming-guide.html), and [Python](python-programming-guide.html) that make parallel jobs easy to write, and an optimized engine that supports general computation graphs.
 Spark can run on the Apache Mesos cluster manager, Hadoop YARN, Amazon EC2, or without an independent resource manager ("standalone mode").
 
 # Downloading
@@ -24,8 +24,8 @@ For its Scala API, Spark {{site.SPARK_VERSION}} depends on Scala {{site.SCALA_VE
 Spark comes with several sample programs in the `examples` directory.
 To run one of the samples, use `./run-example <class> <params>` in the top-level Spark directory
 (the `run-example` script sets up the appropriate paths and launches that program).
-For example, `./run-example spark.examples.SparkPi` will run a sample program that estimates Pi. Each
-example prints usage help if no params are given.
+For example, try `./run-example org.apache.spark.examples.SparkPi local`.
+Each example prints usage help when run with no parameters.
 
 Note that all of the sample programs take a `<master>` parameter specifying the cluster URL
 to connect to. This can be a [URL for a distributed cluster](scala-programming-guide.html#master-urls),
@@ -95,7 +95,7 @@ In addition, if you wish to run Spark on [YARN](running-on-yarn.md), set
   exercises about Spark, Shark, Mesos, and more. [Videos](http://ampcamp.berkeley.edu/agenda-2012),
   [slides](http://ampcamp.berkeley.edu/agenda-2012) and [exercises](http://ampcamp.berkeley.edu/exercises-2012) are
   available online for free.
-* [Code Examples](http://spark.incubator.apache.org/examples.html): more are also available in the [examples subfolder](https://github.com/mesos/spark/tree/master/examples/src/main/scala/) of Spark
+* [Code Examples](http://spark.incubator.apache.org/examples.html): more examples are available in the [examples subfolder](https://github.com/apache/incubator-spark/tree/master/examples/src/main/scala/) of Spark
 * [Paper Describing Spark](http://www.cs.berkeley.edu/~matei/papers/2012/nsdi_spark.pdf)
 * [Paper Describing Spark Streaming](http://www.eecs.berkeley.edu/Pubs/TechRpts/2012/EECS-2012-259.pdf)
 
diff --git a/docs/java-programming-guide.md b/docs/java-programming-guide.md
index dd19a5f0c970cdd96c5d9be0acc3255f7689d933..53085cc6719b07ea185ab1c8e39a9d987cafa315 100644
--- a/docs/java-programming-guide.md
+++ b/docs/java-programming-guide.md
@@ -10,9 +10,9 @@ easy to follow even if you don't know Scala.
 This guide will show how to use the Spark features described there in Java.
 
 The Spark Java API is defined in the
-[`spark.api.java`](api/core/index.html#spark.api.java.package) package, and includes
-a [`JavaSparkContext`](api/core/index.html#spark.api.java.JavaSparkContext) for
-initializing Spark and [`JavaRDD`](api/core/index.html#spark.api.java.JavaRDD) classes,
+[`org.apache.spark.api.java`](api/core/index.html#org.apache.spark.api.java.package) package, and includes
+a [`JavaSparkContext`](api/core/index.html#org.apache.spark.api.java.JavaSparkContext) for
+initializing Spark and [`JavaRDD`](api/core/index.html#org.apache.spark.api.java.JavaRDD) classes,
 which support the same methods as their Scala counterparts but take Java functions and return
 Java data and collection types. The main differences have to do with passing functions to RDD
 operations (e.g. map) and handling RDDs of different types, as discussed next.
@@ -23,12 +23,12 @@ There are a few key differences between the Java and Scala APIs:
 
 * Java does not support anonymous or first-class functions, so functions must
   be implemented by extending the
-  [`spark.api.java.function.Function`](api/core/index.html#spark.api.java.function.Function),
-  [`Function2`](api/core/index.html#spark.api.java.function.Function2), etc.
+  [`org.apache.spark.api.java.function.Function`](api/core/index.html#org.apache.spark.api.java.function.Function),
+  [`Function2`](api/core/index.html#org.apache.spark.api.java.function.Function2), etc.
   classes.
 * To maintain type safety, the Java API defines specialized Function and RDD
   classes for key-value pairs and doubles. For example, 
-  [`JavaPairRDD`](api/core/index.html#spark.api.java.JavaPairRDD)
+  [`JavaPairRDD`](api/core/index.html#org.apache.spark.api.java.JavaPairRDD)
   stores key-value pairs.
 * RDD methods like `collect()` and `countByKey()` return Java collection types,
   such as `java.util.List` and `java.util.Map`.
@@ -44,8 +44,8 @@ In the Scala API, these methods are automatically added using Scala's
 [implicit conversions](http://www.scala-lang.org/node/130) mechanism.
 
 In the Java API, the extra methods are defined in the
-[`JavaPairRDD`](api/core/index.html#spark.api.java.JavaPairRDD)
-and [`JavaDoubleRDD`](api/core/index.html#spark.api.java.JavaDoubleRDD)
+[`JavaPairRDD`](api/core/index.html#org.apache.spark.api.java.JavaPairRDD)
+and [`JavaDoubleRDD`](api/core/index.html#org.apache.spark.api.java.JavaDoubleRDD)
 classes.  RDD methods like `map` are overloaded by specialized `PairFunction`
 and `DoubleFunction` classes, allowing them to return RDDs of the appropriate
 types.  Common methods like `filter` and `sample` are implemented by
@@ -75,7 +75,7 @@ class has a single abstract method, `call()`, that must be implemented.
 ## Storage Levels
 
 RDD [storage level](scala-programming-guide.html#rdd-persistence) constants, such as `MEMORY_AND_DISK`, are
-declared in the [spark.api.java.StorageLevels](api/core/index.html#spark.api.java.StorageLevels) class. To
+declared in the [org.apache.spark.api.java.StorageLevels](api/core/index.html#org.apache.spark.api.java.StorageLevels) class. To
 define your own storage level, you can use StorageLevels.create(...). 
 
 
@@ -92,8 +92,8 @@ The Java API supports other Spark features, including
 As an example, we will implement word count using the Java API.
 
 {% highlight java %}
-import spark.api.java.*;
-import spark.api.java.function.*;
+import org.apache.spark.api.java.*;
+import org.apache.spark.api.java.function.*;
 
 JavaSparkContext sc = new JavaSparkContext(...);
 JavaRDD<String> lines = sc.textFile("hdfs://...");
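 // Hedged sketch of how the word count might continue, using the function
 // classes described above (assumes java.util.Arrays and scala.Tuple2 are
 // also imported; this continuation is illustrative, not part of the
 // original listing):
 JavaRDD<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
   public Iterable<String> call(String s) { return Arrays.asList(s.split(" ")); }
 });
 JavaPairRDD<String, Integer> counts = words.map(
   new PairFunction<String, String, Integer>() {
     public Tuple2<String, Integer> call(String s) {
       return new Tuple2<String, Integer>(s, 1);
     }
   }).reduceByKey(new Function2<Integer, Integer, Integer>() {
     public Integer call(Integer a, Integer b) { return a + b; }
   });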
@@ -179,7 +179,7 @@ just a matter of style.
 # Javadoc
 
 We currently provide documentation for the Java API as Scaladoc, in the
-[`spark.api.java` package](api/core/index.html#spark.api.java.package), because
+[`org.apache.spark.api.java` package](api/core/index.html#org.apache.spark.api.java.package), because
 some of the classes are implemented in Scala. The main downside is that the types and function
 definitions show Scala syntax (for example, `def reduce(func: Function2[T, T, T]): T` instead of
 `T reduce(Function2<T, T, T> func)`).
@@ -189,7 +189,10 @@ We hope to generate documentation with Java-style syntax in the future.
 # Where to Go from Here
 
 Spark includes several sample programs using the Java API in
-[`examples/src/main/java`](https://github.com/mesos/spark/tree/master/examples/src/main/java/spark/examples).  You can run them by passing the class name to the
-`run-example` script included in Spark -- for example, `./run-example
-spark.examples.JavaWordCount`.  Each example program prints usage help when run
+[`examples/src/main/java`](https://github.com/apache/incubator-spark/tree/master/examples/src/main/java/org/apache/spark/examples).  You can run them by passing the class name to the
+`run-example` script included in Spark; for example:
+
+    ./run-example org.apache.spark.examples.JavaWordCount
+
+Each example program prints usage help when run
 without any arguments.
diff --git a/docs/python-programming-guide.md b/docs/python-programming-guide.md
index 27e0d10080d4126709f5df93f9dbbf93d5dc7e24..8a539fe774f56bf3ab381d4ed71b6a9faa2c42cd 100644
--- a/docs/python-programming-guide.md
+++ b/docs/python-programming-guide.md
@@ -135,8 +135,11 @@ Code dependencies can be added to an existing SparkContext using its `addPyFile(
 
 # Where to Go from Here
 
-PySpark includes several sample programs in the [`python/examples` folder](https://github.com/mesos/spark/tree/master/python/examples).
-You can run them by passing the files to the `pyspark` script -- for example `./pyspark python/examples/wordcount.py`.
+PySpark includes several sample programs in the [`python/examples` folder](https://github.com/apache/incubator-spark/tree/master/python/examples).
+You can run them by passing the files to the `pyspark` script; e.g.:
+
+    ./pyspark python/examples/wordcount.py
+
 Each program prints usage help when run without arguments.
 
 We currently provide [API documentation](api/pyspark/index.html) for the Python API as Epydoc.
diff --git a/docs/quick-start.md b/docs/quick-start.md
index 4507b21c5edf27ddcd66e9f065bd5e38809dc11c..70c3df8095dfb25797acb785411436e3ccb049bc 100644
--- a/docs/quick-start.md
+++ b/docs/quick-start.md
@@ -108,8 +108,8 @@ We'll create a very simple Spark job in Scala. So simple, in fact, that it's nam
 
 {% highlight scala %}
 /*** SimpleJob.scala ***/
-import spark.SparkContext
-import SparkContext._
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
 
 object SimpleJob {
   def main(args: Array[String]) {
@@ -135,7 +135,7 @@ version := "1.0"
 
 scalaVersion := "{{site.SCALA_VERSION}}"
 
-libraryDependencies += "org.spark-project" %% "spark-core" % "{{site.SPARK_VERSION}}"
+libraryDependencies += "org.apache.spark" %% "spark-core" % "{{site.SPARK_VERSION}}"
 
 resolvers += "Akka Repository" at "http://repo.akka.io/releases/"
 {% endhighlight %}
@@ -170,8 +170,8 @@ We'll create a very simple Spark job, `SimpleJob.java`:
 
 {% highlight java %}
 /*** SimpleJob.java ***/
-import spark.api.java.*;
-import spark.api.java.function.Function;
+import org.apache.spark.api.java.*;
+import org.apache.spark.api.java.function.Function;
 
 public class SimpleJob {
   public static void main(String[] args) {
@@ -213,7 +213,7 @@ To build the job, we also write a Maven `pom.xml` file that lists Spark as a dep
   </repositories>
   <dependencies>
     <dependency> <!-- Spark dependency -->
-      <groupId>org.spark-project</groupId>
+      <groupId>org.apache.spark</groupId>
       <artifactId>spark-core_{{site.SCALA_VERSION}}</artifactId>
       <version>{{site.SPARK_VERSION}}</version>
     </dependency>
diff --git a/docs/scala-programming-guide.md b/docs/scala-programming-guide.md
index e321b8f5b889269f52cf571dac0df87c274b7dfb..f7768e55fc6c84b1d275981644adf2b7c5f91bff 100644
--- a/docs/scala-programming-guide.md
+++ b/docs/scala-programming-guide.md
@@ -21,7 +21,7 @@ Spark {{site.SPARK_VERSION}} uses Scala {{site.SCALA_VERSION}}. If you write app
 
 To write a Spark application, you need to add a dependency on Spark. If you use SBT or Maven, Spark is available through Maven Central at:
 
-    groupId = org.spark-project
+    groupId = org.apache.spark
     artifactId = spark-core_{{site.SCALA_VERSION}}
     version = {{site.SPARK_VERSION}} 
 
@@ -36,8 +36,8 @@ For other build systems, you can run `sbt/sbt assembly` to pack Spark and its de
 Finally, you need to import some Spark classes and implicit conversions into your program. Add the following lines:
 
 {% highlight scala %}
-import spark.SparkContext
-import SparkContext._
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
 {% endhighlight %}
 
 # Initializing Spark
@@ -142,7 +142,7 @@ All transformations in Spark are <i>lazy</i>, in that they do not compute their
 
 By default, each transformed RDD is recomputed each time you run an action on it. However, you may also *persist* an RDD in memory using the `persist` (or `cache`) method, in which case Spark will keep the elements around on the cluster for much faster access the next time you query it. There is also support for persisting datasets on disk, or replicated across the cluster. The next section in this document describes these options.
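+
+As a minimal sketch (assuming an existing SparkContext `sc` and a local file `data.txt`):
+
+{% highlight scala %}
+val lines = sc.textFile("data.txt")
+val lineLengths = lines.map(_.length)
+lineLengths.cache()          // mark the RDD to be kept in memory
+lineLengths.reduce(_ + _)    // the first action computes the RDD and caches it
+lineLengths.count()          // subsequent actions reuse the cached data
+{% endhighlight %}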
 
-The following tables list the transformations and actions currently supported (see also the [RDD API doc](api/core/index.html#spark.RDD) for details):
+The following tables list the transformations and actions currently supported (see also the [RDD API doc](api/core/index.html#org.apache.spark.RDD) for details):
 
 ### Transformations
 
@@ -211,7 +211,7 @@ The following tables list the transformations and actions currently supported (s
 </tr>
 </table>
 
-A complete list of transformations is available in the [RDD API doc](api/core/index.html#spark.RDD).
+A complete list of transformations is available in the [RDD API doc](api/core/index.html#org.apache.spark.RDD).
 
 ### Actions
 
@@ -259,7 +259,7 @@ A complete list of transformations is available in the [RDD API doc](api/core/in
 </tr>
 </table>
 
-A complete list of actions is available in the [RDD API doc](api/core/index.html#spark.RDD).
+A complete list of actions is available in the [RDD API doc](api/core/index.html#org.apache.spark.RDD).
 
 ## RDD Persistence
 
@@ -267,7 +267,7 @@ One of the most important capabilities in Spark is *persisting* (or *caching*) a
 
 You can mark an RDD to be persisted using the `persist()` or `cache()` methods on it. The first time it is computed in an action, it will be kept in memory on the nodes. The cache is fault-tolerant -- if any partition of an RDD is lost, it will automatically be recomputed using the transformations that originally created it.
 
-In addition, each RDD can be stored using a different *storage level*, allowing you, for example, to persist the dataset on disk, or persist it in memory but as serialized Java objects (to save space), or even replicate it across nodes. These levels are chosen by passing a [`spark.storage.StorageLevel`](api/core/index.html#spark.storage.StorageLevel) object to `persist()`. The `cache()` method is a shorthand for using the default storage level, which is `StorageLevel.MEMORY_ONLY` (store deserialized objects in memory). The complete set of available storage levels is:
+In addition, each RDD can be stored using a different *storage level*, allowing you, for example, to persist the dataset on disk, or persist it in memory but as serialized Java objects (to save space), or even replicate it across nodes. These levels are chosen by passing a [`org.apache.spark.storage.StorageLevel`](api/core/index.html#org.apache.spark.storage.StorageLevel) object to `persist()`. The `cache()` method is a shorthand for using the default storage level, which is `StorageLevel.MEMORY_ONLY` (store deserialized objects in memory). The complete set of available storage levels is:
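+
+For example, a minimal sketch (assuming an existing RDD `data`) that keeps serialized data in memory, spilling to disk when it does not fit:
+
+{% highlight scala %}
+import org.apache.spark.storage.StorageLevel
+
+data.persist(StorageLevel.MEMORY_AND_DISK_SER)
+{% endhighlight %}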
 
 <table class="table">
 <tr><th style="width:23%">Storage Level</th><th>Meaning</th></tr>
@@ -318,7 +318,7 @@ We recommend going through the following process to select one:
   application). *All* the storage levels provide full fault tolerance by recomputing lost data, but the replicated ones
   let you continue running tasks on the RDD without waiting to recompute a lost partition.
  
-If you want to define your own storage level (say, with replication factor of 3 instead of 2), then use the function factor method `apply()` of the [`StorageLevel`](api/core/index.html#spark.storage.StorageLevel$) singleton object.  
+If you want to define your own storage level (say, with a replication factor of 3 instead of 2), then use the factory method `apply()` of the [`StorageLevel`](api/core/index.html#org.apache.spark.storage.StorageLevel$) singleton object.
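+
+A minimal sketch, assuming the `apply(useDisk, useMemory, deserialized, replication)` factory and a hypothetical RDD `myRdd`:
+
+{% highlight scala %}
+import org.apache.spark.storage.StorageLevel
+
+// like MEMORY_ONLY, but replicated on three nodes
+val MEMORY_ONLY_3 = StorageLevel(false, true, true, 3)
+myRdd.persist(MEMORY_ONLY_3)
+{% endhighlight %}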
 
 # Shared Variables
 
@@ -364,7 +364,11 @@ res2: Int = 10
 # Where to Go from Here
 
 You can see some [example Spark programs](http://www.spark-project.org/examples.html) on the Spark website.
-In addition, Spark includes several sample programs in `examples/src/main/scala`. Some of them have both Spark versions and local (non-parallel) versions, allowing you to see what had to be changed to make the program run on a cluster. You can run them using by passing the class name to the `run-example` script included in Spark -- for example, `./run-example spark.examples.SparkPi`. Each example program prints usage help when run without any arguments.
+In addition, Spark includes several samples in `examples/src/main/scala`. Some of them have both Spark versions and local (non-parallel) versions, allowing you to see what had to be changed to make the program run on a cluster. You can run them by passing the class name to the `run-example` script included in Spark; for example:
+
+    ./run-example org.apache.spark.examples.SparkPi
+
+Each example program prints usage help when run without any arguments.
 
 For help on optimizing your program, the [configuration](configuration.html) and
 [tuning](tuning.html) guides provide information on best practices. They are especially important for
diff --git a/docs/spark-debugger.md b/docs/spark-debugger.md
index f6f0988858e794ee8b81f148c6c2d27dc53c0304..d6315d97f4d590d47c6e85cb108bc70368ea2323 100644
--- a/docs/spark-debugger.md
+++ b/docs/spark-debugger.md
@@ -2,7 +2,7 @@
 layout: global
 title: The Spark Debugger
 ---
-**Summary:** The Spark debugger provides replay debugging for deterministic (logic) errors in Spark programs. It's currently in development, but you can try it out in the [arthur branch](https://github.com/mesos/spark/tree/arthur).
+**Summary:** The Spark debugger provides replay debugging for deterministic (logic) errors in Spark programs. It's currently in development, but you can try it out in the [arthur branch](https://github.com/apache/incubator-spark/tree/arthur).
 
 ## Introduction
 
@@ -19,7 +19,7 @@ For deterministic errors, debugging a Spark program is now as easy as debugging
 
 ## Approach
 
-As your Spark program runs, the slaves report key events back to the master -- for example, RDD creations, RDD contents, and uncaught exceptions. (A full list of event types is in [EventLogging.scala](https://github.com/mesos/spark/blob/arthur/core/src/main/scala/spark/EventLogging.scala).) The master logs those events, and you can load the event log into the debugger after your program is done running.
+As your Spark program runs, the slaves report key events back to the master -- for example, RDD creations, RDD contents, and uncaught exceptions. (A full list of event types is in [EventLogging.scala](https://github.com/apache/incubator-spark/blob/arthur/core/src/main/scala/spark/EventLogging.scala).) The master logs those events, and you can load the event log into the debugger after your program is done running.
 
 _A note on nondeterminism:_ For fault recovery, Spark requires RDD transformations (for example, the function passed to `RDD.map`) to be deterministic. The Spark debugger also relies on this property, and it can also warn you if your transformation is nondeterministic. This works by checksumming the contents of each RDD and comparing the checksums from the original execution to the checksums after recomputing the RDD in the debugger.
 
diff --git a/docs/streaming-custom-receivers.md b/docs/streaming-custom-receivers.md
index dfa343bf94f90684eb2edf91c16ef3d7fdeb9803..4e27d6559cf1e5673c3e1012be807bbe2e10d05a 100644
--- a/docs/streaming-custom-receivers.md
+++ b/docs/streaming-custom-receivers.md
@@ -1,24 +1,22 @@
 ---
 layout: global
-title: Tutorial - Spark Streaming, Plugging in a custom receiver.
+title: Spark Streaming Custom Receivers
 ---
 
 A "Spark Streaming" receiver can be a simple network stream, streams of messages from a message queue, files etc. A receiver can also assume roles more than just receiving data like filtering, preprocessing, to name a few of the possibilities. The api to plug-in any user defined custom receiver is thus provided to encourage development of receivers which may be well suited to ones specific need.
 
 This guide shows the programming model and features by walking through a simple sample receiver and corresponding Spark Streaming application.
 
-### Write a simple receiver
+### Writing a Simple Receiver
 
-This starts with implementing [NetworkReceiver](#References)
+Writing a custom receiver starts with implementing [NetworkReceiver](api/streaming/index.html#org.apache.spark.streaming.dstream.NetworkReceiver).
 
-Following is a simple socket text-stream receiver.
+The following is a simple socket text-stream receiver.
 
 {% highlight scala %}
-
-       class SocketTextStreamReceiver(host: String,
-                                      port: Int
-                                       ) extends NetworkReceiver[String] {
-
+       class SocketTextStreamReceiver(host: String, port: Int)
+         extends NetworkReceiver[String]
+       {
          protected lazy val blocksGenerator: BlockGenerator =
            new BlockGenerator(StorageLevel.MEMORY_ONLY_SER_2)
 
@@ -36,23 +34,20 @@ Following is a simple socket text-stream receiver.
          protected def onStop() {
            blocksGenerator.stop()
          }
-
        }
-
 {% endhighlight %}
 
 
 All we did here is extend NetworkReceiver and call the blocksGenerator's API method (i.e. `+=`) to push our blocks of data. Please refer to the Scaladoc of NetworkReceiver for more details.
 
 
-### An Actor as Receiver.
+### An Actor as Receiver
 
 This starts with implementing an [Actor](#References).
 
 The following is a simple socket text-stream receiver, which is admittedly oversimplified, using Akka's socket.io API.
 
 {% highlight scala %}
-
        class SocketTextStreamReceiver (host:String,
          port:Int,
          bytesToString: ByteString => String) extends Actor with Receiver {
@@ -64,52 +59,41 @@ Following is a simple socket text-stream receiver, which is appearently overly s
          }
 
        }
-
-
 {% endhighlight %}
 
 All we did here is mix in the Receiver trait and call the pushBlock API method to push our blocks of data. Please refer to the Scaladoc of Receiver for more details.
 
-### A sample spark application
+### A Sample Spark Application
 
 * First, create a Spark Streaming context with the master URL and batch duration.
 
 {% highlight scala %}
-
     val ssc = new StreamingContext(master, "WordCountCustomStreamSource",
       Seconds(batchDuration))
-
 {% endhighlight %}
 
 * Plug the custom receiver into the Spark Streaming context and create a DStream.
 
 {% highlight scala %}
-
     val lines = ssc.networkStream[String](new SocketTextStreamReceiver(
       "localhost", 8445))
-
 {% endhighlight %}
 
 * Or plug the actor in as a receiver in the Spark Streaming context and create a DStream.
 
 {% highlight scala %}
-
     val lines = ssc.actorStream[String](Props(new SocketTextStreamReceiver(
       "localhost",8445, z => z.utf8String)),"SocketReceiver")
-
 {% endhighlight %}
 
 * Process it.
 
 {% highlight scala %}
-
     val words = lines.flatMap(_.split(" "))
     val wordCounts = words.map(x => (x, 1)).reduceByKey(_ + _)
 
     wordCounts.print()
     ssc.start()
-
-
 {% endhighlight %}
 
 * After processing, the stream can be tested using the netcat utility.
@@ -119,12 +103,11 @@ All we did here is mixed in trait Receiver and called pushBlock api method to pu
      hello hello
 
 
-## Multiple homogeneous/heterogeneous receivers.
+## Multiple Homogeneous/Heterogeneous Receivers
 
 A DStream `union` operation is provided for taking the union of multiple input streams.
 
 {% highlight scala %}
-
     val lines = ssc.actorStream[String](Props(new SocketTextStreamReceiver(
       "localhost",8445, z => z.utf8String)),"SocketReceiver")
 
@@ -133,7 +116,6 @@ A DStream union operation is provided for taking union on multiple input streams
       "localhost",8446, z => z.utf8String)),"SocketReceiver")
 
     val union = lines.union(lines2)
-
 {% endhighlight %}
 
 The above stream can easily be processed as described earlier.
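+
+For example, reusing the word-count operations shown above on the union stream:
+
+{% highlight scala %}
+    val words = union.flatMap(_.split(" "))
+    val wordCounts = words.map(x => (x, 1)).reduceByKey(_ + _)
+
+    wordCounts.print()
+{% endhighlight %}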
@@ -143,4 +125,4 @@ _A more comprehensive example is provided in the spark streaming examples_
 ## References
 
 1. [Akka Actor documentation](http://doc.akka.io/docs/akka/2.0.5/scala/actors.html)
-2.[NetworkReceiver](http://spark-project.org/docs/latest/api/streaming/index.html#spark.streaming.dstream.NetworkReceiver)
+2. [NetworkReceiver](api/streaming/index.html#org.apache.spark.streaming.dstream.NetworkReceiver)
diff --git a/docs/streaming-programming-guide.md b/docs/streaming-programming-guide.md
index 3330e635986d805da2bcc1954943ffcb5e52ad5c..c7df1720249481eed6a4b4268ac9c8796d0587b6 100644
--- a/docs/streaming-programming-guide.md
+++ b/docs/streaming-programming-guide.md
@@ -13,6 +13,14 @@ A Spark Streaming application is very similar to a Spark application; it consist
 
 This guide shows how to start programming with DStreams.
 
+# Linking with Spark Streaming
+
+Add the following SBT or Maven dependency to your project to use Spark Streaming:
+
+    groupId = org.apache.spark
+    artifactId = spark-streaming_{{site.SCALA_VERSION}}
+    version = {{site.SPARK_VERSION}}
+
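+For SBT, for example, this corresponds to a line like the following (a sketch; the same pattern as the `spark-core` dependency shown in the quick start):
+
+    libraryDependencies += "org.apache.spark" %% "spark-streaming" % "{{site.SPARK_VERSION}}"
+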
 # Initializing Spark Streaming
 The first thing a Spark Streaming program must do is create a `StreamingContext` object, which tells Spark how to access a cluster. A `StreamingContext` can be created by using
 
@@ -34,7 +42,7 @@ ssc.textFileStream(directory)      // Creates a stream by monitoring and process
 ssc.socketStream(hostname, port)   // Creates a stream that uses a TCP socket to read data from hostname:port
 {% endhighlight %}
 
-We also provide a input streams for Kafka, Flume, Akka actor, etc. For a complete list of input streams, take a look at the [StreamingContext API documentation](api/streaming/index.html#spark.streaming.StreamingContext).
+We also provide input streams for Kafka, Flume, Akka actors, etc. For a complete list of input streams, take a look at the [StreamingContext API documentation](api/streaming/index.html#org.apache.spark.streaming.StreamingContext).
 
 
 
@@ -156,7 +164,7 @@ Spark Streaming features windowed computations, which allow you to apply transfo
 
 </table>
 
-A complete list of DStream operations is available in the API documentation of [DStream](api/streaming/index.html#spark.streaming.DStream) and [PairDStreamFunctions](api/streaming/index.html#spark.streaming.PairDStreamFunctions).
+A complete list of DStream operations is available in the API documentation of [DStream](api/streaming/index.html#org.apache.spark.streaming.DStream) and [PairDStreamFunctions](api/streaming/index.html#org.apache.spark.streaming.PairDStreamFunctions).
 
 ## Output Operations
 When an output operator is called, it triggers the computation of a stream. Currently the following output operators are defined:
@@ -203,11 +211,11 @@ ssc.stop()
 {% endhighlight %}
 
 # Example
-A simple example to start off is the [NetworkWordCount](https://github.com/mesos/spark/tree/master/examples/src/main/scala/spark/streaming/examples/NetworkWordCount.scala). This example counts the words received from a network server every second. Given below is the relevant sections of the source code. You can find the full source code in `<Spark repo>/streaming/src/main/scala/spark/streaming/examples/NetworkWordCount.scala` .
+A simple example to start off is the [NetworkWordCount](https://github.com/apache/incubator-spark/tree/master/examples/src/main/scala/spark/streaming/examples/NetworkWordCount.scala). This example counts the words received from a network server every second. Given below are the relevant sections of the source code. You can find the full source code in `<Spark repo>/streaming/src/main/scala/spark/streaming/examples/NetworkWordCount.scala`.
 
 {% highlight scala %}
-import spark.streaming.{Seconds, StreamingContext}
-import spark.streaming.StreamingContext._
+import org.apache.spark.streaming.{Seconds, StreamingContext}
+import StreamingContext._
 ...
 
 // Create the context and set up a network input stream to receive from a host:port
@@ -234,7 +242,7 @@ $ nc -lk 9999
 Then, in a different terminal, you can start NetworkWordCount by using
 
 {% highlight bash %}
-$ ./run-example spark.streaming.examples.NetworkWordCount local[2] localhost 9999
+$ ./run-example org.apache.spark.streaming.examples.NetworkWordCount local[2] localhost 9999
 {% endhighlight %}
 
 This will make NetworkWordCount connect to the netcat server. Any lines typed in the terminal running the netcat server will be counted and printed on screen.
@@ -272,7 +280,7 @@ Time: 1357008430000 ms
 </td>
 </table>
 
-You can find more examples in `<Spark repo>/streaming/src/main/scala/spark/streaming/examples/`. They can be run in the similar manner using `./run-example spark.streaming.examples....` . Executing without any parameter would give the required parameter list. Further explanation to run them can be found in comments in the files.
+You can find more examples in `<Spark repo>/streaming/src/main/scala/spark/streaming/examples/`. They can be run in a similar manner using `./run-example org.apache.spark.streaming.examples....`. Executing an example without any parameters prints its required parameter list. Further explanation of how to run them can be found in comments in the files.
 
 # DStream Persistence
 Similar to RDDs, DStreams also allow developers to persist the stream's data in memory. That is, using the `persist()` method on a DStream automatically persists every RDD of that DStream in memory. This is useful if the data in the DStream will be computed multiple times (e.g., multiple operations on the same data). For window-based operations like `reduceByWindow` and `reduceByKeyAndWindow` and state-based operations like `updateStateByKey`, this is implicitly true. Hence, DStreams generated by window-based operations are automatically persisted in memory, without the developer calling `persist()`.
@@ -301,8 +309,8 @@ dstream.checkpoint(checkpointInterval) // checkpointInterval must be a multiple
 For DStreams that must be checkpointed (that is, DStreams created by `updateStateByKey` and `reduceByKeyAndWindow` with an inverse function), the checkpoint interval of the DStream is by default set to a multiple of the DStream's sliding interval such that it is at least 10 seconds.
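+
+For example, a minimal sketch (assuming a hypothetical state DStream `stateDstream`, e.g. from `updateStateByKey`, and `Seconds` imported from `org.apache.spark.streaming`):
+
+{% highlight scala %}
+// checkpoint every 30 seconds -- a multiple of the DStream's sliding interval
+stateDstream.checkpoint(Seconds(30))
+{% endhighlight %}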
 
 
-## Customizing Receiver
-Spark comes with a built in support for most common usage scenarios where input stream source can be either a network socket stream to support for a few message queues. Apart from that it is also possible to supply your own custom receiver via a convenient API. Find more details at [Custom Receiver Guide](streaming-custom-receivers.html)
+## Custom Receivers
+Spark comes with built-in support for the most common usage scenarios, where the input stream source can be anything from a network socket stream to one of several message queues. Beyond these, it is also possible to supply your own custom receiver via a convenient API. Find more details in the [Custom Receiver Guide](streaming-custom-receivers.html).
 
 # Performance Tuning
 Getting the best performance out of a Spark Streaming application on a cluster requires a bit of tuning. This section explains a number of the parameters and configurations that can be tuned to improve the performance of your application. At a high level, you need to consider two things:
@@ -315,7 +323,7 @@ Getting the best performance of a Spark Streaming application on a cluster requi
 There are a number of optimizations that can be done in Spark to minimize the processing time of each batch. These have been discussed in detail in [Tuning Guide](tuning.html). This section highlights some of the most important ones.
 
 ### Level of Parallelism
-Cluster resources maybe under-utilized if the number of parallel tasks used in any stage of the computation is not high enough. For example, for distributed reduce operations like `reduceByKey` and `reduceByKeyAndWindow`, the default number of parallel tasks is 8. You can pass the level of parallelism as an argument (see the [`spark.PairDStreamFunctions`](api/streaming/index.html#spark.PairDStreamFunctions) documentation), or set the system property `spark.default.parallelism` to change the default.
+Cluster resources may be under-utilized if the number of parallel tasks used in any stage of the computation is not high enough. For example, for distributed reduce operations like `reduceByKey` and `reduceByKeyAndWindow`, the default number of parallel tasks is 8. You can pass the level of parallelism as an argument (see the [`PairDStreamFunctions`](api/streaming/index.html#org.apache.spark.PairDStreamFunctions) documentation), or set the system property `spark.default.parallelism` to change the default.
 
 ### Data Serialization
 The overhead of data serialization can be significant, especially when sub-second batch sizes are to be achieved. There are two aspects to it.
@@ -345,7 +353,7 @@ This value is closely tied with any window operation that is being used. Any win
 ## Memory Tuning
 Tuning the memory usage and GC behavior of Spark applications has been discussed in great detail in the [Tuning Guide](tuning.html). We recommend reading it. In this section, we highlight a few customizations that are strongly recommended to minimize GC-related pauses in Spark Streaming applications and achieve more consistent batch processing times.
 
-* **Default persistence level of DStreams**: Unlike RDDs, the default persistence level of DStreams serializes the data in memory (that is, [StorageLevel.MEMORY_ONLY_SER](api/core/index.html#spark.storage.StorageLevel$) for DStream compared to [StorageLevel.MEMORY_ONLY](api/core/index.html#spark.storage.StorageLevel$) for RDDs). Even though keeping the data serialized incurs a higher serialization overheads, it significantly reduces GC pauses.
+* **Default persistence level of DStreams**: Unlike RDDs, the default persistence level of DStreams serializes the data in memory (that is, [StorageLevel.MEMORY_ONLY_SER](api/core/index.html#org.apache.spark.storage.StorageLevel$) for DStreams compared to [StorageLevel.MEMORY_ONLY](api/core/index.html#org.apache.spark.storage.StorageLevel$) for RDDs). Even though keeping the data serialized incurs higher serialization overhead, it significantly reduces GC pauses.
 
 * **Concurrent garbage collector**: Using the concurrent mark-and-sweep GC further minimizes the variability of GC pauses. Even though concurrent GC is known to reduce the overall processing throughput of the system, its use is still recommended to achieve more consistent batch processing times.
 
@@ -467,10 +475,10 @@ If the driver had crashed in the middle of the processing of time 3, then it wil
 
 # Java API
 
-Similar to [Spark's Java API](java-programming-guide.html), we also provide a Java API for Spark Streaming which allows all its features to be accessible from a Java program. This is defined in [spark.streaming.api.java] (api/streaming/index.html#spark.streaming.api.java.package) package and includes [JavaStreamingContext](api/streaming/index.html#spark.streaming.api.java.JavaStreamingContext) and [JavaDStream](api/streaming/index.html#spark.streaming.api.java.JavaDStream) classes that provide the same methods as their Scala counterparts, but take Java functions (that is, Function, and Function2) and return Java data and collection types. Some of the key points to note are:
+Similar to [Spark's Java API](java-programming-guide.html), we also provide a Java API for Spark Streaming which allows all its features to be accessible from a Java program. This is defined in the [org.apache.spark.streaming.api.java](api/streaming/index.html#org.apache.spark.streaming.api.java.package) package and includes [JavaStreamingContext](api/streaming/index.html#org.apache.spark.streaming.api.java.JavaStreamingContext) and [JavaDStream](api/streaming/index.html#org.apache.spark.streaming.api.java.JavaDStream) classes that provide the same methods as their Scala counterparts, but take Java functions (that is, Function and Function2) and return Java data and collection types. Some of the key points to note are:
 
-1. Functions for transformations must be implemented as subclasses of [Function](api/core/index.html#spark.api.java.function.Function) and [Function2](api/core/index.html#spark.api.java.function.Function2)
-1. Unlike the Scala API, the Java API handles DStreams for key-value pairs using a separate [JavaPairDStream](api/streaming/index.html#spark.streaming.api.java.JavaPairDStream) class(similar to [JavaRDD and JavaPairRDD](java-programming-guide.html#rdd-classes). DStream functions like `map` and `filter` are implemented separately by JavaDStreams and JavaPairDStream to return DStreams of appropriate types.
+1. Functions for transformations must be implemented as subclasses of [Function](api/core/index.html#org.apache.spark.api.java.function.Function) and [Function2](api/core/index.html#org.apache.spark.api.java.function.Function2).
+1. Unlike the Scala API, the Java API handles DStreams for key-value pairs using a separate [JavaPairDStream](api/streaming/index.html#org.apache.spark.streaming.api.java.JavaPairDStream) class (similar to [JavaRDD and JavaPairRDD](java-programming-guide.html#rdd-classes)). DStream functions like `map` and `filter` are implemented separately by JavaDStream and JavaPairDStream to return DStreams of the appropriate types.
 
 Spark's [Java Programming Guide](java-programming-guide.html) gives more ideas about using the Java API. To extend the ideas presented for RDDs to DStreams, we present parts of the Java version of the same NetworkWordCount example presented above. The full source code is given at `<spark repo>/examples/src/main/java/spark/streaming/examples/JavaNetworkWordCount.java`.
 
@@ -482,7 +490,7 @@ JavaDStream<String> lines = ssc.socketTextStream(ip, port);
 {% endhighlight %}
 
 
-Then the `lines` are split into words by using the `flatMap` function and [FlatMapFunction](api/core/index.html#spark.api.java.function.FlatMapFunction).
+The `lines` are then split into words using the `flatMap` function and [FlatMapFunction](api/core/index.html#org.apache.spark.api.java.function.FlatMapFunction).
 
 {% highlight java %}
 JavaDStream<String> words = lines.flatMap(
@@ -494,7 +502,7 @@ JavaDStream<String> words = lines.flatMap(
   });
 {% endhighlight %}
 
-The `words` is then mapped to a [JavaPairDStream](api/streaming/index.html#spark.streaming.api.java.JavaPairDStream) of `(word, 1)` pairs using `map` and [PairFunction](api/core/index.html#spark.api.java.function.PairFunction). This is  reduced by using `reduceByKey` and [Function2](api/core/index.html#spark.api.java.function.Function2).
+The `words` DStream is then mapped to a [JavaPairDStream](api/streaming/index.html#org.apache.spark.streaming.api.java.JavaPairDStream) of `(word, 1)` pairs using `map` and [PairFunction](api/core/index.html#org.apache.spark.api.java.function.PairFunction). This is then reduced using `reduceByKey` and [Function2](api/core/index.html#org.apache.spark.api.java.function.Function2).
 
 {% highlight java %}
 JavaPairDStream<String, Integer> wordCounts = words.map(
@@ -513,8 +521,8 @@ JavaPairDStream<String, Integer> wordCounts = words.map(
 {% endhighlight %}
 
 
-
 # Where to Go from Here
-* API docs - [Scala](api/streaming/index.html#spark.streaming.package) and [Java](api/streaming/index.html#spark.streaming.api.java.package)
-* More examples - [Scala](https://github.com/mesos/spark/tree/master/examples/src/main/scala/spark/streaming/examples) and [Java](https://github.com/mesos/spark/tree/master/examples/src/main/java/spark/streaming/examples)
+
+* API docs - [Scala](api/streaming/index.html#org.apache.spark.streaming.package) and [Java](api/streaming/index.html#org.apache.spark.streaming.api.java.package)
+* More examples - [Scala](https://github.com/apache/incubator-spark/tree/master/examples/src/main/scala/spark/streaming/examples) and [Java](https://github.com/apache/incubator-spark/tree/master/examples/src/main/java/spark/streaming/examples)
 * [Paper describing Spark Streaming](http://www.eecs.berkeley.edu/Pubs/TechRpts/2012/EECS-2012-259.pdf)
diff --git a/docs/tuning.md b/docs/tuning.md
index 5ffca54481b805c98ed70689a8eba1e9b2b6b35b..28d88a26592725ff750d09b2c924fe3dd72a8ce2 100644
--- a/docs/tuning.md
+++ b/docs/tuning.md
@@ -32,24 +32,25 @@ in your operations) and performance. It provides two serialization libraries:
   [`java.io.Externalizable`](http://docs.oracle.com/javase/6/docs/api/java/io/Externalizable.html).
   Java serialization is flexible but often quite slow, and leads to large
   serialized formats for many classes.
-* [Kryo serialization](http://code.google.com/p/kryo/wiki/V1Documentation): Spark can also use
+* [Kryo serialization](http://code.google.com/p/kryo/): Spark can also use
   the Kryo library (version 2) to serialize objects more quickly. Kryo is significantly
   faster and more compact than Java serialization (often as much as 10x), but does not support all
   `Serializable` types and requires you to *register* the classes you'll use in the program in advance
   for best performance.
 
-You can switch to using Kryo by calling `System.setProperty("spark.serializer", "spark.KryoSerializer")`
+You can switch to using Kryo by calling `System.setProperty("spark.serializer", "org.apache.spark.serializer.KryoSerializer")`
 *before* creating your SparkContext. The only reason it is not the default is because of the custom
 registration requirement, but we recommend trying it in any network-intensive application.
 
 Finally, to register your classes with Kryo, create a public class that extends
-[`spark.KryoRegistrator`](api/core/index.html#spark.KryoRegistrator) and set the
+[`org.apache.spark.serializer.KryoRegistrator`](api/core/index.html#org.apache.spark.serializer.KryoRegistrator) and set the
 `spark.kryo.registrator` system property to point to it, as follows:
 
 {% highlight scala %}
 import com.esotericsoftware.kryo.Kryo
+import org.apache.spark.serializer.KryoRegistrator
 
-class MyRegistrator extends spark.KryoRegistrator {
+class MyRegistrator extends KryoRegistrator {
   override def registerClasses(kryo: Kryo) {
     kryo.register(classOf[MyClass1])
     kryo.register(classOf[MyClass2])
@@ -57,7 +58,7 @@ class MyRegistrator extends spark.KryoRegistrator {
 }
 
 // Make sure to set these properties *before* creating a SparkContext!
-System.setProperty("spark.serializer", "spark.KryoSerializer")
+System.setProperty("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
 System.setProperty("spark.kryo.registrator", "mypackage.MyRegistrator")
 val sc = new SparkContext(...)
 {% endhighlight %}
@@ -216,7 +217,7 @@ enough. Spark automatically sets the number of "map" tasks to run on each file a
 (though you can control it through optional parameters to `SparkContext.textFile`, etc), and for
 distributed "reduce" operations, such as `groupByKey` and `reduceByKey`, it uses the largest
 parent RDD's number of partitions. You can pass the level of parallelism as a second argument
-(see the [`spark.PairRDDFunctions`](api/core/index.html#spark.PairRDDFunctions) documentation),
+(see the [`PairRDDFunctions`](api/core/index.html#org.apache.spark.rdd.PairRDDFunctions) documentation),
 or set the system property `spark.default.parallelism` to change the default.
 In general, we recommend 2-3 tasks per CPU core in your cluster.
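+
+For example, a minimal sketch (assuming an existing pair RDD `pairs`):
+
+{% highlight scala %}
+// use 32 reduce tasks for this shuffle instead of the default
+val counts = pairs.reduceByKey(_ + _, 32)
+
+// or change the default for all shuffle operations (set before creating the SparkContext)
+System.setProperty("spark.default.parallelism", "32")
+{% endhighlight %}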
 
diff --git a/examples/pom.xml b/examples/pom.xml
index 687fbcca8f34cf3b7b003fd2b8a2d3f72d71d032..224cf6c96c9cc91b80a03963f777c246fae403f7 100644
--- a/examples/pom.xml
+++ b/examples/pom.xml
@@ -19,39 +19,39 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <parent>
-    <groupId>org.spark-project</groupId>
+    <groupId>org.apache.spark</groupId>
     <artifactId>spark-parent</artifactId>
     <version>0.8.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 
-  <groupId>org.spark-project</groupId>
+  <groupId>org.apache.spark</groupId>
   <artifactId>spark-examples</artifactId>
   <packaging>jar</packaging>
   <name>Spark Project Examples</name>
-  <url>http://spark-project.org/</url>
+  <url>http://spark.incubator.apache.org/</url>
 
   <dependencies>
     <dependency>
-      <groupId>org.spark-project</groupId>
+      <groupId>org.apache.spark</groupId>
       <artifactId>spark-core</artifactId>
       <version>${project.version}</version>
       <scope>provided</scope>
     </dependency>
     <dependency>
-      <groupId>org.spark-project</groupId>
+      <groupId>org.apache.spark</groupId>
       <artifactId>spark-streaming</artifactId>
       <version>${project.version}</version>
       <scope>provided</scope>
     </dependency>
     <dependency>
-      <groupId>org.spark-project</groupId>
+      <groupId>org.apache.spark</groupId>
       <artifactId>spark-mllib</artifactId>
       <version>${project.version}</version>
       <scope>provided</scope>
     </dependency>
     <dependency>
-      <groupId>org.spark-project</groupId>
+      <groupId>org.apache.spark</groupId>
       <artifactId>spark-bagel</artifactId>
       <version>${project.version}</version>
       <scope>provided</scope>
@@ -132,7 +132,7 @@
       <id>hadoop2-yarn</id>
       <dependencies>
         <dependency>
-          <groupId>org.spark-project</groupId>
+          <groupId>org.apache.spark</groupId>
           <artifactId>spark-yarn</artifactId>
           <version>${project.version}</version>
           <scope>provided</scope>
diff --git a/examples/src/main/java/spark/examples/JavaHdfsLR.java b/examples/src/main/java/org/apache/spark/examples/JavaHdfsLR.java
similarity index 94%
rename from examples/src/main/java/spark/examples/JavaHdfsLR.java
rename to examples/src/main/java/org/apache/spark/examples/JavaHdfsLR.java
index 9485e0cfa96639c9ac36d7eb5aca186fb63b3c3f..be0d38589c5df6f2925e06f7a081ff6c72057530 100644
--- a/examples/src/main/java/spark/examples/JavaHdfsLR.java
+++ b/examples/src/main/java/org/apache/spark/examples/JavaHdfsLR.java
@@ -15,12 +15,12 @@
  * limitations under the License.
  */
 
-package spark.examples;
+package org.apache.spark.examples;
 
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
-import spark.api.java.function.Function;
-import spark.api.java.function.Function2;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.Function;
+import org.apache.spark.api.java.function.Function2;
 
 import java.io.Serializable;
 import java.util.Arrays;
diff --git a/examples/src/main/java/spark/examples/JavaKMeans.java b/examples/src/main/java/org/apache/spark/examples/JavaKMeans.java
similarity index 92%
rename from examples/src/main/java/spark/examples/JavaKMeans.java
rename to examples/src/main/java/org/apache/spark/examples/JavaKMeans.java
index 2d34776177ca261d4ab82f56cc4d8e550be66cfc..5a6afe7eaefd6112a7348e77db4a40e1bc9136d9 100644
--- a/examples/src/main/java/spark/examples/JavaKMeans.java
+++ b/examples/src/main/java/org/apache/spark/examples/JavaKMeans.java
@@ -15,15 +15,15 @@
  * limitations under the License.
  */
 
-package spark.examples;
+package org.apache.spark.examples;
 
 import scala.Tuple2;
-import spark.api.java.JavaPairRDD;
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
-import spark.api.java.function.Function;
-import spark.api.java.function.PairFunction;
-import spark.util.Vector;
+import org.apache.spark.api.java.JavaPairRDD;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.Function;
+import org.apache.spark.api.java.function.PairFunction;
+import org.apache.spark.util.Vector;
 
 import java.util.List;
 import java.util.Map;
diff --git a/examples/src/main/java/spark/examples/JavaLogQuery.java b/examples/src/main/java/org/apache/spark/examples/JavaLogQuery.java
similarity index 94%
rename from examples/src/main/java/spark/examples/JavaLogQuery.java
rename to examples/src/main/java/org/apache/spark/examples/JavaLogQuery.java
index d22684d98044f69745d5e96fc112fb6544e57565..152f02921338a8ab974effdd211e1365f5f55741 100644
--- a/examples/src/main/java/spark/examples/JavaLogQuery.java
+++ b/examples/src/main/java/org/apache/spark/examples/JavaLogQuery.java
@@ -15,16 +15,16 @@
  * limitations under the License.
  */
 
-package spark.examples;
+package org.apache.spark.examples;
 
 import com.google.common.collect.Lists;
 import scala.Tuple2;
 import scala.Tuple3;
-import spark.api.java.JavaPairRDD;
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
-import spark.api.java.function.Function2;
-import spark.api.java.function.PairFunction;
+import org.apache.spark.api.java.JavaPairRDD;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.Function2;
+import org.apache.spark.api.java.function.PairFunction;
 
 import java.io.Serializable;
 import java.util.Collections;
diff --git a/examples/src/main/java/spark/examples/JavaPageRank.java b/examples/src/main/java/org/apache/spark/examples/JavaPageRank.java
similarity index 89%
rename from examples/src/main/java/spark/examples/JavaPageRank.java
rename to examples/src/main/java/org/apache/spark/examples/JavaPageRank.java
index 75df1af2e30b84fbc6ddd69b6189d8ebe2eeeedd..c5603a639bdd9c1abf3127c7ebf859b362db4aac 100644
--- a/examples/src/main/java/spark/examples/JavaPageRank.java
+++ b/examples/src/main/java/org/apache/spark/examples/JavaPageRank.java
@@ -15,17 +15,17 @@
  * limitations under the License.
  */
 
-package spark.examples;
+package org.apache.spark.examples;
 
 import scala.Tuple2;
-import spark.api.java.JavaPairRDD;
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
-import spark.api.java.function.FlatMapFunction;
-import spark.api.java.function.Function;
-import spark.api.java.function.Function2;
-import spark.api.java.function.PairFlatMapFunction;
-import spark.api.java.function.PairFunction;
+import org.apache.spark.api.java.JavaPairRDD;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.FlatMapFunction;
+import org.apache.spark.api.java.function.Function;
+import org.apache.spark.api.java.function.Function2;
+import org.apache.spark.api.java.function.PairFlatMapFunction;
+import org.apache.spark.api.java.function.PairFunction;
 
 import java.util.List;
 import java.util.ArrayList;
diff --git a/examples/src/main/java/spark/examples/JavaSparkPi.java b/examples/src/main/java/org/apache/spark/examples/JavaSparkPi.java
similarity index 89%
rename from examples/src/main/java/spark/examples/JavaSparkPi.java
rename to examples/src/main/java/org/apache/spark/examples/JavaSparkPi.java
index d5f42fbb38e4698f7e555353481f6699603f1862..4a2380caf5af554c1daa13f198326a7961699e5c 100644
--- a/examples/src/main/java/spark/examples/JavaSparkPi.java
+++ b/examples/src/main/java/org/apache/spark/examples/JavaSparkPi.java
@@ -15,12 +15,12 @@
  * limitations under the License.
  */
 
-package spark.examples;
+package org.apache.spark.examples;
 
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
-import spark.api.java.function.Function;
-import spark.api.java.function.Function2;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.Function;
+import org.apache.spark.api.java.function.Function2;
 
 import java.util.ArrayList;
 import java.util.List;
diff --git a/examples/src/main/java/spark/examples/JavaTC.java b/examples/src/main/java/org/apache/spark/examples/JavaTC.java
similarity index 94%
rename from examples/src/main/java/spark/examples/JavaTC.java
rename to examples/src/main/java/org/apache/spark/examples/JavaTC.java
index 559d7f9e53372a1345f29311e611f96c01002460..17f21f6b776d152cfff1c733542fd9afb2c1975f 100644
--- a/examples/src/main/java/spark/examples/JavaTC.java
+++ b/examples/src/main/java/org/apache/spark/examples/JavaTC.java
@@ -15,12 +15,12 @@
  * limitations under the License.
  */
 
-package spark.examples;
+package org.apache.spark.examples;
 
 import scala.Tuple2;
-import spark.api.java.JavaPairRDD;
-import spark.api.java.JavaSparkContext;
-import spark.api.java.function.PairFunction;
+import org.apache.spark.api.java.JavaPairRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.PairFunction;
 
 import java.util.ArrayList;
 import java.util.HashSet;
diff --git a/examples/src/main/java/spark/examples/JavaWordCount.java b/examples/src/main/java/org/apache/spark/examples/JavaWordCount.java
similarity index 85%
rename from examples/src/main/java/spark/examples/JavaWordCount.java
rename to examples/src/main/java/org/apache/spark/examples/JavaWordCount.java
index 1af370c1c3852478ef982f69f667cb635e677a60..07d32ad659a74dd1bcfcdc808be0022646b8bd67 100644
--- a/examples/src/main/java/spark/examples/JavaWordCount.java
+++ b/examples/src/main/java/org/apache/spark/examples/JavaWordCount.java
@@ -15,15 +15,15 @@
  * limitations under the License.
  */
 
-package spark.examples;
+package org.apache.spark.examples;
 
 import scala.Tuple2;
-import spark.api.java.JavaPairRDD;
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
-import spark.api.java.function.FlatMapFunction;
-import spark.api.java.function.Function2;
-import spark.api.java.function.PairFunction;
+import org.apache.spark.api.java.JavaPairRDD;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.FlatMapFunction;
+import org.apache.spark.api.java.function.Function2;
+import org.apache.spark.api.java.function.PairFunction;
 
 import java.util.Arrays;
 import java.util.List;
diff --git a/examples/src/main/java/spark/mllib/examples/JavaALS.java b/examples/src/main/java/org/apache/spark/mllib/examples/JavaALS.java
similarity index 88%
rename from examples/src/main/java/spark/mllib/examples/JavaALS.java
rename to examples/src/main/java/org/apache/spark/mllib/examples/JavaALS.java
index b48f459cb7ee2d4f5c385ea5b10b58ad90c8b309..628cb892b686267c1996433faf324ab35b45ced4 100644
--- a/examples/src/main/java/spark/mllib/examples/JavaALS.java
+++ b/examples/src/main/java/org/apache/spark/mllib/examples/JavaALS.java
@@ -15,15 +15,15 @@
  * limitations under the License.
  */
 
-package spark.mllib.examples;
+package org.apache.spark.mllib.examples;
 
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
-import spark.api.java.function.Function;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.Function;
 
-import spark.mllib.recommendation.ALS;
-import spark.mllib.recommendation.MatrixFactorizationModel;
-import spark.mllib.recommendation.Rating;
+import org.apache.spark.mllib.recommendation.ALS;
+import org.apache.spark.mllib.recommendation.MatrixFactorizationModel;
+import org.apache.spark.mllib.recommendation.Rating;
 
 import java.io.Serializable;
 import java.util.Arrays;
diff --git a/examples/src/main/java/spark/mllib/examples/JavaKMeans.java b/examples/src/main/java/org/apache/spark/mllib/examples/JavaKMeans.java
similarity index 89%
rename from examples/src/main/java/spark/mllib/examples/JavaKMeans.java
rename to examples/src/main/java/org/apache/spark/mllib/examples/JavaKMeans.java
index 02f40438b82c3667a076d4a51bf8c70e7aa29131..cd59a139b9fee00c5faa413d2ca9a453d1ba9526 100644
--- a/examples/src/main/java/spark/mllib/examples/JavaKMeans.java
+++ b/examples/src/main/java/org/apache/spark/mllib/examples/JavaKMeans.java
@@ -15,14 +15,14 @@
  * limitations under the License.
  */
 
-package spark.mllib.examples;
+package org.apache.spark.mllib.examples;
 
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
-import spark.api.java.function.Function;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.Function;
 
-import spark.mllib.clustering.KMeans;
-import spark.mllib.clustering.KMeansModel;
+import org.apache.spark.mllib.clustering.KMeans;
+import org.apache.spark.mllib.clustering.KMeansModel;
 
 import java.util.Arrays;
 import java.util.StringTokenizer;
diff --git a/examples/src/main/java/spark/mllib/examples/JavaLR.java b/examples/src/main/java/org/apache/spark/mllib/examples/JavaLR.java
similarity index 87%
rename from examples/src/main/java/spark/mllib/examples/JavaLR.java
rename to examples/src/main/java/org/apache/spark/mllib/examples/JavaLR.java
index bf4aeaf40f63e411a39353b0e49a3d9aeffd3904..258061c8e6ba627fd388b4991a1be62d40eb3277 100644
--- a/examples/src/main/java/spark/mllib/examples/JavaLR.java
+++ b/examples/src/main/java/org/apache/spark/mllib/examples/JavaLR.java
@@ -15,16 +15,16 @@
  * limitations under the License.
  */
 
-package spark.mllib.examples;
+package org.apache.spark.mllib.examples;
 
 
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
-import spark.api.java.function.Function;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.Function;
 
-import spark.mllib.classification.LogisticRegressionWithSGD;
-import spark.mllib.classification.LogisticRegressionModel;
-import spark.mllib.regression.LabeledPoint;
+import org.apache.spark.mllib.classification.LogisticRegressionWithSGD;
+import org.apache.spark.mllib.classification.LogisticRegressionModel;
+import org.apache.spark.mllib.regression.LabeledPoint;
 
 import java.util.Arrays;
 import java.util.StringTokenizer;
diff --git a/examples/src/main/java/spark/streaming/examples/JavaFlumeEventCount.java b/examples/src/main/java/org/apache/spark/streaming/examples/JavaFlumeEventCount.java
similarity index 90%
rename from examples/src/main/java/spark/streaming/examples/JavaFlumeEventCount.java
rename to examples/src/main/java/org/apache/spark/streaming/examples/JavaFlumeEventCount.java
index 096a9ae21976ffed603329102ba056acf1b0ccb4..261813bf2f39c49f61df463e864df12fdedad858 100644
--- a/examples/src/main/java/spark/streaming/examples/JavaFlumeEventCount.java
+++ b/examples/src/main/java/org/apache/spark/streaming/examples/JavaFlumeEventCount.java
@@ -15,12 +15,12 @@
  * limitations under the License.
  */
 
-package spark.streaming.examples;
+package org.apache.spark.streaming.examples;
 
-import spark.api.java.function.Function;
-import spark.streaming.*;
-import spark.streaming.api.java.*;
-import spark.streaming.dstream.SparkFlumeEvent;
+import org.apache.spark.api.java.function.Function;
+import org.apache.spark.streaming.*;
+import org.apache.spark.streaming.api.java.*;
+import org.apache.spark.streaming.dstream.SparkFlumeEvent;
 
 /**
  *  Produces a count of events received from Flume.
diff --git a/examples/src/main/java/spark/streaming/examples/JavaNetworkWordCount.java b/examples/src/main/java/org/apache/spark/streaming/examples/JavaNetworkWordCount.java
similarity index 86%
rename from examples/src/main/java/spark/streaming/examples/JavaNetworkWordCount.java
rename to examples/src/main/java/org/apache/spark/streaming/examples/JavaNetworkWordCount.java
index c54d3f3d599a5e58a9f52901e30e79f45741a75b..def87c199be57eb0352c35fc5e3858e8ea46f7c7 100644
--- a/examples/src/main/java/spark/streaming/examples/JavaNetworkWordCount.java
+++ b/examples/src/main/java/org/apache/spark/streaming/examples/JavaNetworkWordCount.java
@@ -15,17 +15,17 @@
  * limitations under the License.
  */
 
-package spark.streaming.examples;
+package org.apache.spark.streaming.examples;
 
 import com.google.common.collect.Lists;
 import scala.Tuple2;
-import spark.api.java.function.FlatMapFunction;
-import spark.api.java.function.Function2;
-import spark.api.java.function.PairFunction;
-import spark.streaming.Duration;
-import spark.streaming.api.java.JavaDStream;
-import spark.streaming.api.java.JavaPairDStream;
-import spark.streaming.api.java.JavaStreamingContext;
+import org.apache.spark.api.java.function.FlatMapFunction;
+import org.apache.spark.api.java.function.Function2;
+import org.apache.spark.api.java.function.PairFunction;
+import org.apache.spark.streaming.Duration;
+import org.apache.spark.streaming.api.java.JavaDStream;
+import org.apache.spark.streaming.api.java.JavaPairDStream;
+import org.apache.spark.streaming.api.java.JavaStreamingContext;
 
 /**
  * Counts words in UTF8 encoded, '\n' delimited text received from the network every second.
diff --git a/examples/src/main/java/spark/streaming/examples/JavaQueueStream.java b/examples/src/main/java/org/apache/spark/streaming/examples/JavaQueueStream.java
similarity index 85%
rename from examples/src/main/java/spark/streaming/examples/JavaQueueStream.java
rename to examples/src/main/java/org/apache/spark/streaming/examples/JavaQueueStream.java
index 1f4a991542876e9b1af7615eaf445af05fe19207..c8c7389dd1bbaa11407f4d28ea1bbf08925c5b40 100644
--- a/examples/src/main/java/spark/streaming/examples/JavaQueueStream.java
+++ b/examples/src/main/java/org/apache/spark/streaming/examples/JavaQueueStream.java
@@ -15,17 +15,17 @@
  * limitations under the License.
  */
 
-package spark.streaming.examples;
+package org.apache.spark.streaming.examples;
 
 import com.google.common.collect.Lists;
 import scala.Tuple2;
-import spark.api.java.JavaRDD;
-import spark.api.java.function.Function2;
-import spark.api.java.function.PairFunction;
-import spark.streaming.Duration;
-import spark.streaming.api.java.JavaDStream;
-import spark.streaming.api.java.JavaPairDStream;
-import spark.streaming.api.java.JavaStreamingContext;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.function.Function2;
+import org.apache.spark.api.java.function.PairFunction;
+import org.apache.spark.streaming.Duration;
+import org.apache.spark.streaming.api.java.JavaDStream;
+import org.apache.spark.streaming.api.java.JavaPairDStream;
+import org.apache.spark.streaming.api.java.JavaStreamingContext;
 
 import java.util.LinkedList;
 import java.util.List;
diff --git a/examples/src/main/scala/spark/examples/BroadcastTest.scala b/examples/src/main/scala/org/apache/spark/examples/BroadcastTest.scala
similarity index 95%
rename from examples/src/main/scala/spark/examples/BroadcastTest.scala
rename to examples/src/main/scala/org/apache/spark/examples/BroadcastTest.scala
index 911490cb6c12412dc24d047c1a6dad5fe4b28331..868ff81f67a82837ed0c108374c009d37a9a75bb 100644
--- a/examples/src/main/scala/spark/examples/BroadcastTest.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/BroadcastTest.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.examples
+package org.apache.spark.examples
 
-import spark.SparkContext
+import org.apache.spark.SparkContext
 
 object BroadcastTest {
   def main(args: Array[String]) {
diff --git a/examples/src/main/scala/spark/examples/CassandraTest.scala b/examples/src/main/scala/org/apache/spark/examples/CassandraTest.scala
similarity index 98%
rename from examples/src/main/scala/spark/examples/CassandraTest.scala
rename to examples/src/main/scala/org/apache/spark/examples/CassandraTest.scala
index 104bfd52047f30411460d11544db543aacbf9082..33bf7151a7cc493bfce74029a3734bbc138fbd9a 100644
--- a/examples/src/main/scala/spark/examples/CassandraTest.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/CassandraTest.scala
@@ -15,15 +15,15 @@
  * limitations under the License.
  */
 
-package spark.examples
+package org.apache.spark.examples
 
 import org.apache.hadoop.mapreduce.Job
 import org.apache.cassandra.hadoop.ColumnFamilyOutputFormat
 import org.apache.cassandra.hadoop.ConfigHelper
 import org.apache.cassandra.hadoop.ColumnFamilyInputFormat
 import org.apache.cassandra.thrift._
-import spark.SparkContext
-import spark.SparkContext._
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
 import java.nio.ByteBuffer
 import java.util.SortedMap
 import org.apache.cassandra.db.IColumn
diff --git a/examples/src/main/scala/spark/examples/ExceptionHandlingTest.scala b/examples/src/main/scala/org/apache/spark/examples/ExceptionHandlingTest.scala
similarity index 94%
rename from examples/src/main/scala/spark/examples/ExceptionHandlingTest.scala
rename to examples/src/main/scala/org/apache/spark/examples/ExceptionHandlingTest.scala
index 67ddaec8d200355b6d443d417e474d53a709582c..92eb96bd8e0c2a04f5a26adbad79cc2a757f2be0 100644
--- a/examples/src/main/scala/spark/examples/ExceptionHandlingTest.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/ExceptionHandlingTest.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.examples
+package org.apache.spark.examples
 
-import spark.SparkContext
+import org.apache.spark.SparkContext
 
 object ExceptionHandlingTest {
   def main(args: Array[String]) {
diff --git a/examples/src/main/scala/spark/examples/GroupByTest.scala b/examples/src/main/scala/org/apache/spark/examples/GroupByTest.scala
similarity index 94%
rename from examples/src/main/scala/spark/examples/GroupByTest.scala
rename to examples/src/main/scala/org/apache/spark/examples/GroupByTest.scala
index 5cee4136158b45b68a7b2ea9a785e50478d7c3e1..42c2e0e8e19c4af07e28ea8cd19bda9b935b871b 100644
--- a/examples/src/main/scala/spark/examples/GroupByTest.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/GroupByTest.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.examples
+package org.apache.spark.examples
 
-import spark.SparkContext
-import spark.SparkContext._
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
 import java.util.Random
 
 object GroupByTest {
diff --git a/examples/src/main/scala/spark/examples/HBaseTest.scala b/examples/src/main/scala/org/apache/spark/examples/HBaseTest.scala
similarity index 94%
rename from examples/src/main/scala/spark/examples/HBaseTest.scala
rename to examples/src/main/scala/org/apache/spark/examples/HBaseTest.scala
index 4dd6c243acfa28962e5d675b00bda59f7d334458..efe2e93b0dc91cb7a77c532ca1084bc6fca3d593 100644
--- a/examples/src/main/scala/spark/examples/HBaseTest.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/HBaseTest.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.examples
+package org.apache.spark.examples
 
-import spark._
-import spark.rdd.NewHadoopRDD
+import org.apache.spark._
+import org.apache.spark.rdd.NewHadoopRDD
 import org.apache.hadoop.hbase.{HBaseConfiguration, HTableDescriptor}
 import org.apache.hadoop.hbase.client.HBaseAdmin
 import org.apache.hadoop.hbase.mapreduce.TableInputFormat
diff --git a/examples/src/main/scala/spark/examples/HdfsTest.scala b/examples/src/main/scala/org/apache/spark/examples/HdfsTest.scala
similarity index 95%
rename from examples/src/main/scala/spark/examples/HdfsTest.scala
rename to examples/src/main/scala/org/apache/spark/examples/HdfsTest.scala
index 23258336e23a1aa1e37731271a6fdca0caab9b38..d6a88d3032c494bbf2bf5f651a0041993fe09b79 100644
--- a/examples/src/main/scala/spark/examples/HdfsTest.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/HdfsTest.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.examples
+package org.apache.spark.examples
 
-import spark._
+import org.apache.spark._
 
 object HdfsTest {
   def main(args: Array[String]) {
diff --git a/examples/src/main/scala/spark/examples/LocalALS.scala b/examples/src/main/scala/org/apache/spark/examples/LocalALS.scala
similarity index 99%
rename from examples/src/main/scala/spark/examples/LocalALS.scala
rename to examples/src/main/scala/org/apache/spark/examples/LocalALS.scala
index 7a449a9d720185d151ae3eb604913f68194273ab..4af45b2b4a0670df6d8c5072c818cdf0be72fe1b 100644
--- a/examples/src/main/scala/spark/examples/LocalALS.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/LocalALS.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.examples
+package org.apache.spark.examples
 
 import scala.math.sqrt
 import cern.jet.math._
diff --git a/examples/src/main/scala/spark/examples/LocalFileLR.scala b/examples/src/main/scala/org/apache/spark/examples/LocalFileLR.scala
similarity index 96%
rename from examples/src/main/scala/spark/examples/LocalFileLR.scala
rename to examples/src/main/scala/org/apache/spark/examples/LocalFileLR.scala
index c1f8d32aa87b9852662c896107c163d8d3fec491..fb130ea1988f76ff9456e5f2d2db58b1e53377cc 100644
--- a/examples/src/main/scala/spark/examples/LocalFileLR.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/LocalFileLR.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.examples
+package org.apache.spark.examples
 
 import java.util.Random
-import spark.util.Vector
+import org.apache.spark.util.Vector
 
 object LocalFileLR {
   val D = 10   // Number of dimensions
diff --git a/examples/src/main/scala/spark/examples/LocalKMeans.scala b/examples/src/main/scala/org/apache/spark/examples/LocalKMeans.scala
similarity index 96%
rename from examples/src/main/scala/spark/examples/LocalKMeans.scala
rename to examples/src/main/scala/org/apache/spark/examples/LocalKMeans.scala
index 0a0bc6f476b41496b804c620f1091f9f48560619..f90ea35cd447c035848fbf124d726d94645ade74 100644
--- a/examples/src/main/scala/spark/examples/LocalKMeans.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/LocalKMeans.scala
@@ -15,11 +15,11 @@
  * limitations under the License.
  */
 
-package spark.examples
+package org.apache.spark.examples
 
 import java.util.Random
-import spark.util.Vector
-import spark.SparkContext._
+import org.apache.spark.util.Vector
+import org.apache.spark.SparkContext._
 import scala.collection.mutable.HashMap
 import scala.collection.mutable.HashSet
 
diff --git a/examples/src/main/scala/spark/examples/LocalLR.scala b/examples/src/main/scala/org/apache/spark/examples/LocalLR.scala
similarity index 96%
rename from examples/src/main/scala/spark/examples/LocalLR.scala
rename to examples/src/main/scala/org/apache/spark/examples/LocalLR.scala
index ab99bf1fbe44bbc240dd46186167f673fc1a0b7b..cd4e9f1af0e2c81887ae8b579de3ff19960e415c 100644
--- a/examples/src/main/scala/spark/examples/LocalLR.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/LocalLR.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.examples
+package org.apache.spark.examples
 
 import java.util.Random
-import spark.util.Vector
+import org.apache.spark.util.Vector
 
 /**
  * Logistic regression based classification.
diff --git a/examples/src/main/scala/spark/examples/LocalPi.scala b/examples/src/main/scala/org/apache/spark/examples/LocalPi.scala
similarity index 94%
rename from examples/src/main/scala/spark/examples/LocalPi.scala
rename to examples/src/main/scala/org/apache/spark/examples/LocalPi.scala
index ccd69695df4ad1a8edca6cd2f43d33b0e8a1942c..bb7f22ec8df42b489f1cf1ff81129f323f941d4a 100644
--- a/examples/src/main/scala/spark/examples/LocalPi.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/LocalPi.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.examples
+package org.apache.spark.examples
 
 import scala.math.random
-import spark._
+import org.apache.spark._
 import SparkContext._
 
 object LocalPi {
diff --git a/examples/src/main/scala/spark/examples/LogQuery.scala b/examples/src/main/scala/org/apache/spark/examples/LogQuery.scala
similarity index 97%
rename from examples/src/main/scala/spark/examples/LogQuery.scala
rename to examples/src/main/scala/org/apache/spark/examples/LogQuery.scala
index e815ececf7ae95461c3abe886014a87870b91869..17ff3ce76497f6297b583e3678c9ac06fcc9550a 100644
--- a/examples/src/main/scala/spark/examples/LogQuery.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/LogQuery.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.examples
+package org.apache.spark.examples
 
-import spark.SparkContext
-import spark.SparkContext._
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
 /**
  * Executes a roll-up-style query against Apache logs.
  */
diff --git a/examples/src/main/scala/spark/examples/MultiBroadcastTest.scala b/examples/src/main/scala/org/apache/spark/examples/MultiBroadcastTest.scala
similarity index 95%
rename from examples/src/main/scala/spark/examples/MultiBroadcastTest.scala
rename to examples/src/main/scala/org/apache/spark/examples/MultiBroadcastTest.scala
index d0b1cf06e59df22e76fad4d3cc992c7250f362ab..f79f0142b8679134210db7616284176a994f3c36 100644
--- a/examples/src/main/scala/spark/examples/MultiBroadcastTest.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/MultiBroadcastTest.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.examples
+package org.apache.spark.examples
 
-import spark.SparkContext
+import org.apache.spark.SparkContext
 
 object MultiBroadcastTest {
   def main(args: Array[String]) {
diff --git a/examples/src/main/scala/spark/examples/SimpleSkewedGroupByTest.scala b/examples/src/main/scala/org/apache/spark/examples/SimpleSkewedGroupByTest.scala
similarity index 96%
rename from examples/src/main/scala/spark/examples/SimpleSkewedGroupByTest.scala
rename to examples/src/main/scala/org/apache/spark/examples/SimpleSkewedGroupByTest.scala
index d197bbaf7c47837fc8376e7e3445f7fd7b13ae29..37ddfb5db7635ca2d7303b77105b74580a515efb 100644
--- a/examples/src/main/scala/spark/examples/SimpleSkewedGroupByTest.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SimpleSkewedGroupByTest.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.examples
+package org.apache.spark.examples
 
-import spark.SparkContext
-import spark.SparkContext._
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
 import java.util.Random
 
 object SimpleSkewedGroupByTest {
diff --git a/examples/src/main/scala/spark/examples/SkewedGroupByTest.scala b/examples/src/main/scala/org/apache/spark/examples/SkewedGroupByTest.scala
similarity index 95%
rename from examples/src/main/scala/spark/examples/SkewedGroupByTest.scala
rename to examples/src/main/scala/org/apache/spark/examples/SkewedGroupByTest.scala
index 4641b8244487eb40e022fd70159d97d308353a7a..9c954b2b5baa996a0cdf433fe7c373371cdfd60c 100644
--- a/examples/src/main/scala/spark/examples/SkewedGroupByTest.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SkewedGroupByTest.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.examples
+package org.apache.spark.examples
 
-import spark.SparkContext
-import spark.SparkContext._
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
 import java.util.Random
 
 object SkewedGroupByTest {
diff --git a/examples/src/main/scala/spark/examples/SparkALS.scala b/examples/src/main/scala/org/apache/spark/examples/SparkALS.scala
similarity index 98%
rename from examples/src/main/scala/spark/examples/SparkALS.scala
rename to examples/src/main/scala/org/apache/spark/examples/SparkALS.scala
index ba0dfd8f9b5c4109df77a38cd168f808f4ec93e8..814944ba1c6bf63f591ae3ca04b3e0b864f3fef8 100644
--- a/examples/src/main/scala/spark/examples/SparkALS.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SparkALS.scala
@@ -15,13 +15,13 @@
  * limitations under the License.
  */
 
-package spark.examples
+package org.apache.spark.examples
 
 import scala.math.sqrt
 import cern.jet.math._
 import cern.colt.matrix._
 import cern.colt.matrix.linalg._
-import spark._
+import org.apache.spark._
 
 /**
  * Alternating least squares matrix factorization.
diff --git a/examples/src/main/scala/spark/examples/SparkHdfsLR.scala b/examples/src/main/scala/org/apache/spark/examples/SparkHdfsLR.scala
similarity index 94%
rename from examples/src/main/scala/spark/examples/SparkHdfsLR.scala
rename to examples/src/main/scala/org/apache/spark/examples/SparkHdfsLR.scala
index 43c91156640c899aaf622c87efdbe646b4adf398..646682878fda5373027b660c53c17e39176d0416 100644
--- a/examples/src/main/scala/spark/examples/SparkHdfsLR.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SparkHdfsLR.scala
@@ -15,13 +15,13 @@
  * limitations under the License.
  */
 
-package spark.examples
+package org.apache.spark.examples
 
 import java.util.Random
 import scala.math.exp
-import spark.util.Vector
-import spark._
-import spark.scheduler.InputFormatInfo
+import org.apache.spark.util.Vector
+import org.apache.spark._
+import org.apache.spark.scheduler.InputFormatInfo
 
 /**
  * Logistic regression based classification.
diff --git a/examples/src/main/scala/spark/examples/SparkKMeans.scala b/examples/src/main/scala/org/apache/spark/examples/SparkKMeans.scala
similarity index 94%
rename from examples/src/main/scala/spark/examples/SparkKMeans.scala
rename to examples/src/main/scala/org/apache/spark/examples/SparkKMeans.scala
index 38ed3b149af2051bbf1426a8f1031680ac37c2e8..f7bf75b4e5038c10e374ae6e26d4e561b2cda568 100644
--- a/examples/src/main/scala/spark/examples/SparkKMeans.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SparkKMeans.scala
@@ -15,12 +15,12 @@
  * limitations under the License.
  */
 
-package spark.examples
+package org.apache.spark.examples
 
 import java.util.Random
-import spark.SparkContext
-import spark.util.Vector
-import spark.SparkContext._
+import org.apache.spark.SparkContext
+import org.apache.spark.util.Vector
+import org.apache.spark.SparkContext._
 import scala.collection.mutable.HashMap
 import scala.collection.mutable.HashSet
 
diff --git a/examples/src/main/scala/spark/examples/SparkLR.scala b/examples/src/main/scala/org/apache/spark/examples/SparkLR.scala
similarity index 95%
rename from examples/src/main/scala/spark/examples/SparkLR.scala
rename to examples/src/main/scala/org/apache/spark/examples/SparkLR.scala
index 52a0d69744ad6005cde158c9f61868cd86e708bf..9ed9fe4d761d5f8ab7a58b9b98137fe59085e779 100644
--- a/examples/src/main/scala/spark/examples/SparkLR.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SparkLR.scala
@@ -15,12 +15,12 @@
  * limitations under the License.
  */
 
-package spark.examples
+package org.apache.spark.examples
 
 import java.util.Random
 import scala.math.exp
-import spark.util.Vector
-import spark._
+import org.apache.spark.util.Vector
+import org.apache.spark._
 
 /**
  * Logistic regression based classification.
diff --git a/examples/src/main/scala/spark/examples/SparkPageRank.scala b/examples/src/main/scala/org/apache/spark/examples/SparkPageRank.scala
similarity index 91%
rename from examples/src/main/scala/spark/examples/SparkPageRank.scala
rename to examples/src/main/scala/org/apache/spark/examples/SparkPageRank.scala
index dedbbd01a3a9c3b0ba6d181721e1e3dfb3812d1f..2721caf08bd176ca61ff3c10a4d285528c2cd179 100644
--- a/examples/src/main/scala/spark/examples/SparkPageRank.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SparkPageRank.scala
@@ -1,7 +1,7 @@
-package spark.examples
+package org.apache.spark.examples
 
-import spark.SparkContext._
-import spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.SparkContext
 
 
 /**
diff --git a/examples/src/main/scala/spark/examples/SparkPi.scala b/examples/src/main/scala/org/apache/spark/examples/SparkPi.scala
similarity index 96%
rename from examples/src/main/scala/spark/examples/SparkPi.scala
rename to examples/src/main/scala/org/apache/spark/examples/SparkPi.scala
index 00560ac9d160a6a3c4e6e55a5423fcd985a8ea53..5a2bc9b0d08eebc70a35731b593e1e7dd5f7e6b7 100644
--- a/examples/src/main/scala/spark/examples/SparkPi.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SparkPi.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.examples
+package org.apache.spark.examples
 
 import scala.math.random
-import spark._
+import org.apache.spark._
 import SparkContext._
 
 /** Computes an approximation to pi */
diff --git a/examples/src/main/scala/spark/examples/SparkTC.scala b/examples/src/main/scala/org/apache/spark/examples/SparkTC.scala
similarity index 97%
rename from examples/src/main/scala/spark/examples/SparkTC.scala
rename to examples/src/main/scala/org/apache/spark/examples/SparkTC.scala
index bf988a953bbb36187ff31a63b76510a6d9324c5d..5a7a9d1bd8f744bcc92c5e8f0a30c50051fd64bc 100644
--- a/examples/src/main/scala/spark/examples/SparkTC.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SparkTC.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.examples
+package org.apache.spark.examples
 
-import spark._
+import org.apache.spark._
 import SparkContext._
 import scala.util.Random
 import scala.collection.mutable
diff --git a/examples/src/main/scala/spark/examples/bagel/PageRankUtils.scala b/examples/src/main/scala/org/apache/spark/examples/bagel/PageRankUtils.scala
similarity index 94%
rename from examples/src/main/scala/spark/examples/bagel/PageRankUtils.scala
rename to examples/src/main/scala/org/apache/spark/examples/bagel/PageRankUtils.scala
index c23ee9895f2b73f139a551ce542d2da1dbfafe7f..cfafbaf23e4c22c06d485cb24be0b35ff7e44fd7 100644
--- a/examples/src/main/scala/spark/examples/bagel/PageRankUtils.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/bagel/PageRankUtils.scala
@@ -15,13 +15,14 @@
  * limitations under the License.
  */
 
-package spark.examples.bagel
+package org.apache.spark.examples.bagel
 
-import spark._
-import spark.SparkContext._
+import org.apache.spark._
+import org.apache.spark.SparkContext._
+import org.apache.spark.serializer.KryoRegistrator
 
-import spark.bagel._
-import spark.bagel.Bagel._
+import org.apache.spark.bagel._
+import org.apache.spark.bagel.Bagel._
 
 import scala.collection.mutable.ArrayBuffer
 
diff --git a/examples/src/main/scala/spark/examples/bagel/WikipediaPageRank.scala b/examples/src/main/scala/org/apache/spark/examples/bagel/WikipediaPageRank.scala
similarity index 92%
rename from examples/src/main/scala/spark/examples/bagel/WikipediaPageRank.scala
rename to examples/src/main/scala/org/apache/spark/examples/bagel/WikipediaPageRank.scala
index 00635a7ffa2f7471c3db86fe3d31d1aea91d9de4..72b5c7b88e19bb8c8e8dd4b08c3d7e32c07b02e5 100644
--- a/examples/src/main/scala/spark/examples/bagel/WikipediaPageRank.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/bagel/WikipediaPageRank.scala
@@ -15,13 +15,13 @@
  * limitations under the License.
  */
 
-package spark.examples.bagel
+package org.apache.spark.examples.bagel
 
-import spark._
-import spark.SparkContext._
+import org.apache.spark._
+import org.apache.spark.SparkContext._
 
-import spark.bagel._
-import spark.bagel.Bagel._
+import org.apache.spark.bagel._
+import org.apache.spark.bagel.Bagel._
 
 import scala.xml.{XML,NodeSeq}
 
@@ -37,7 +37,7 @@ object WikipediaPageRank {
       System.exit(-1)
     }
 
-    System.setProperty("spark.serializer", "spark.KryoSerializer")
+    System.setProperty("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
     System.setProperty("spark.kryo.registrator", classOf[PRKryoRegistrator].getName)
 
     val inputFile = args(0)
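The value of `spark.serializer` is loaded reflectively at runtime, so the old short name `spark.KryoSerializer` would no longer resolve once the class moved; the property has to carry the new fully qualified name. A minimal sketch of the post-rename Kryo setup, using the registrator defined in this file:

    // Both properties take fully qualified class names after the move.
    System.setProperty("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    System.setProperty("spark.kryo.registrator",
      "org.apache.spark.examples.bagel.PRKryoRegistrator")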
diff --git a/examples/src/main/scala/spark/examples/bagel/WikipediaPageRankStandalone.scala b/examples/src/main/scala/org/apache/spark/examples/bagel/WikipediaPageRankStandalone.scala
similarity index 95%
rename from examples/src/main/scala/spark/examples/bagel/WikipediaPageRankStandalone.scala
rename to examples/src/main/scala/org/apache/spark/examples/bagel/WikipediaPageRankStandalone.scala
index c416ddbc584bdb2d20b9b5b5abb55231762f325d..ddf6855325e732529fa13eb1781643dacde7e162 100644
--- a/examples/src/main/scala/spark/examples/bagel/WikipediaPageRankStandalone.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/bagel/WikipediaPageRankStandalone.scala
@@ -15,21 +15,18 @@
  * limitations under the License.
  */
 
-package spark.examples.bagel
+package org.apache.spark.examples.bagel
 
-import spark._
-import serializer.{DeserializationStream, SerializationStream, SerializerInstance}
-import spark.SparkContext._
-
-import spark.bagel._
-import spark.bagel.Bagel._
-
-import scala.xml.{XML,NodeSeq}
+import java.io.{InputStream, OutputStream, DataInputStream, DataOutputStream}
+import java.nio.ByteBuffer
 
 import scala.collection.mutable.ArrayBuffer
+import scala.xml.{XML, NodeSeq}
 
-import java.io.{InputStream, OutputStream, DataInputStream, DataOutputStream}
-import java.nio.ByteBuffer
+import org.apache.spark._
+import org.apache.spark.serializer.{DeserializationStream, SerializationStream, SerializerInstance}
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
 
 object WikipediaPageRankStandalone {
   def main(args: Array[String]) {
@@ -131,7 +128,7 @@ object WikipediaPageRankStandalone {
   }
 }
 
-class WPRSerializer extends spark.serializer.Serializer {
+class WPRSerializer extends org.apache.spark.serializer.Serializer {
   def newInstance(): SerializerInstance = new WPRSerializerInstance()
 }
 
diff --git a/examples/src/main/scala/spark/streaming/examples/ActorWordCount.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/ActorWordCount.scala
similarity index 93%
rename from examples/src/main/scala/spark/streaming/examples/ActorWordCount.scala
rename to examples/src/main/scala/org/apache/spark/streaming/examples/ActorWordCount.scala
index 05d3176478130301d34db93c81de5c5d268ee279..cd3423a07b6b788fb3b7bc6f2c9e8c22ffdc415c 100644
--- a/examples/src/main/scala/spark/streaming/examples/ActorWordCount.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/ActorWordCount.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.streaming.examples
+package org.apache.spark.streaming.examples
 
 import scala.collection.mutable.LinkedList
 import scala.util.Random
@@ -25,11 +25,11 @@ import akka.actor.ActorRef
 import akka.actor.Props
 import akka.actor.actorRef2Scala
 
-import spark.streaming.Seconds
-import spark.streaming.StreamingContext
-import spark.streaming.StreamingContext.toPairDStreamFunctions
-import spark.streaming.receivers.Receiver
-import spark.util.AkkaUtils
+import org.apache.spark.streaming.Seconds
+import org.apache.spark.streaming.StreamingContext
+import org.apache.spark.streaming.StreamingContext.toPairDStreamFunctions
+import org.apache.spark.streaming.receivers.Receiver
+import org.apache.spark.util.AkkaUtils
 
 case class SubscribeReceiver(receiverActor: ActorRef)
 case class UnsubscribeReceiver(receiverActor: ActorRef)
@@ -80,7 +80,7 @@ class FeederActor extends Actor {
  * goes and subscribes to a typical publisher/feeder actor and receives
  * data.
  *
- * @see [[spark.streaming.examples.FeederActor]]
+ * @see [[org.apache.spark.streaming.examples.FeederActor]]
  */
 class SampleActorReceiver[T: ClassManifest](urlOfPublisher: String)
 extends Actor with Receiver {
diff --git a/examples/src/main/scala/spark/streaming/examples/FlumeEventCount.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/FlumeEventCount.scala
similarity index 92%
rename from examples/src/main/scala/spark/streaming/examples/FlumeEventCount.scala
rename to examples/src/main/scala/org/apache/spark/streaming/examples/FlumeEventCount.scala
index 3ab4fc2c37b458381fefadd9b530a241b694800a..9f6e163454a64fe062989c7a5794f9ffd07f165e 100644
--- a/examples/src/main/scala/spark/streaming/examples/FlumeEventCount.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/FlumeEventCount.scala
@@ -15,11 +15,11 @@
  * limitations under the License.
  */
 
-package spark.streaming.examples
+package org.apache.spark.streaming.examples
 
-import spark.util.IntParam
-import spark.storage.StorageLevel
-import spark.streaming._
+import org.apache.spark.util.IntParam
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.streaming._
 
 /**
  *  Produces a count of events received from Flume.
diff --git a/examples/src/main/scala/spark/streaming/examples/HdfsWordCount.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/HdfsWordCount.scala
similarity index 92%
rename from examples/src/main/scala/spark/streaming/examples/HdfsWordCount.scala
rename to examples/src/main/scala/org/apache/spark/streaming/examples/HdfsWordCount.scala
index 30af01a26f42e4243fde65782dda231134d45c9e..bc8564b3ba080aa3ed3f367fb1211736d9035411 100644
--- a/examples/src/main/scala/spark/streaming/examples/HdfsWordCount.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/HdfsWordCount.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.streaming.examples
+package org.apache.spark.streaming.examples
 
-import spark.streaming.{Seconds, StreamingContext}
-import spark.streaming.StreamingContext._
+import org.apache.spark.streaming.{Seconds, StreamingContext}
+import org.apache.spark.streaming.StreamingContext._
 
 
 /**
diff --git a/examples/src/main/scala/spark/streaming/examples/KafkaWordCount.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/KafkaWordCount.scala
similarity index 92%
rename from examples/src/main/scala/spark/streaming/examples/KafkaWordCount.scala
rename to examples/src/main/scala/org/apache/spark/streaming/examples/KafkaWordCount.scala
index d9c76d1a33bf6c1da810bd4d4fca354d45fa2518..12f939d5a7e4b529c05c8dfe77d67e8c3f254b40 100644
--- a/examples/src/main/scala/spark/streaming/examples/KafkaWordCount.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/KafkaWordCount.scala
@@ -15,17 +15,17 @@
  * limitations under the License.
  */
 
-package spark.streaming.examples
+package org.apache.spark.streaming.examples
 
 import java.util.Properties
 import kafka.message.Message
 import kafka.producer.SyncProducerConfig
 import kafka.producer._
-import spark.SparkContext
-import spark.streaming._
-import spark.streaming.StreamingContext._
-import spark.storage.StorageLevel
-import spark.streaming.util.RawTextHelper._
+import org.apache.spark.SparkContext
+import org.apache.spark.streaming._
+import org.apache.spark.streaming.StreamingContext._
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.streaming.util.RawTextHelper._
 
 /**
  * Consumes messages from one or more topics in Kafka and does wordcount.
diff --git a/examples/src/main/scala/spark/streaming/examples/NetworkWordCount.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/NetworkWordCount.scala
similarity index 93%
rename from examples/src/main/scala/spark/streaming/examples/NetworkWordCount.scala
rename to examples/src/main/scala/org/apache/spark/streaming/examples/NetworkWordCount.scala
index b29d79aac53d7fd71d1f9de36452a992eb7999e7..e2487dca5f2cc894098c2e1bb08aee761c85bbea 100644
--- a/examples/src/main/scala/spark/streaming/examples/NetworkWordCount.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/NetworkWordCount.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.streaming.examples
+package org.apache.spark.streaming.examples
 
-import spark.streaming.{Seconds, StreamingContext}
-import spark.streaming.StreamingContext._
+import org.apache.spark.streaming.{Seconds, StreamingContext}
+import org.apache.spark.streaming.StreamingContext._
 
 /**
  * Counts words in UTF8 encoded, '\n' delimited text received from the network every second.
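The streaming examples move identically: `StreamingContext`, `Seconds`, and the pair-DStream implicits now come from `org.apache.spark.streaming`. A hedged sketch of the renamed imports in use (the object name, host, and port are placeholders, and the three-argument `StreamingContext` constructor is assumed):

    import org.apache.spark.streaming.{Seconds, StreamingContext}
    import org.apache.spark.streaming.StreamingContext._  // pair-DStream operations

    object SocketWordCount {
      def main(args: Array[String]) {
        // Two-second batches over a local socket stream.
        val ssc = new StreamingContext("local[2]", "SocketWordCount", Seconds(2))
        val words = ssc.socketTextStream("localhost", 9999).flatMap(_.split(" "))
        words.map(w => (w, 1)).reduceByKey(_ + _).print()
        ssc.start()
      }
    }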
diff --git a/examples/src/main/scala/spark/streaming/examples/QueueStream.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/QueueStream.scala
similarity index 90%
rename from examples/src/main/scala/spark/streaming/examples/QueueStream.scala
rename to examples/src/main/scala/org/apache/spark/streaming/examples/QueueStream.scala
index da36c8c23c65f3df19dcd0d1fd72e0398cab2ab4..fad512eebad1292b3b74edf81339080a0c81d32d 100644
--- a/examples/src/main/scala/spark/streaming/examples/QueueStream.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/QueueStream.scala
@@ -15,11 +15,11 @@
  * limitations under the License.
  */
 
-package spark.streaming.examples
+package org.apache.spark.streaming.examples
 
-import spark.RDD
-import spark.streaming.{Seconds, StreamingContext}
-import spark.streaming.StreamingContext._
+import org.apache.spark.rdd.RDD
+import org.apache.spark.streaming.{Seconds, StreamingContext}
+import org.apache.spark.streaming.StreamingContext._
 
 import scala.collection.mutable.SynchronizedQueue
 
diff --git a/examples/src/main/scala/spark/streaming/examples/RawNetworkGrep.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/RawNetworkGrep.scala
similarity index 90%
rename from examples/src/main/scala/spark/streaming/examples/RawNetworkGrep.scala
rename to examples/src/main/scala/org/apache/spark/streaming/examples/RawNetworkGrep.scala
index 7fb680bcc310ae47db2c8e5b76eca0dec8969f64..0b45c30d20dc35ec492239c155de742def77860f 100644
--- a/examples/src/main/scala/spark/streaming/examples/RawNetworkGrep.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/RawNetworkGrep.scala
@@ -15,20 +15,20 @@
  * limitations under the License.
  */
 
-package spark.streaming.examples
+package org.apache.spark.streaming.examples
 
-import spark.util.IntParam
-import spark.storage.StorageLevel
+import org.apache.spark.util.IntParam
+import org.apache.spark.storage.StorageLevel
 
-import spark.streaming._
-import spark.streaming.util.RawTextHelper
+import org.apache.spark.streaming._
+import org.apache.spark.streaming.util.RawTextHelper
 
 /**
  * Receives text from multiple rawNetworkStreams and counts how many '\n' delimited
  * lines have the word 'the' in them. This is useful for benchmarking purposes. This
- * will only work with spark.streaming.util.RawTextSender running on all worker nodes
+ * will only work with org.apache.spark.streaming.util.RawTextSender running on all worker nodes
  * and with Spark using Kryo serialization (set Java property "spark.serializer" to
- * "spark.KryoSerializer").
+ * "org.apache.spark.serializer.KryoSerializer").
  * Usage: RawNetworkGrep <master> <numStreams> <host> <port> <batchMillis>
  *   <master> is the Spark master URL
  *   <numStreams> is the number of rawNetworkStreams, which should be the same as the number
diff --git a/examples/src/main/scala/spark/streaming/examples/StatefulNetworkWordCount.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/StatefulNetworkWordCount.scala
similarity index 95%
rename from examples/src/main/scala/spark/streaming/examples/StatefulNetworkWordCount.scala
rename to examples/src/main/scala/org/apache/spark/streaming/examples/StatefulNetworkWordCount.scala
index b709fc3c87ca1487163cafcf4b9232674fea5f10..cb30c4edb30ce408faff73dc504c50f3a426f882 100644
--- a/examples/src/main/scala/spark/streaming/examples/StatefulNetworkWordCount.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/StatefulNetworkWordCount.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.streaming.examples
+package org.apache.spark.streaming.examples
 
-import spark.streaming._
-import spark.streaming.StreamingContext._
+import org.apache.spark.streaming._
+import org.apache.spark.streaming.StreamingContext._
 
 /**
  * Counts words cumulatively in UTF8 encoded, '\n' delimited text received from the network every second.
diff --git a/examples/src/main/scala/spark/streaming/examples/TwitterAlgebirdCMS.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/TwitterAlgebirdCMS.scala
similarity index 94%
rename from examples/src/main/scala/spark/streaming/examples/TwitterAlgebirdCMS.scala
rename to examples/src/main/scala/org/apache/spark/streaming/examples/TwitterAlgebirdCMS.scala
index 8770abd57e02d72f6aa94926ea09ee94364cdcaf..35b6329ab3152caca3dd7564a88448cf2e9d284f 100644
--- a/examples/src/main/scala/spark/streaming/examples/TwitterAlgebirdCMS.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/TwitterAlgebirdCMS.scala
@@ -15,13 +15,13 @@
  * limitations under the License.
  */
 
-package spark.streaming.examples
+package org.apache.spark.streaming.examples
 
-import spark.streaming.{Seconds, StreamingContext}
-import spark.storage.StorageLevel
+import org.apache.spark.streaming.{Seconds, StreamingContext}
+import org.apache.spark.storage.StorageLevel
 import com.twitter.algebird._
-import spark.streaming.StreamingContext._
-import spark.SparkContext._
+import org.apache.spark.streaming.StreamingContext._
+import org.apache.spark.SparkContext._
 
 /**
  * Illustrates the use of the Count-Min Sketch, from Twitter's Algebird library, to compute
diff --git a/examples/src/main/scala/spark/streaming/examples/TwitterAlgebirdHLL.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/TwitterAlgebirdHLL.scala
similarity index 94%
rename from examples/src/main/scala/spark/streaming/examples/TwitterAlgebirdHLL.scala
rename to examples/src/main/scala/org/apache/spark/streaming/examples/TwitterAlgebirdHLL.scala
index cba5c986beac62e811c1c0918d1f0ac5b7d8a7d0..8bfde2a8297c1a133c80d3be08f8ad6ae0b145f3 100644
--- a/examples/src/main/scala/spark/streaming/examples/TwitterAlgebirdHLL.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/TwitterAlgebirdHLL.scala
@@ -15,13 +15,13 @@
  * limitations under the License.
  */
 
-package spark.streaming.examples
+package org.apache.spark.streaming.examples
 
-import spark.streaming.{Seconds, StreamingContext}
-import spark.storage.StorageLevel
+import org.apache.spark.streaming.{Seconds, StreamingContext}
+import org.apache.spark.storage.StorageLevel
 import com.twitter.algebird.HyperLogLog._
 import com.twitter.algebird.HyperLogLogMonoid
-import spark.streaming.dstream.TwitterInputDStream
+import org.apache.spark.streaming.dstream.TwitterInputDStream
 
 /**
  * Illustrates the use of the HyperLogLog algorithm, from Twitter's Algebird library, to compute
diff --git a/examples/src/main/scala/spark/streaming/examples/TwitterPopularTags.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/TwitterPopularTags.scala
similarity index 94%
rename from examples/src/main/scala/spark/streaming/examples/TwitterPopularTags.scala
rename to examples/src/main/scala/org/apache/spark/streaming/examples/TwitterPopularTags.scala
index 682b99f75e5ec6b9082a3f2f9c71c0fd29c1e50e..27aa6b14bf221ab08752ce721a2548e85e67f98b 100644
--- a/examples/src/main/scala/spark/streaming/examples/TwitterPopularTags.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/TwitterPopularTags.scala
@@ -15,11 +15,11 @@
  * limitations under the License.
  */
 
-package spark.streaming.examples
+package org.apache.spark.streaming.examples
 
-import spark.streaming.{Seconds, StreamingContext}
+import org.apache.spark.streaming.{Seconds, StreamingContext}
 import StreamingContext._
-import spark.SparkContext._
+import org.apache.spark.SparkContext._
 
 /**
  * Calculates popular hashtags (topics) over sliding 10 and 60 second windows from a Twitter
diff --git a/examples/src/main/scala/spark/streaming/examples/ZeroMQWordCount.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/ZeroMQWordCount.scala
similarity index 95%
rename from examples/src/main/scala/spark/streaming/examples/ZeroMQWordCount.scala
rename to examples/src/main/scala/org/apache/spark/streaming/examples/ZeroMQWordCount.scala
index a0cae06c3093fbae6bf73fad450a72390b6c640d..c8743b9e25523f70574fc01d8e3ab232c5c0eb97 100644
--- a/examples/src/main/scala/spark/streaming/examples/ZeroMQWordCount.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/ZeroMQWordCount.scala
@@ -15,13 +15,13 @@
  * limitations under the License.
  */
 
-package spark.streaming.examples
+package org.apache.spark.streaming.examples
 
 import akka.actor.ActorSystem
 import akka.actor.actorRef2Scala
 import akka.zeromq._
-import spark.streaming.{ Seconds, StreamingContext }
-import spark.streaming.StreamingContext._
+import org.apache.spark.streaming.{ Seconds, StreamingContext }
+import org.apache.spark.streaming.StreamingContext._
 import akka.zeromq.Subscribe
 
 /**
diff --git a/examples/src/main/scala/spark/streaming/examples/clickstream/PageViewGenerator.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/clickstream/PageViewGenerator.scala
similarity index 98%
rename from examples/src/main/scala/spark/streaming/examples/clickstream/PageViewGenerator.scala
rename to examples/src/main/scala/org/apache/spark/streaming/examples/clickstream/PageViewGenerator.scala
index dd36bbbf3207ccc8823b0df989ba546c0fac37f0..884d6d6f3414c7c18d511b45e1c8ea6618fee8be 100644
--- a/examples/src/main/scala/spark/streaming/examples/clickstream/PageViewGenerator.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/clickstream/PageViewGenerator.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.streaming.examples.clickstream
+package org.apache.spark.streaming.examples.clickstream
 
 import java.net.{InetAddress,ServerSocket,Socket,SocketException}
 import java.io.{InputStreamReader, BufferedReader, PrintWriter}
diff --git a/examples/src/main/scala/spark/streaming/examples/clickstream/PageViewStream.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/clickstream/PageViewStream.scala
similarity index 95%
rename from examples/src/main/scala/spark/streaming/examples/clickstream/PageViewStream.scala
rename to examples/src/main/scala/org/apache/spark/streaming/examples/clickstream/PageViewStream.scala
index 152da2348989051fb00fb3d68a25953163975d1c..8282cc9269c13fc287e0e7dc38c0e921139efce5 100644
--- a/examples/src/main/scala/spark/streaming/examples/clickstream/PageViewStream.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/clickstream/PageViewStream.scala
@@ -15,11 +15,11 @@
  * limitations under the License.
  */
 
-package spark.streaming.examples.clickstream
+package org.apache.spark.streaming.examples.clickstream
 
-import spark.streaming.{Seconds, StreamingContext}
-import spark.streaming.StreamingContext._
-import spark.SparkContext._
+import org.apache.spark.streaming.{Seconds, StreamingContext}
+import org.apache.spark.streaming.StreamingContext._
+import org.apache.spark.SparkContext._
 
 /** Analyses a streaming dataset of web page views. This class demonstrates several types of
   * operators available in Spark Streaming.
diff --git a/mllib/pom.xml b/mllib/pom.xml
index ab31d5734e42a249e83f3b97c751c2d3fc981c98..966caf6835561f0a876970aeaf2118c6568d16e4 100644
--- a/mllib/pom.xml
+++ b/mllib/pom.xml
@@ -19,21 +19,21 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <parent>
-    <groupId>org.spark-project</groupId>
+    <groupId>org.apache.spark</groupId>
     <artifactId>spark-parent</artifactId>
     <version>0.8.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 
-  <groupId>org.spark-project</groupId>
+  <groupId>org.apache.spark</groupId>
   <artifactId>spark-mllib</artifactId>
   <packaging>jar</packaging>
   <name>Spark Project ML Library</name>
-  <url>http://spark-project.org/</url>
+  <url>http://spark.incubator.apache.org/</url>
 
   <dependencies>
     <dependency>
-      <groupId>org.spark-project</groupId>
+      <groupId>org.apache.spark</groupId>
       <artifactId>spark-core</artifactId>
       <version>${project.version}</version>
     </dependency>
diff --git a/mllib/src/main/scala/spark/mllib/classification/ClassificationModel.scala b/mllib/src/main/scala/org/apache/spark/mllib/classification/ClassificationModel.scala
similarity index 88%
rename from mllib/src/main/scala/spark/mllib/classification/ClassificationModel.scala
rename to mllib/src/main/scala/org/apache/spark/mllib/classification/ClassificationModel.scala
index 70fae8c15a27549a7f935fc4b655fd571cff526e..60cb44ce8958981eccb871f06f756c8554519ad8 100644
--- a/mllib/src/main/scala/spark/mllib/classification/ClassificationModel.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/classification/ClassificationModel.scala
@@ -1,6 +1,6 @@
-package spark.mllib.classification
+package org.apache.spark.mllib.classification
 
-import spark.RDD
+import org.apache.spark.rdd.RDD
 
 trait ClassificationModel extends Serializable {
   /**
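Beyond the package prefix, `RDD` itself leaves the root package for `org.apache.spark.rdd`, which is why `import spark.RDD` becomes `import org.apache.spark.rdd.RDD` throughout mllib. Call sites are untouched; only the import path changes, as in this hypothetical helper:

    import org.apache.spark.rdd.RDD

    object PredictionUtils {
      // Turns raw scores into 0/1 labels; RDD usage is identical after the move.
      def threshold(scores: RDD[Double], cutoff: Double): RDD[Double] =
        scores.map(s => if (s > cutoff) 1.0 else 0.0)
    }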
diff --git a/mllib/src/main/scala/spark/mllib/classification/LogisticRegression.scala b/mllib/src/main/scala/org/apache/spark/mllib/classification/LogisticRegression.scala
similarity index 95%
rename from mllib/src/main/scala/spark/mllib/classification/LogisticRegression.scala
rename to mllib/src/main/scala/org/apache/spark/mllib/classification/LogisticRegression.scala
index 482e4a674579474bb54f0f5d4c7c1df22b597e1f..50aede9c07d66905ba0e8877e1536f43adb6b72d 100644
--- a/mllib/src/main/scala/spark/mllib/classification/LogisticRegression.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/classification/LogisticRegression.scala
@@ -15,15 +15,16 @@
  * limitations under the License.
  */
 
-package spark.mllib.classification
+package org.apache.spark.mllib.classification
 
 import scala.math.round
 
-import spark.{Logging, RDD, SparkContext}
-import spark.mllib.optimization._
-import spark.mllib.regression._
-import spark.mllib.util.MLUtils
-import spark.mllib.util.DataValidators
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+import org.apache.spark.mllib.optimization._
+import org.apache.spark.mllib.regression._
+import org.apache.spark.mllib.util.MLUtils
+import org.apache.spark.mllib.util.DataValidators
 
 import org.jblas.DoubleMatrix
 
diff --git a/mllib/src/main/scala/spark/mllib/classification/SVM.scala b/mllib/src/main/scala/org/apache/spark/mllib/classification/SVM.scala
similarity index 95%
rename from mllib/src/main/scala/spark/mllib/classification/SVM.scala
rename to mllib/src/main/scala/org/apache/spark/mllib/classification/SVM.scala
index 69393cd7b07982522a65fda44d0a5fb335598ff1..3511e24bce24821930060752370ea0c191521b49 100644
--- a/mllib/src/main/scala/spark/mllib/classification/SVM.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/classification/SVM.scala
@@ -15,15 +15,16 @@
  * limitations under the License.
  */
 
-package spark.mllib.classification
+package org.apache.spark.mllib.classification
 
 import scala.math.signum
 
-import spark.{Logging, RDD, SparkContext}
-import spark.mllib.optimization._
-import spark.mllib.regression._
-import spark.mllib.util.MLUtils
-import spark.mllib.util.DataValidators
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+import org.apache.spark.mllib.optimization._
+import org.apache.spark.mllib.regression._
+import org.apache.spark.mllib.util.MLUtils
+import org.apache.spark.mllib.util.DataValidators
 
 import org.jblas.DoubleMatrix
 
diff --git a/mllib/src/main/scala/spark/mllib/clustering/KMeans.scala b/mllib/src/main/scala/org/apache/spark/mllib/clustering/KMeans.scala
similarity index 98%
rename from mllib/src/main/scala/spark/mllib/clustering/KMeans.scala
rename to mllib/src/main/scala/org/apache/spark/mllib/clustering/KMeans.scala
index 97e3d110ae5e3a44c34abd49dd496dddd7aaa014..edbf77dbcc3a44b9d7b38a6080904a8dbcf2079b 100644
--- a/mllib/src/main/scala/spark/mllib/clustering/KMeans.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/clustering/KMeans.scala
@@ -15,15 +15,16 @@
  * limitations under the License.
  */
 
-package spark.mllib.clustering
+package org.apache.spark.mllib.clustering
 
 import scala.collection.mutable.ArrayBuffer
 import scala.util.Random
 
-import spark.{SparkContext, RDD}
-import spark.SparkContext._
-import spark.Logging
-import spark.mllib.util.MLUtils
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
+import org.apache.spark.Logging
+import org.apache.spark.mllib.util.MLUtils
 
 import org.jblas.DoubleMatrix
 
diff --git a/mllib/src/main/scala/spark/mllib/clustering/KMeansModel.scala b/mllib/src/main/scala/org/apache/spark/mllib/clustering/KMeansModel.scala
similarity index 90%
rename from mllib/src/main/scala/spark/mllib/clustering/KMeansModel.scala
rename to mllib/src/main/scala/org/apache/spark/mllib/clustering/KMeansModel.scala
index b8f80e80cd7602e0b6f03e684443761fc74d9773..cfc81c985aa64616fe56addf01671d58228848fe 100644
--- a/mllib/src/main/scala/spark/mllib/clustering/KMeansModel.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/clustering/KMeansModel.scala
@@ -15,11 +15,11 @@
  * limitations under the License.
  */
 
-package spark.mllib.clustering
+package org.apache.spark.mllib.clustering
 
-import spark.RDD
-import spark.SparkContext._
-import spark.mllib.util.MLUtils
+import org.apache.spark.rdd.RDD
+import org.apache.spark.SparkContext._
+import org.apache.spark.mllib.util.MLUtils
 
 
 /**
diff --git a/mllib/src/main/scala/spark/mllib/clustering/LocalKMeans.scala b/mllib/src/main/scala/org/apache/spark/mllib/clustering/LocalKMeans.scala
similarity index 98%
rename from mllib/src/main/scala/spark/mllib/clustering/LocalKMeans.scala
rename to mllib/src/main/scala/org/apache/spark/mllib/clustering/LocalKMeans.scala
index 89fe7d7e85391dcade62ec5ec9d45bda8360aecd..baf8251d8fc53df4e030b6d4d03de7f0b58a7e45 100644
--- a/mllib/src/main/scala/spark/mllib/clustering/LocalKMeans.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/clustering/LocalKMeans.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.mllib.clustering
+package org.apache.spark.mllib.clustering
 
 import scala.util.Random
 
diff --git a/mllib/src/main/scala/spark/mllib/optimization/Gradient.scala b/mllib/src/main/scala/org/apache/spark/mllib/optimization/Gradient.scala
similarity index 98%
rename from mllib/src/main/scala/spark/mllib/optimization/Gradient.scala
rename to mllib/src/main/scala/org/apache/spark/mllib/optimization/Gradient.scala
index 05568f55af06077a6d10e9182d3a6eaf9485c30e..749e7364f4de1afde6371cec3cf5f9515bfd50d1 100644
--- a/mllib/src/main/scala/spark/mllib/optimization/Gradient.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/optimization/Gradient.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.mllib.optimization
+package org.apache.spark.mllib.optimization
 
 import org.jblas.DoubleMatrix
 
diff --git a/mllib/src/main/scala/spark/mllib/optimization/GradientDescent.scala b/mllib/src/main/scala/org/apache/spark/mllib/optimization/GradientDescent.scala
similarity index 96%
rename from mllib/src/main/scala/spark/mllib/optimization/GradientDescent.scala
rename to mllib/src/main/scala/org/apache/spark/mllib/optimization/GradientDescent.scala
index 31917df7e87124a9873b238481452fbd1bc078af..b77364e08ddacffdad88163438ea996b9f79d7e7 100644
--- a/mllib/src/main/scala/spark/mllib/optimization/GradientDescent.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/optimization/GradientDescent.scala
@@ -15,10 +15,11 @@
  * limitations under the License.
  */
 
-package spark.mllib.optimization
+package org.apache.spark.mllib.optimization
 
-import spark.{Logging, RDD, SparkContext}
-import spark.SparkContext._
+import org.apache.spark.{Logging, SparkContext}
+import org.apache.spark.rdd.RDD
+import org.apache.spark.SparkContext._
 
 import org.jblas.DoubleMatrix
 
@@ -29,8 +30,9 @@ import scala.collection.mutable.ArrayBuffer
  * @param gradient Gradient function to be used.
  * @param updater Updater to be used to update weights after every iteration.
  */
-class GradientDescent(var gradient: Gradient, var updater: Updater) extends Optimizer {
-
+class GradientDescent(var gradient: Gradient, var updater: Updater)
+  extends Optimizer with Logging
+{
   private var stepSize: Double = 1.0
   private var numIterations: Int = 100
   private var regParam: Double = 0.0
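Besides the rename, this hunk carries a small functional change: `GradientDescent` now mixes in `Logging` alongside `Optimizer`, gaining the `log*` helpers from `org.apache.spark.Logging`. A hedged sketch of that mixin pattern (the `Trainer` class is a placeholder):

    import org.apache.spark.Logging

    class Trainer extends Logging {
      def run(numIterations: Int) {
        // logInfo comes from the Logging trait; the message is evaluated lazily.
        logInfo("Running " + numIterations + " iterations of gradient descent")
      }
    }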
diff --git a/mllib/src/main/scala/spark/mllib/optimization/Optimizer.scala b/mllib/src/main/scala/org/apache/spark/mllib/optimization/Optimizer.scala
similarity index 92%
rename from mllib/src/main/scala/spark/mllib/optimization/Optimizer.scala
rename to mllib/src/main/scala/org/apache/spark/mllib/optimization/Optimizer.scala
index 76a519c338847ea12a87168057516c7c58197fc3..94d30b56f212bbb4f6cba28d9cd7f646f8f13d99 100644
--- a/mllib/src/main/scala/spark/mllib/optimization/Optimizer.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/optimization/Optimizer.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.mllib.optimization
+package org.apache.spark.mllib.optimization
 
-import spark.RDD
+import org.apache.spark.rdd.RDD
 
 trait Optimizer {
 
diff --git a/mllib/src/main/scala/spark/mllib/optimization/Updater.scala b/mllib/src/main/scala/org/apache/spark/mllib/optimization/Updater.scala
similarity index 98%
rename from mllib/src/main/scala/spark/mllib/optimization/Updater.scala
rename to mllib/src/main/scala/org/apache/spark/mllib/optimization/Updater.scala
index db67d6b0bcd7d7c11d7c0fdc08d72c710e00ba73..4c51f4f881f76e1f02b8342219a1fd94ffed3a9c 100644
--- a/mllib/src/main/scala/spark/mllib/optimization/Updater.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/optimization/Updater.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.mllib.optimization
+package org.apache.spark.mllib.optimization
 
 import scala.math._
 import org.jblas.DoubleMatrix
diff --git a/mllib/src/main/scala/spark/mllib/recommendation/ALS.scala b/mllib/src/main/scala/org/apache/spark/mllib/recommendation/ALS.scala
similarity index 98%
rename from mllib/src/main/scala/spark/mllib/recommendation/ALS.scala
rename to mllib/src/main/scala/org/apache/spark/mllib/recommendation/ALS.scala
index dbfbf5997540501012afa8b6ab97b74b988673ce..be002d02bcd3e95773a8aa5bea87885ae8dfc73c 100644
--- a/mllib/src/main/scala/spark/mllib/recommendation/ALS.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/recommendation/ALS.scala
@@ -15,16 +15,17 @@
  * limitations under the License.
  */
 
-package spark.mllib.recommendation
+package org.apache.spark.mllib.recommendation
 
 import scala.collection.mutable.{ArrayBuffer, BitSet}
 import scala.util.Random
 import scala.util.Sorting
 
-import spark.{HashPartitioner, Partitioner, SparkContext, RDD}
-import spark.storage.StorageLevel
-import spark.KryoRegistrator
-import spark.SparkContext._
+import org.apache.spark.{HashPartitioner, Partitioner, SparkContext}
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.rdd.RDD
+import org.apache.spark.serializer.KryoRegistrator
+import org.apache.spark.SparkContext._
 
 import com.esotericsoftware.kryo.Kryo
 import org.jblas.{DoubleMatrix, SimpleBlas, Solve}
@@ -432,7 +433,7 @@ object ALS {
     val (master, ratingsFile, rank, iters, outputDir) =
       (args(0), args(1), args(2).toInt, args(3).toInt, args(4))
     val blocks = if (args.length == 6) args(5).toInt else -1
-    System.setProperty("spark.serializer", "spark.KryoSerializer")
+    System.setProperty("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
     System.setProperty("spark.kryo.registrator", classOf[ALSRegistrator].getName)
     System.setProperty("spark.kryo.referenceTracking", "false")
     System.setProperty("spark.kryoserializer.buffer.mb", "8")
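`KryoRegistrator` makes the same move into `org.apache.spark.serializer` (hence the new import above), so custom registrators such as `ALSRegistrator` change only their import line. A hedged sketch of a registrator under the new path (the `Edge` class and `MyRegistrator` are placeholders):

    import com.esotericsoftware.kryo.Kryo
    import org.apache.spark.serializer.KryoRegistrator

    case class Edge(src: Int, dst: Int)  // placeholder application class

    class MyRegistrator extends KryoRegistrator {
      def registerClasses(kryo: Kryo) {
        // Registering classes lets Kryo write compact IDs instead of full names.
        kryo.register(classOf[Edge])
      }
    }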
diff --git a/mllib/src/main/scala/spark/mllib/recommendation/MatrixFactorizationModel.scala b/mllib/src/main/scala/org/apache/spark/mllib/recommendation/MatrixFactorizationModel.scala
similarity index 94%
rename from mllib/src/main/scala/spark/mllib/recommendation/MatrixFactorizationModel.scala
rename to mllib/src/main/scala/org/apache/spark/mllib/recommendation/MatrixFactorizationModel.scala
index 5e21717da525f5d9b89b819449670a931e20b497..af43d89c70f05948ae50c539804ef1de4a8fe6f2 100644
--- a/mllib/src/main/scala/spark/mllib/recommendation/MatrixFactorizationModel.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/recommendation/MatrixFactorizationModel.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.mllib.recommendation
+package org.apache.spark.mllib.recommendation
 
-import spark.RDD
-import spark.SparkContext._
+import org.apache.spark.rdd.RDD
+import org.apache.spark.SparkContext._
 
 import org.jblas._
 
diff --git a/mllib/src/main/scala/spark/mllib/regression/GeneralizedLinearAlgorithm.scala b/mllib/src/main/scala/org/apache/spark/mllib/regression/GeneralizedLinearAlgorithm.scala
similarity index 95%
rename from mllib/src/main/scala/spark/mllib/regression/GeneralizedLinearAlgorithm.scala
rename to mllib/src/main/scala/org/apache/spark/mllib/regression/GeneralizedLinearAlgorithm.scala
index d164d415d6a4005e73f3b8267d9be204ce2baff6..f98b0b536deaa00a3a19b9c0af9d701ded297399 100644
--- a/mllib/src/main/scala/spark/mllib/regression/GeneralizedLinearAlgorithm.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/regression/GeneralizedLinearAlgorithm.scala
@@ -15,10 +15,11 @@
  * limitations under the License.
  */
 
-package spark.mllib.regression
+package org.apache.spark.mllib.regression
 
-import spark.{Logging, RDD, SparkException}
-import spark.mllib.optimization._
+import org.apache.spark.{Logging, SparkException}
+import org.apache.spark.rdd.RDD
+import org.apache.spark.mllib.optimization._
 
 import org.jblas.DoubleMatrix
 
@@ -52,7 +53,7 @@ abstract class GeneralizedLinearModel(val weights: Array[Double], val intercept:
    * @param testData RDD representing data points to be predicted
    * @return RDD[Double] where each entry contains the corresponding prediction
    */
-  def predict(testData: spark.RDD[Array[Double]]): RDD[Double] = {
+  def predict(testData: RDD[Array[Double]]): RDD[Double] = {
     // A small optimization to avoid serializing the entire model. Only the weightsMatrix
     // and intercept is needed.
     val localWeights = weightsMatrix
diff --git a/mllib/src/main/scala/spark/mllib/regression/LabeledPoint.scala b/mllib/src/main/scala/org/apache/spark/mllib/regression/LabeledPoint.scala
similarity index 96%
rename from mllib/src/main/scala/spark/mllib/regression/LabeledPoint.scala
rename to mllib/src/main/scala/org/apache/spark/mllib/regression/LabeledPoint.scala
index 3de60482c54cf75bbba7241c90c8c5041826cbc0..63240e24dc29c2999b06a8753b1700969851c6ea 100644
--- a/mllib/src/main/scala/spark/mllib/regression/LabeledPoint.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/regression/LabeledPoint.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.mllib.regression
+package org.apache.spark.mllib.regression
 
 /**
  * Class that represents the features and labels of a data point.
diff --git a/mllib/src/main/scala/spark/mllib/regression/Lasso.scala b/mllib/src/main/scala/org/apache/spark/mllib/regression/Lasso.scala
similarity index 97%
rename from mllib/src/main/scala/spark/mllib/regression/Lasso.scala
rename to mllib/src/main/scala/org/apache/spark/mllib/regression/Lasso.scala
index 0f33456ef435758a815a20bc5fbb8a6531718e2f..d959695325984cb8cba0f1cf9c788b0309a09c51 100644
--- a/mllib/src/main/scala/spark/mllib/regression/Lasso.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/regression/Lasso.scala
@@ -15,11 +15,12 @@
  * limitations under the License.
  */
 
-package spark.mllib.regression
+package org.apache.spark.mllib.regression
 
-import spark.{Logging, RDD, SparkContext}
-import spark.mllib.optimization._
-import spark.mllib.util.MLUtils
+import org.apache.spark.{Logging, SparkContext}
+import org.apache.spark.rdd.RDD
+import org.apache.spark.mllib.optimization._
+import org.apache.spark.mllib.util.MLUtils
 
 import org.jblas.DoubleMatrix
 
diff --git a/mllib/src/main/scala/spark/mllib/regression/LinearRegression.scala b/mllib/src/main/scala/org/apache/spark/mllib/regression/LinearRegression.scala
similarity index 96%
rename from mllib/src/main/scala/spark/mllib/regression/LinearRegression.scala
rename to mllib/src/main/scala/org/apache/spark/mllib/regression/LinearRegression.scala
index 885ff5a30d423ade13058f3f1d8af63e32bb32b7..ae95ea24fcd7c84b5293d27a743e70fab92fd359 100644
--- a/mllib/src/main/scala/spark/mllib/regression/LinearRegression.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/regression/LinearRegression.scala
@@ -15,11 +15,12 @@
  * limitations under the License.
  */
 
-package spark.mllib.regression
+package org.apache.spark.mllib.regression
 
-import spark.{Logging, RDD, SparkContext}
-import spark.mllib.optimization._
-import spark.mllib.util.MLUtils
+import org.apache.spark.{Logging, SparkContext}
+import org.apache.spark.rdd.RDD
+import org.apache.spark.mllib.optimization._
+import org.apache.spark.mllib.util.MLUtils
 
 import org.jblas.DoubleMatrix
 
diff --git a/mllib/src/main/scala/spark/mllib/regression/RegressionModel.scala b/mllib/src/main/scala/org/apache/spark/mllib/regression/RegressionModel.scala
similarity index 94%
rename from mllib/src/main/scala/spark/mllib/regression/RegressionModel.scala
rename to mllib/src/main/scala/org/apache/spark/mllib/regression/RegressionModel.scala
index b845ba1a890ce3fcd0f218818a1ad922c648b574..423afc32d665cf25352a365767ae547512e726a6 100644
--- a/mllib/src/main/scala/spark/mllib/regression/RegressionModel.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/regression/RegressionModel.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.mllib.regression
+package org.apache.spark.mllib.regression
 
-import spark.RDD
+import org.apache.spark.rdd.RDD
 
 trait RegressionModel extends Serializable {
   /**
diff --git a/mllib/src/main/scala/spark/mllib/regression/RidgeRegression.scala b/mllib/src/main/scala/org/apache/spark/mllib/regression/RidgeRegression.scala
similarity index 97%
rename from mllib/src/main/scala/spark/mllib/regression/RidgeRegression.scala
rename to mllib/src/main/scala/org/apache/spark/mllib/regression/RidgeRegression.scala
index cb1303dd99ed3848b3377bc4c941492771436253..b29508d2b9f0fd4d2a9502ed9f21a780142f90e9 100644
--- a/mllib/src/main/scala/spark/mllib/regression/RidgeRegression.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/regression/RidgeRegression.scala
@@ -15,11 +15,12 @@
  * limitations under the License.
  */
 
-package spark.mllib.regression
+package org.apache.spark.mllib.regression
 
-import spark.{Logging, RDD, SparkContext}
-import spark.mllib.optimization._
-import spark.mllib.util.MLUtils
+import org.apache.spark.{Logging, SparkContext}
+import org.apache.spark.rdd.RDD
+import org.apache.spark.mllib.optimization._
+import org.apache.spark.mllib.util.MLUtils
 
 import org.jblas.DoubleMatrix
 
diff --git a/mllib/src/main/scala/spark/mllib/util/DataValidators.scala b/mllib/src/main/scala/org/apache/spark/mllib/util/DataValidators.scala
similarity index 90%
rename from mllib/src/main/scala/spark/mllib/util/DataValidators.scala
rename to mllib/src/main/scala/org/apache/spark/mllib/util/DataValidators.scala
index 57553accf1fc5f1a5a5d4d1253ee5c6da0c5acc0..8b55bce7c4bec7d08e5decd262fce97e2acdc976 100644
--- a/mllib/src/main/scala/spark/mllib/util/DataValidators.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/util/DataValidators.scala
@@ -15,10 +15,11 @@
  * limitations under the License.
  */
 
-package spark.mllib.util
+package org.apache.spark.mllib.util
 
-import spark.{RDD, Logging}
-import spark.mllib.regression.LabeledPoint
+import org.apache.spark.Logging
+import org.apache.spark.rdd.RDD
+import org.apache.spark.mllib.regression.LabeledPoint
 
 /**
  * A collection of methods used to validate data before applying ML algorithms.
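For a concrete sense of what such a validator looks like under the new package names, here is a hypothetical check in the style this file describes (the member names in the real file are not shown in this hunk, so this is illustrative only):

    import org.apache.spark.rdd.RDD
    import org.apache.spark.mllib.regression.LabeledPoint

    // Hypothetical validator: true only when every label is 0.0 or 1.0,
    // as a binary classifier expects.
    val binaryLabels: RDD[LabeledPoint] => Boolean = data =>
      data.filter(p => p.label != 0.0 && p.label != 1.0).count() == 0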
diff --git a/mllib/src/main/scala/spark/mllib/util/KMeansDataGenerator.scala b/mllib/src/main/scala/org/apache/spark/mllib/util/KMeansDataGenerator.scala
similarity index 96%
rename from mllib/src/main/scala/spark/mllib/util/KMeansDataGenerator.scala
rename to mllib/src/main/scala/org/apache/spark/mllib/util/KMeansDataGenerator.scala
index 672b63f65ab9d35681ab0d17eee1d15bda481a5f..9109189dff52fd671246cef6104257b94dacb5ee 100644
--- a/mllib/src/main/scala/spark/mllib/util/KMeansDataGenerator.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/util/KMeansDataGenerator.scala
@@ -15,11 +15,12 @@
  * limitations under the License.
  */
 
-package spark.mllib.util
+package org.apache.spark.mllib.util
 
 import scala.util.Random
 
-import spark.{RDD, SparkContext}
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
 
 /**
  * Generate test data for KMeans. This class first chooses k cluster centers
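A minimal driver sketch for this generator under the renamed package; the generateKMeansRDD signature (points, k, dimensions, scaling factor, partitions) is assumed from the 0.8-era API:

    import org.apache.spark.SparkContext
    import org.apache.spark.mllib.clustering.KMeans
    import org.apache.spark.mllib.util.KMeansDataGenerator

    val sc = new SparkContext("local", "KMeansDataExample")
    // 10000 points in 3 dimensions drawn around 5 centers scaled by 10.0.
    val data = KMeansDataGenerator.generateKMeansRDD(sc, 10000, 5, 3, 10.0, 2)
    val model = KMeans.train(data, 5, 20)   // k = 5, 20 iterations
    sc.stop()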
diff --git a/mllib/src/main/scala/spark/mllib/util/LinearDataGenerator.scala b/mllib/src/main/scala/org/apache/spark/mllib/util/LinearDataGenerator.scala
similarity index 95%
rename from mllib/src/main/scala/spark/mllib/util/LinearDataGenerator.scala
rename to mllib/src/main/scala/org/apache/spark/mllib/util/LinearDataGenerator.scala
index 9f48477f84ab019f4a89c71f9cc7eb7188a40b00..bc5045fb05d3bf5df0f4079a5c486320a5668e42 100644
--- a/mllib/src/main/scala/spark/mllib/util/LinearDataGenerator.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/util/LinearDataGenerator.scala
@@ -15,16 +15,16 @@
  * limitations under the License.
  */
 
-package spark.mllib.util
+package org.apache.spark.mllib.util
 
 import scala.collection.JavaConversions._
 import scala.util.Random
 
 import org.jblas.DoubleMatrix
 
-import spark.{RDD, SparkContext}
-import spark.mllib.regression.LabeledPoint
-import spark.mllib.regression.LabeledPoint
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+import org.apache.spark.mllib.regression.LabeledPoint
 
 /**
 * Generate sample data for linear regression. This class generates
diff --git a/mllib/src/main/scala/spark/mllib/util/LogisticRegressionDataGenerator.scala b/mllib/src/main/scala/org/apache/spark/mllib/util/LogisticRegressionDataGenerator.scala
similarity index 94%
rename from mllib/src/main/scala/spark/mllib/util/LogisticRegressionDataGenerator.scala
rename to mllib/src/main/scala/org/apache/spark/mllib/util/LogisticRegressionDataGenerator.scala
index d6402f23e2d94dd5724593622e00b97c52162b72..52c4a71d621a11d73749279d2b9767a9a68b914b 100644
--- a/mllib/src/main/scala/spark/mllib/util/LogisticRegressionDataGenerator.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/util/LogisticRegressionDataGenerator.scala
@@ -15,12 +15,13 @@
  * limitations under the License.
  */
 
-package spark.mllib.util
+package org.apache.spark.mllib.util
 
 import scala.util.Random
 
-import spark.{RDD, SparkContext}
-import spark.mllib.regression.LabeledPoint
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+import org.apache.spark.mllib.regression.LabeledPoint
 
 /**
  * Generate test data for LogisticRegression. This class chooses positive labels
diff --git a/mllib/src/main/scala/spark/mllib/util/MFDataGenerator.scala b/mllib/src/main/scala/org/apache/spark/mllib/util/MFDataGenerator.scala
similarity index 96%
rename from mllib/src/main/scala/spark/mllib/util/MFDataGenerator.scala
rename to mllib/src/main/scala/org/apache/spark/mllib/util/MFDataGenerator.scala
index 88992cde0cf8ed81a488673b74467e90d535ebd7..5aec867257e16e493e15028eb8efb08df97a8dd4 100644
--- a/mllib/src/main/scala/spark/mllib/util/MFDataGenerator.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/util/MFDataGenerator.scala
@@ -15,14 +15,15 @@
  * limitations under the License.
  */
 
-package spark.mllib.recommendation
+package org.apache.spark.mllib.recommendation
 
 import scala.util.Random
 
 import org.jblas.DoubleMatrix
 
-import spark.{RDD, SparkContext}
-import spark.mllib.util.MLUtils
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+import org.apache.spark.mllib.util.MLUtils
 
 /**
 * Generate RDD(s) containing data for Matrix Factorization.
@@ -110,4 +111,4 @@ object MFDataGenerator{
     sc.stop()
   
   }
-}
\ No newline at end of file
+}
diff --git a/mllib/src/main/scala/spark/mllib/util/MLUtils.scala b/mllib/src/main/scala/org/apache/spark/mllib/util/MLUtils.scala
similarity index 95%
rename from mllib/src/main/scala/spark/mllib/util/MLUtils.scala
rename to mllib/src/main/scala/org/apache/spark/mllib/util/MLUtils.scala
index a8e6ae9953d3b1f6d3bbbe6d83e67db5e2fc6f9c..d91b74c3ac2b3de970e72bad7a7aee30b8868fcf 100644
--- a/mllib/src/main/scala/spark/mllib/util/MLUtils.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/util/MLUtils.scala
@@ -15,13 +15,14 @@
  * limitations under the License.
  */
 
-package spark.mllib.util
+package org.apache.spark.mllib.util
 
-import spark.{RDD, SparkContext}
-import spark.SparkContext._
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+import org.apache.spark.SparkContext._
 
 import org.jblas.DoubleMatrix
-import spark.mllib.regression.LabeledPoint
+import org.apache.spark.mllib.regression.LabeledPoint
 
 /**
  * Helper methods to load, save and pre-process data used in ML Lib.
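A hedged sketch of the load/save round trip these helpers provide, under the new package; the loadLabeledData/saveLabeledData helpers and the "label,f1 f2 ..." line format are assumed from the 0.8-era API:

    import org.apache.spark.SparkContext
    import org.apache.spark.mllib.util.MLUtils

    val sc = new SparkContext("local", "MLUtilsExample")
    // Assumed input format: one "label,feature1 feature2 ..." record per line.
    val points = MLUtils.loadLabeledData(sc, "data/labeled.txt")
    MLUtils.saveLabeledData(points.filter(_.label > 0), "out/positives")
    sc.stop()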
diff --git a/mllib/src/main/scala/spark/mllib/util/SVMDataGenerator.scala b/mllib/src/main/scala/org/apache/spark/mllib/util/SVMDataGenerator.scala
similarity index 89%
rename from mllib/src/main/scala/spark/mllib/util/SVMDataGenerator.scala
rename to mllib/src/main/scala/org/apache/spark/mllib/util/SVMDataGenerator.scala
index eff456cad637d35a43bdc1c8a849c6f6cc117c6e..6e9f667635643fcf655c1817c10e7e6b152f87d2 100644
--- a/mllib/src/main/scala/spark/mllib/util/SVMDataGenerator.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/util/SVMDataGenerator.scala
@@ -1,11 +1,12 @@
-package spark.mllib.util
+package org.apache.spark.mllib.util
 
 import scala.util.Random
 
-import spark.{RDD, SparkContext}
-
 import org.jblas.DoubleMatrix
-import spark.mllib.regression.LabeledPoint
+
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+import org.apache.spark.mllib.regression.LabeledPoint
 
 /**
  * Generate sample data used for SVM. This class generates uniform random values
diff --git a/mllib/src/test/java/spark/mllib/classification/JavaLogisticRegressionSuite.java b/mllib/src/test/java/org/apache/spark/mllib/classification/JavaLogisticRegressionSuite.java
similarity index 93%
rename from mllib/src/test/java/spark/mllib/classification/JavaLogisticRegressionSuite.java
rename to mllib/src/test/java/org/apache/spark/mllib/classification/JavaLogisticRegressionSuite.java
index e0ebd45cd8d7ceb8326904674a076ad74421b340..e18e3bc6a86beda897e4b15b539468a51c775704 100644
--- a/mllib/src/test/java/spark/mllib/classification/JavaLogisticRegressionSuite.java
+++ b/mllib/src/test/java/org/apache/spark/mllib/classification/JavaLogisticRegressionSuite.java
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.mllib.classification;
+package org.apache.spark.mllib.classification;
 
 import java.io.Serializable;
 import java.util.List;
@@ -25,10 +25,10 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
 
-import spark.mllib.regression.LabeledPoint;
+import org.apache.spark.mllib.regression.LabeledPoint;
 
 public class JavaLogisticRegressionSuite implements Serializable {
   private transient JavaSparkContext sc;
diff --git a/mllib/src/test/java/spark/mllib/classification/JavaSVMSuite.java b/mllib/src/test/java/org/apache/spark/mllib/classification/JavaSVMSuite.java
similarity index 93%
rename from mllib/src/test/java/spark/mllib/classification/JavaSVMSuite.java
rename to mllib/src/test/java/org/apache/spark/mllib/classification/JavaSVMSuite.java
index 7881b3c38f01589fcee0e29a1f96947176a9edc0..117e5eaa8b78e42294738f9faca2b0c6c5cd36f4 100644
--- a/mllib/src/test/java/spark/mllib/classification/JavaSVMSuite.java
+++ b/mllib/src/test/java/org/apache/spark/mllib/classification/JavaSVMSuite.java
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.mllib.classification;
+package org.apache.spark.mllib.classification;
 
 
 import java.io.Serializable;
@@ -26,10 +26,10 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
 
-import spark.mllib.regression.LabeledPoint;
+import org.apache.spark.mllib.regression.LabeledPoint;
 
 public class JavaSVMSuite implements Serializable {
   private transient JavaSparkContext sc;
diff --git a/mllib/src/test/java/spark/mllib/clustering/JavaKMeansSuite.java b/mllib/src/test/java/org/apache/spark/mllib/clustering/JavaKMeansSuite.java
similarity index 96%
rename from mllib/src/test/java/spark/mllib/clustering/JavaKMeansSuite.java
rename to mllib/src/test/java/org/apache/spark/mllib/clustering/JavaKMeansSuite.java
index 3f2d82bfb4a13fcb9a1bcbf7135dd2cccc85e4b5..32d3934ac135a9ecb4aa2df50b03b88dcd3addbe 100644
--- a/mllib/src/test/java/spark/mllib/clustering/JavaKMeansSuite.java
+++ b/mllib/src/test/java/org/apache/spark/mllib/clustering/JavaKMeansSuite.java
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.mllib.clustering;
+package org.apache.spark.mllib.clustering;
 
 import java.io.Serializable;
 import java.util.ArrayList;
@@ -26,8 +26,8 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
 
 public class JavaKMeansSuite implements Serializable {
   private transient JavaSparkContext sc;
diff --git a/mllib/src/test/java/spark/mllib/recommendation/JavaALSSuite.java b/mllib/src/test/java/org/apache/spark/mllib/recommendation/JavaALSSuite.java
similarity index 96%
rename from mllib/src/test/java/spark/mllib/recommendation/JavaALSSuite.java
rename to mllib/src/test/java/org/apache/spark/mllib/recommendation/JavaALSSuite.java
index 7993629a6d5447fe9a75de51563a85d2481638d8..3323f6cee2b910d7e6f4fcf6d531b443a98e8b89 100644
--- a/mllib/src/test/java/spark/mllib/recommendation/JavaALSSuite.java
+++ b/mllib/src/test/java/org/apache/spark/mllib/recommendation/JavaALSSuite.java
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.mllib.recommendation;
+package org.apache.spark.mllib.recommendation;
 
 import java.io.Serializable;
 import java.util.List;
@@ -27,8 +27,8 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
 
 import org.jblas.DoubleMatrix;
 
diff --git a/mllib/src/test/java/spark/mllib/regression/JavaLassoSuite.java b/mllib/src/test/java/org/apache/spark/mllib/regression/JavaLassoSuite.java
similarity index 94%
rename from mllib/src/test/java/spark/mllib/regression/JavaLassoSuite.java
rename to mllib/src/test/java/org/apache/spark/mllib/regression/JavaLassoSuite.java
index 5863140bafec8c6f5e70b3b219a08ba25abe90cb..f44b25cd44d19be342ac6ee0ddfcb597fe515ab1 100644
--- a/mllib/src/test/java/spark/mllib/regression/JavaLassoSuite.java
+++ b/mllib/src/test/java/org/apache/spark/mllib/regression/JavaLassoSuite.java
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.mllib.regression;
+package org.apache.spark.mllib.regression;
 
 import java.io.Serializable;
 import java.util.List;
@@ -25,9 +25,9 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
-import spark.mllib.util.LinearDataGenerator;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.mllib.util.LinearDataGenerator;
 
 public class JavaLassoSuite implements Serializable {
   private transient JavaSparkContext sc;
diff --git a/mllib/src/test/java/spark/mllib/regression/JavaLinearRegressionSuite.java b/mllib/src/test/java/org/apache/spark/mllib/regression/JavaLinearRegressionSuite.java
similarity index 94%
rename from mllib/src/test/java/spark/mllib/regression/JavaLinearRegressionSuite.java
rename to mllib/src/test/java/org/apache/spark/mllib/regression/JavaLinearRegressionSuite.java
index 50716c786179158ee940807afd015cca0291da48..5a4410a632649ca99b22536adf198ffef9840827 100644
--- a/mllib/src/test/java/spark/mllib/regression/JavaLinearRegressionSuite.java
+++ b/mllib/src/test/java/org/apache/spark/mllib/regression/JavaLinearRegressionSuite.java
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.mllib.regression;
+package org.apache.spark.mllib.regression;
 
 import java.io.Serializable;
 import java.util.List;
@@ -25,9 +25,9 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
-import spark.mllib.util.LinearDataGenerator;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.mllib.util.LinearDataGenerator;
 
 public class JavaLinearRegressionSuite implements Serializable {
   private transient JavaSparkContext sc;
diff --git a/mllib/src/test/java/spark/mllib/regression/JavaRidgeRegressionSuite.java b/mllib/src/test/java/org/apache/spark/mllib/regression/JavaRidgeRegressionSuite.java
similarity index 95%
rename from mllib/src/test/java/spark/mllib/regression/JavaRidgeRegressionSuite.java
rename to mllib/src/test/java/org/apache/spark/mllib/regression/JavaRidgeRegressionSuite.java
index 2c0aabad309cbd9330cab9badf742ce7b7e366ac..2fdd5fc8fdca6371b0a85351451a306bbd2fc459 100644
--- a/mllib/src/test/java/spark/mllib/regression/JavaRidgeRegressionSuite.java
+++ b/mllib/src/test/java/org/apache/spark/mllib/regression/JavaRidgeRegressionSuite.java
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.mllib.regression;
+package org.apache.spark.mllib.regression;
 
 import java.io.Serializable;
 import java.util.List;
@@ -27,9 +27,9 @@ import org.junit.Test;
 
 import org.jblas.DoubleMatrix;
 
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
-import spark.mllib.util.LinearDataGenerator;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.mllib.util.LinearDataGenerator;
 
 public class JavaRidgeRegressionSuite implements Serializable {
   private transient JavaSparkContext sc;
diff --git a/mllib/src/test/scala/spark/mllib/classification/LogisticRegressionSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala
similarity index 97%
rename from mllib/src/test/scala/spark/mllib/classification/LogisticRegressionSuite.scala
rename to mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala
index bd87c528c35db3daf0f95d05024c0dbf07f74595..34c67294e9ac9942f747a06db712d52e71f5e42b 100644
--- a/mllib/src/test/scala/spark/mllib/classification/LogisticRegressionSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.mllib.classification
+package org.apache.spark.mllib.classification
 
 import scala.util.Random
 import scala.collection.JavaConversions._
@@ -24,8 +24,8 @@ import org.scalatest.BeforeAndAfterAll
 import org.scalatest.FunSuite
 import org.scalatest.matchers.ShouldMatchers
 
-import spark.SparkContext
-import spark.mllib.regression._
+import org.apache.spark.SparkContext
+import org.apache.spark.mllib.regression._
 
 object LogisticRegressionSuite {
 
diff --git a/mllib/src/test/scala/spark/mllib/classification/SVMSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/classification/SVMSuite.scala
similarity index 96%
rename from mllib/src/test/scala/spark/mllib/classification/SVMSuite.scala
rename to mllib/src/test/scala/org/apache/spark/mllib/classification/SVMSuite.scala
index 894ae458ad03d60ed4b1fe5652e07f7ad2bf7398..6a957e3ddca719188677c4bd150d113a8ab98204 100644
--- a/mllib/src/test/scala/spark/mllib/classification/SVMSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/classification/SVMSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.mllib.classification
+package org.apache.spark.mllib.classification
 
 import scala.util.Random
 import scala.math.signum
@@ -24,11 +24,11 @@ import scala.collection.JavaConversions._
 import org.scalatest.BeforeAndAfterAll
 import org.scalatest.FunSuite
 
-import spark.SparkContext
-import spark.mllib.regression._
-
 import org.jblas.DoubleMatrix
 
+import org.apache.spark.{SparkException, SparkContext}
+import org.apache.spark.mllib.regression._
+
 object SVMSuite {
 
   def generateSVMInputAsList(
@@ -159,7 +159,7 @@ class SVMSuite extends FunSuite with BeforeAndAfterAll {
       }
     }
 
-    intercept[spark.SparkException] {
+    intercept[SparkException] {
       val model = SVMWithSGD.train(testRDDInvalid, 100)
     }
 
diff --git a/mllib/src/test/scala/spark/mllib/clustering/KMeansSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/clustering/KMeansSuite.scala
similarity index 98%
rename from mllib/src/test/scala/spark/mllib/clustering/KMeansSuite.scala
rename to mllib/src/test/scala/org/apache/spark/mllib/clustering/KMeansSuite.scala
index d5d95c86393d21c345a9f9b164dead08b7689c2e..94245f6027b3094bba9cae70e19d359bce136b78 100644
--- a/mllib/src/test/scala/spark/mllib/clustering/KMeansSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/clustering/KMeansSuite.scala
@@ -15,15 +15,15 @@
  * limitations under the License.
  */
 
-package spark.mllib.clustering
+package org.apache.spark.mllib.clustering
 
 import scala.util.Random
 
 import org.scalatest.BeforeAndAfterAll
 import org.scalatest.FunSuite
 
-import spark.SparkContext
-import spark.SparkContext._
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
 
 import org.jblas._
 
diff --git a/mllib/src/test/scala/spark/mllib/recommendation/ALSSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/recommendation/ALSSuite.scala
similarity index 97%
rename from mllib/src/test/scala/spark/mllib/recommendation/ALSSuite.scala
rename to mllib/src/test/scala/org/apache/spark/mllib/recommendation/ALSSuite.scala
index 15a60efda6ec785fa802621f56c6562a756fd6d6..347ef238f4042ceb80379e368b9586a7ccf18a78 100644
--- a/mllib/src/test/scala/spark/mllib/recommendation/ALSSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/recommendation/ALSSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.mllib.recommendation
+package org.apache.spark.mllib.recommendation
 
 import scala.collection.JavaConversions._
 import scala.util.Random
@@ -23,8 +23,8 @@ import scala.util.Random
 import org.scalatest.BeforeAndAfterAll
 import org.scalatest.FunSuite
 
-import spark.SparkContext
-import spark.SparkContext._
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
 
 import org.jblas._
 
diff --git a/mllib/src/test/scala/spark/mllib/regression/LassoSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/regression/LassoSuite.scala
similarity index 96%
rename from mllib/src/test/scala/spark/mllib/regression/LassoSuite.scala
rename to mllib/src/test/scala/org/apache/spark/mllib/regression/LassoSuite.scala
index 622dbbab7fe9eadff3f42499901ae43e8541aa08..db980c7bae64f946e64555c53f04969babed25c1 100644
--- a/mllib/src/test/scala/spark/mllib/regression/LassoSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/regression/LassoSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.mllib.regression
+package org.apache.spark.mllib.regression
 
 import scala.collection.JavaConversions._
 import scala.util.Random
@@ -23,8 +23,8 @@ import scala.util.Random
 import org.scalatest.BeforeAndAfterAll
 import org.scalatest.FunSuite
 
-import spark.SparkContext
-import spark.mllib.util.LinearDataGenerator
+import org.apache.spark.SparkContext
+import org.apache.spark.mllib.util.LinearDataGenerator
 
 
 class LassoSuite extends FunSuite with BeforeAndAfterAll {
diff --git a/mllib/src/test/scala/spark/mllib/regression/LinearRegressionSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/regression/LinearRegressionSuite.scala
similarity index 93%
rename from mllib/src/test/scala/spark/mllib/regression/LinearRegressionSuite.scala
rename to mllib/src/test/scala/org/apache/spark/mllib/regression/LinearRegressionSuite.scala
index acc48a3283cdd2158b958f3af0e41d23b5499762..ef500c704c8a9949e85fb40885669b500a921f1b 100644
--- a/mllib/src/test/scala/spark/mllib/regression/LinearRegressionSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/regression/LinearRegressionSuite.scala
@@ -15,14 +15,14 @@
  * limitations under the License.
  */
 
-package spark.mllib.regression
+package org.apache.spark.mllib.regression
 
 import org.scalatest.BeforeAndAfterAll
 import org.scalatest.FunSuite
 
-import spark.SparkContext
-import spark.SparkContext._
-import spark.mllib.util.LinearDataGenerator
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.mllib.util.LinearDataGenerator
 
 class LinearRegressionSuite extends FunSuite with BeforeAndAfterAll {
   @transient private var sc: SparkContext = _
diff --git a/mllib/src/test/scala/spark/mllib/regression/RidgeRegressionSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/regression/RidgeRegressionSuite.scala
similarity index 94%
rename from mllib/src/test/scala/spark/mllib/regression/RidgeRegressionSuite.scala
rename to mllib/src/test/scala/org/apache/spark/mllib/regression/RidgeRegressionSuite.scala
index c4820357062d77d26c4e9f0c8e84df4c8b159585..c18092d804fa3e17055238aeb250a99e459e2d59 100644
--- a/mllib/src/test/scala/spark/mllib/regression/RidgeRegressionSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/regression/RidgeRegressionSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.mllib.regression
+package org.apache.spark.mllib.regression
 
 import scala.collection.JavaConversions._
 import scala.util.Random
@@ -24,9 +24,9 @@ import org.jblas.DoubleMatrix
 import org.scalatest.BeforeAndAfterAll
 import org.scalatest.FunSuite
 
-import spark.SparkContext
-import spark.SparkContext._
-import spark.mllib.util.LinearDataGenerator
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.mllib.util.LinearDataGenerator
 
 class RidgeRegressionSuite extends FunSuite with BeforeAndAfterAll {
   @transient private var sc: SparkContext = _
diff --git a/pom.xml b/pom.xml
index e2fd54a9660aeb84919467a8bc3f96a1be6e509b..6a73e81da0b767e2a005cfc2a8f9b35b7223dca1 100644
--- a/pom.xml
+++ b/pom.xml
@@ -18,22 +18,22 @@
 
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
-  <groupId>org.spark-project</groupId>
+  <groupId>org.apache.spark</groupId>
   <artifactId>spark-parent</artifactId>
   <version>0.8.0-SNAPSHOT</version>
   <packaging>pom</packaging>
   <name>Spark Project Parent POM</name>
-  <url>http://spark-project.org/</url>
+  <url>http://spark.incubator.apache.org/</url>
   <licenses>
     <license>
-      <name>BSD License</name>
-      <url>https://github.com/mesos/spark/blob/master/LICENSE</url>
+      <name>Apache 2.0 License</name>
+      <url>http://www.apache.org/licenses/LICENSE-2.0.html</url>
       <distribution>repo</distribution>
     </license>
   </licenses>
   <scm>
-    <connection>scm:git:git@github.com:mesos/spark.git</connection>
-    <url>scm:git:git@github.com:mesos/spark.git</url>
+    <connection>scm:git:git@github.com:apache/incubator-spark.git</connection>
+    <url>scm:git:git@github.com:apache/incubator-spark.git</url>
   </scm>
   <developers>
     <developer>
@@ -41,12 +41,12 @@
       <name>Matei Zaharia</name>
       <email>matei.zaharia@gmail.com</email>
       <url>http://www.cs.berkeley.edu/~matei</url>
-      <organization>U.C. Berkeley Computer Science</organization>
-      <organizationUrl>http://www.cs.berkeley.edu/</organizationUrl>
+      <organization>Apache Software Foundation</organization>
+      <organizationUrl>http://spark.incubator.apache.org</organizationUrl>
     </developer>
   </developers>
   <issueManagement>
-    <system>github</system>
+    <system>JIRA</system>
     <url>https://spark-project.atlassian.net/browse/SPARK</url>
   </issueManagement>
 
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 2e2681267137ca5bd29df9469ba27dae869eb288..b1b99b37c4f9328a380faac3317c8084527fb7f5 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -74,7 +74,7 @@ object SparkBuild extends Build {
     core, repl, examples, bagel, streaming, mllib, tools, assemblyProj) ++ maybeYarnRef
 
   def sharedSettings = Defaults.defaultSettings ++ Seq(
-    organization := "org.spark-project",
+    organization := "org.apache.spark",
     version := "0.8.0-SNAPSHOT",
     scalaVersion := "2.9.3",
     scalacOptions := Seq("-unchecked", "-optimize", "-deprecation"),
@@ -103,7 +103,7 @@ object SparkBuild extends Build {
     //useGpg in Global := true,
 
     pomExtra := (
-      <url>http://spark-project.org/</url>
+      <url>http://spark.incubator.apache.org/</url>
       <licenses>
         <license>
           <name>Apache 2.0 License</name>
@@ -112,8 +112,8 @@ object SparkBuild extends Build {
         </license>
       </licenses>
       <scm>
-        <connection>scm:git:git@github.com:mesos/spark.git</connection>
-        <url>scm:git:git@github.com:mesos/spark.git</url>
+        <connection>scm:git:git@github.com:apache/incubator-spark.git</connection>
+        <url>scm:git:git@github.com:apache/incubator-spark.git</url>
       </scm>
       <developers>
         <developer>
@@ -121,10 +121,14 @@ object SparkBuild extends Build {
           <name>Matei Zaharia</name>
           <email>matei.zaharia@gmail.com</email>
           <url>http://www.cs.berkeley.edu/~matei</url>
-          <organization>U.C. Berkeley Computer Science</organization>
-          <organizationUrl>http://www.cs.berkeley.edu/</organizationUrl>
+          <organization>Apache Software Foundation</organization>
+          <organizationUrl>http://spark.incubator.apache.org</organizationUrl>
         </developer>
       </developers>
+      <issueManagement>
+        <system>JIRA</system>
+        <url>https://spark-project.atlassian.net/browse/SPARK</url>
+      </issueManagement>
     ),
 
 /*
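Because the Maven groupId and sbt organization above move from org.spark-project to org.apache.spark, downstream projects must update their dependency coordinates; a hypothetical build.sbt line after this change (the artifactId mirrors the POMs in this patch):

    // The old coordinate "org.spark-project" % "spark-core" will no longer
    // resolve for 0.8.0-SNAPSHOT builds published from this tree.
    libraryDependencies += "org.apache.spark" % "spark-core" % "0.8.0-SNAPSHOT"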
diff --git a/python/examples/wordcount.py b/python/examples/wordcount.py
index a6de22766ab1451759ea94439da931d412e14bd2..b9139b9d765203dce45fb01cdbad1036b0918be8 100755
--- a/python/examples/wordcount.py
+++ b/python/examples/wordcount.py
@@ -32,4 +32,4 @@ if __name__ == "__main__":
                   .reduceByKey(add)
     output = counts.collect()
     for (word, count) in output:
-        print "%s : %i" % (word, count)
+        print "%s: %i" % (word, count)
diff --git a/python/pyspark/context.py b/python/pyspark/context.py
index 2803ce90f3ed2629a5b1e7934ee65e55b94a8f5a..8fbf2965097d925fb62d835e89945fa17bfeaa4f 100644
--- a/python/pyspark/context.py
+++ b/python/pyspark/context.py
@@ -114,9 +114,9 @@ class SparkContext(object):
             self.addPyFile(path)
 
         # Create a temporary directory inside spark.local.dir:
-        local_dir = self._jvm.spark.Utils.getLocalDir()
+        local_dir = self._jvm.org.apache.spark.util.Utils.getLocalDir()
         self._temp_dir = \
-            self._jvm.spark.Utils.createTempDir(local_dir).getAbsolutePath()
+            self._jvm.org.apache.spark.util.Utils.createTempDir(local_dir).getAbsolutePath()
 
     @property
     def defaultParallelism(self):
diff --git a/python/pyspark/files.py b/python/pyspark/files.py
index 89bcbcfe068557f8b8351225b5f331f00976d8be..57ee14eeb777667fa89518aa4d57c03f87edd0fb 100644
--- a/python/pyspark/files.py
+++ b/python/pyspark/files.py
@@ -52,4 +52,4 @@ class SparkFiles(object):
             return cls._root_directory
         else:
             # This will have to change if we support multiple SparkContexts:
-            return cls._sc._jvm.spark.SparkFiles.getRootDirectory()
+            return cls._sc._jvm.org.apache.spark.SparkFiles.getRootDirectory()
diff --git a/python/pyspark/java_gateway.py b/python/pyspark/java_gateway.py
index 3ccf062c86c665c26c5b816e71dbf9d3fa4d5e65..26fbe0f08045d30e0f07028949f3a783e7551035 100644
--- a/python/pyspark/java_gateway.py
+++ b/python/pyspark/java_gateway.py
@@ -53,7 +53,7 @@ def launch_gateway():
     # Connect to the gateway
     gateway = JavaGateway(GatewayClient(port=port), auto_convert=False)
     # Import the classes used by PySpark
-    java_import(gateway.jvm, "spark.api.java.*")
-    java_import(gateway.jvm, "spark.api.python.*")
+    java_import(gateway.jvm, "org.apache.spark.api.java.*")
+    java_import(gateway.jvm, "org.apache.spark.api.python.*")
     java_import(gateway.jvm, "scala.Tuple2")
     return gateway
diff --git a/python/pyspark/shell.py b/python/pyspark/shell.py
index 9b4b4e78cb7276e939c5e31dcafc930fbd98b136..54823f80378787ff5c73c78f6b8ab8d336d52975 100644
--- a/python/pyspark/shell.py
+++ b/python/pyspark/shell.py
@@ -21,6 +21,7 @@ An interactive shell.
 This file is designed to be launched as a PYTHONSTARTUP script.
 """
 import os
+import platform
 import pyspark
 from pyspark.context import SparkContext
 
@@ -28,6 +29,18 @@ from pyspark.context import SparkContext
 add_files = os.environ.get("ADD_FILES").split(',') if os.environ.get("ADD_FILES") != None else None
 
 sc = SparkContext(os.environ.get("MASTER", "local"), "PySparkShell", pyFiles=add_files)
+
+print """Welcome to
+      ____              __
+     / __/__  ___ _____/ /__
+    _\ \/ _ \/ _ `/ __/  '_/
+   /__ / .__/\_,_/_/ /_/\_\   version 0.8.0
+      /_/
+"""
+print "Using Python version %s (%s, %s)" % (
+    platform.python_version(),
+    platform.python_build()[0],
+    platform.python_build()[1])
 print "Spark context avaiable as sc."
 
 if add_files != None:
diff --git a/repl-bin/pom.xml b/repl-bin/pom.xml
index 919e35f24041a52531562662a83745882971d2b3..d61b36a61a6559a210a54270d1dee466bb77a5a4 100644
--- a/repl-bin/pom.xml
+++ b/repl-bin/pom.xml
@@ -19,17 +19,17 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <parent>
-    <groupId>org.spark-project</groupId>
+    <groupId>org.apache.spark</groupId>
     <artifactId>spark-parent</artifactId>
     <version>0.8.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 
-  <groupId>org.spark-project</groupId>
+  <groupId>org.apache.spark</groupId>
   <artifactId>spark-repl-bin</artifactId>
   <packaging>pom</packaging>
   <name>Spark Project REPL binary packaging</name>
-  <url>http://spark-project.org/</url>
+  <url>http://spark.incubator.apache.org/</url>
 
   <properties>
     <deb.pkg.name>spark</deb.pkg.name>
@@ -39,18 +39,18 @@
 
   <dependencies>
     <dependency>
-      <groupId>org.spark-project</groupId>
+      <groupId>org.apache.spark</groupId>
       <artifactId>spark-core</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
-      <groupId>org.spark-project</groupId>
+      <groupId>org.apache.spark</groupId>
       <artifactId>spark-bagel</artifactId>
       <version>${project.version}</version>
       <scope>runtime</scope>
     </dependency>
     <dependency>
-      <groupId>org.spark-project</groupId>
+      <groupId>org.apache.spark</groupId>
       <artifactId>spark-repl</artifactId>
       <version>${project.version}</version>
       <scope>runtime</scope>
@@ -109,7 +109,7 @@
       <id>hadoop2-yarn</id>
       <dependencies>
         <dependency>
-          <groupId>org.spark-project</groupId>
+          <groupId>org.apache.spark</groupId>
           <artifactId>spark-yarn</artifactId>
           <version>${project.version}</version>
         </dependency>
diff --git a/repl/pom.xml b/repl/pom.xml
index f800664cff68d2d8190d4f501bf56ff9e445b7a8..a1c87d7618249cc720a2ecb65aa75dc8ae9248e8 100644
--- a/repl/pom.xml
+++ b/repl/pom.xml
@@ -19,17 +19,17 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <parent>
-    <groupId>org.spark-project</groupId>
+    <groupId>org.apache.spark</groupId>
     <artifactId>spark-parent</artifactId>
     <version>0.8.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 
-  <groupId>org.spark-project</groupId>
+  <groupId>org.apache.spark</groupId>
   <artifactId>spark-repl</artifactId>
   <packaging>jar</packaging>
   <name>Spark Project REPL</name>
-  <url>http://spark-project.org/</url>
+  <url>http://spark.incubator.apache.org/</url>
 
   <properties>
     <deb.install.path>/usr/share/spark</deb.install.path>
@@ -38,18 +38,18 @@
 
   <dependencies>
     <dependency>
-      <groupId>org.spark-project</groupId>
+      <groupId>org.apache.spark</groupId>
       <artifactId>spark-core</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
-      <groupId>org.spark-project</groupId>
+      <groupId>org.apache.spark</groupId>
       <artifactId>spark-bagel</artifactId>
       <version>${project.version}</version>
       <scope>runtime</scope>
     </dependency>
     <dependency>
-      <groupId>org.spark-project</groupId>
+      <groupId>org.apache.spark</groupId>
       <artifactId>spark-mllib</artifactId>
       <version>${project.version}</version>
       <scope>runtime</scope>
@@ -136,7 +136,7 @@
       <id>hadoop2-yarn</id>
       <dependencies>
         <dependency>
-          <groupId>org.spark-project</groupId>
+          <groupId>org.apache.spark</groupId>
           <artifactId>spark-yarn</artifactId>
           <version>${project.version}</version>
         </dependency>
diff --git a/repl/src/main/scala/spark/repl/ExecutorClassLoader.scala b/repl/src/main/scala/org/apache/spark/repl/ExecutorClassLoader.scala
similarity index 99%
rename from repl/src/main/scala/spark/repl/ExecutorClassLoader.scala
rename to repl/src/main/scala/org/apache/spark/repl/ExecutorClassLoader.scala
index 274bc585db91eccaafb40988e37b6a18f1084d9f..3e171849e34941061ee63b01a8157edfe0bf4b25 100644
--- a/repl/src/main/scala/spark/repl/ExecutorClassLoader.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/ExecutorClassLoader.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.repl
+package org.apache.spark.repl
 
 import java.io.{ByteArrayOutputStream, InputStream}
 import java.net.{URI, URL, URLClassLoader, URLEncoder}
diff --git a/repl/src/main/scala/spark/repl/Main.scala b/repl/src/main/scala/org/apache/spark/repl/Main.scala
similarity index 97%
rename from repl/src/main/scala/spark/repl/Main.scala
rename to repl/src/main/scala/org/apache/spark/repl/Main.scala
index d824d62fd10110ec7b9c5323d10ba8c603d34730..17e149f8abcc9ac80077ea8ef31bed4e205e1de5 100644
--- a/repl/src/main/scala/spark/repl/Main.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/Main.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.repl
+package org.apache.spark.repl
 
 import scala.collection.mutable.Set
 
diff --git a/repl/src/main/scala/spark/repl/SparkHelper.scala b/repl/src/main/scala/org/apache/spark/repl/SparkHelper.scala
similarity index 100%
rename from repl/src/main/scala/spark/repl/SparkHelper.scala
rename to repl/src/main/scala/org/apache/spark/repl/SparkHelper.scala
diff --git a/repl/src/main/scala/spark/repl/SparkILoop.scala b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
similarity index 98%
rename from repl/src/main/scala/spark/repl/SparkILoop.scala
rename to repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 0cecbd71adc9f8f63d023cf5e0b21aaee0543fb9..193ccb48eeb98827bb8d4f6f6df62209f7c5010a 100644
--- a/repl/src/main/scala/spark/repl/SparkILoop.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -3,7 +3,7 @@
  * @author Alexander Spoon
  */
 
-package spark.repl
+package org.apache.spark.repl
 
 import scala.tools.nsc._
 import scala.tools.nsc.interpreter._
@@ -22,8 +22,8 @@ import util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream }
 import interpreter._
 import io.{ File, Sources }
 
-import spark.Logging
-import spark.SparkContext
+import org.apache.spark.Logging
+import org.apache.spark.SparkContext
 
 /** The Scala interactive shell.  It provides a read-eval-print loop
  *  around the Interpreter class.
@@ -816,13 +816,13 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
   def initializeSpark() {
     intp.beQuietDuring {
       command("""
-        spark.repl.Main.interp.out.println("Creating SparkContext...");
-        spark.repl.Main.interp.out.flush();
-        @transient val sc = spark.repl.Main.interp.createSparkContext();
-        spark.repl.Main.interp.out.println("Spark context available as sc.");
-        spark.repl.Main.interp.out.flush();
+        org.apache.spark.repl.Main.interp.out.println("Creating SparkContext...");
+        org.apache.spark.repl.Main.interp.out.flush();
+        @transient val sc = org.apache.spark.repl.Main.interp.createSparkContext();
+        org.apache.spark.repl.Main.interp.out.println("Spark context available as sc.");
+        org.apache.spark.repl.Main.interp.out.flush();
         """)
-      command("import spark.SparkContext._")
+      command("import org.apache.spark.SparkContext._")
     }
     echo("Type in expressions to have them evaluated.")
     echo("Type :help for more information.")
diff --git a/repl/src/main/scala/spark/repl/SparkIMain.scala b/repl/src/main/scala/org/apache/spark/repl/SparkIMain.scala
similarity index 99%
rename from repl/src/main/scala/spark/repl/SparkIMain.scala
rename to repl/src/main/scala/org/apache/spark/repl/SparkIMain.scala
index 43b6a6c9500302acac56155a44a5b4c658b62912..e6e35c9b5df9e85c8d2778cedd23b23e2abe134b 100644
--- a/repl/src/main/scala/spark/repl/SparkIMain.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkIMain.scala
@@ -3,7 +3,7 @@
  * @author  Martin Odersky
  */
 
-package spark.repl
+package org.apache.spark.repl
 
 import scala.tools.nsc._
 import scala.tools.nsc.interpreter._
@@ -27,9 +27,9 @@ import scala.util.control.Exception.{ ultimately }
 import scala.reflect.NameTransformer
 import SparkIMain._
 
-import spark.HttpServer
-import spark.Utils
-import spark.SparkEnv
+import org.apache.spark.HttpServer
+import org.apache.spark.util.Utils
+import org.apache.spark.SparkEnv
 
 /** An interpreter for Scala code.
  *  
@@ -883,7 +883,7 @@ class SparkIMain(val settings: Settings, protected val out: PrintWriter) extends
         val execution = lineManager.set(originalLine) {
           // MATEI: set the right SparkEnv for our SparkContext, because
           // this execution will happen in a separate thread
-          val sc = spark.repl.Main.interp.sparkContext
+          val sc = org.apache.spark.repl.Main.interp.sparkContext
           if (sc != null && sc.env != null)
             SparkEnv.set(sc.env)
           // Execute the line
diff --git a/repl/src/main/scala/spark/repl/SparkISettings.scala b/repl/src/main/scala/org/apache/spark/repl/SparkISettings.scala
similarity index 98%
rename from repl/src/main/scala/spark/repl/SparkISettings.scala
rename to repl/src/main/scala/org/apache/spark/repl/SparkISettings.scala
index 8ebb01d14602171c3ab8c38eaf9df79ec629e0a8..605b7b259b54f82a4e2e2b00d18c531ec2eaf77c 100644
--- a/repl/src/main/scala/spark/repl/SparkISettings.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkISettings.scala
@@ -3,7 +3,7 @@
  * @author Alexander Spoon
  */
 
-package spark.repl
+package org.apache.spark.repl
 
 import scala.tools.nsc._
 import scala.tools.nsc.interpreter._
diff --git a/repl/src/main/scala/spark/repl/SparkImports.scala b/repl/src/main/scala/org/apache/spark/repl/SparkImports.scala
similarity index 99%
rename from repl/src/main/scala/spark/repl/SparkImports.scala
rename to repl/src/main/scala/org/apache/spark/repl/SparkImports.scala
index 5caf5ca51a407d573abd728004b7aa123a3e94b8..41a1731d600011a1d88073517c3606226ead25c3 100644
--- a/repl/src/main/scala/spark/repl/SparkImports.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkImports.scala
@@ -3,7 +3,7 @@
  * @author  Paul Phillips
  */
 
-package spark.repl
+package org.apache.spark.repl
 
 import scala.tools.nsc._
 import scala.tools.nsc.interpreter._
diff --git a/repl/src/main/scala/spark/repl/SparkJLineCompletion.scala b/repl/src/main/scala/org/apache/spark/repl/SparkJLineCompletion.scala
similarity index 99%
rename from repl/src/main/scala/spark/repl/SparkJLineCompletion.scala
rename to repl/src/main/scala/org/apache/spark/repl/SparkJLineCompletion.scala
index 0069d8b2f49c74d565f85a3a7bad12362f5ff3ec..fdc172d7530ae15996b72eb117988caa0ac519a3 100644
--- a/repl/src/main/scala/spark/repl/SparkJLineCompletion.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkJLineCompletion.scala
@@ -3,7 +3,7 @@
  * @author Paul Phillips
  */
 
-package spark.repl
+package org.apache.spark.repl
 
 import scala.tools.nsc._
 import scala.tools.nsc.interpreter._
diff --git a/repl/src/main/scala/spark/repl/SparkJLineReader.scala b/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala
similarity index 98%
rename from repl/src/main/scala/spark/repl/SparkJLineReader.scala
rename to repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala
index ef6b6e092e04df8143d0a0d72ac489a32707bed5..d9e1de105c2161cf1ad2d6b7196c2035d74b12f4 100644
--- a/repl/src/main/scala/spark/repl/SparkJLineReader.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala
@@ -3,7 +3,7 @@
  * @author Stepan Koltsov
  */
 
-package spark.repl
+package org.apache.spark.repl
 
 import scala.tools.nsc._
 import scala.tools.nsc.interpreter._
diff --git a/repl/src/main/scala/spark/repl/SparkMemberHandlers.scala b/repl/src/main/scala/org/apache/spark/repl/SparkMemberHandlers.scala
similarity index 99%
rename from repl/src/main/scala/spark/repl/SparkMemberHandlers.scala
rename to repl/src/main/scala/org/apache/spark/repl/SparkMemberHandlers.scala
index 2980dfcd76834652b0a4e205253f99c93d8b6789..a3409bf66542caa094dfedca3af53fbc0cb608c0 100644
--- a/repl/src/main/scala/spark/repl/SparkMemberHandlers.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkMemberHandlers.scala
@@ -3,7 +3,7 @@
  * @author  Martin Odersky
  */
 
-package spark.repl
+package org.apache.spark.repl
 
 import scala.tools.nsc._
 import scala.tools.nsc.interpreter._
diff --git a/repl/src/test/scala/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
similarity index 98%
rename from repl/src/test/scala/spark/repl/ReplSuite.scala
rename to repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index 80ae605558015613eccee56d82eafe6ce59d0aa2..8f9b632c0eea67ace92595a389cbe6755902b161 100644
--- a/repl/src/test/scala/spark/repl/ReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.repl
+package org.apache.spark.repl
 
 import java.io._
 import java.net.URLClassLoader
@@ -41,10 +41,10 @@ class ReplSuite extends FunSuite {
       }
     }
     val interp = new SparkILoop(in, new PrintWriter(out), master)
-    spark.repl.Main.interp = interp
+    org.apache.spark.repl.Main.interp = interp
     val separator = System.getProperty("path.separator")
     interp.process(Array("-classpath", paths.mkString(separator)))
-    spark.repl.Main.interp = null
+    org.apache.spark.repl.Main.interp = null
     if (interp.sparkContext != null) {
       interp.sparkContext.stop()
     }
diff --git a/spark-executor b/spark-executor
index 63692bd46c939428ee598b43ea8a041cce808fd6..2c07c5484338fc9acddf4a4c74130566efba5176 100755
--- a/spark-executor
+++ b/spark-executor
@@ -19,4 +19,4 @@
 
 FWDIR="`dirname $0`"
 echo "Running spark-executor with framework dir = $FWDIR"
-exec $FWDIR/spark-class spark.executor.MesosExecutorBackend
+exec $FWDIR/spark-class org.apache.spark.executor.MesosExecutorBackend
diff --git a/spark-shell b/spark-shell
index 4d379c5cfb9f3adbabe28d9d0db881143d24bd0d..9608bd3f30be79825a2c9915d39820e3787753a3 100755
--- a/spark-shell
+++ b/spark-shell
@@ -79,7 +79,7 @@ if [[ ! $? ]]; then
   saved_stty=""
 fi
 
-$FWDIR/spark-class $OPTIONS spark.repl.Main "$@"
+$FWDIR/spark-class $OPTIONS org.apache.spark.repl.Main "$@"
 
 # record the exit status lest it be overwritten:
 # then reenable echo and propagate the code.
diff --git a/spark-shell.cmd b/spark-shell.cmd
index ec65eabb7413fdb5ab102f1ee17c8ba81ad113c5..b9b4d4bfb2b760049d54bb5d8458de65e14484a5 100644
--- a/spark-shell.cmd
+++ b/spark-shell.cmd
@@ -19,4 +19,4 @@ rem
 
 set FWDIR=%~dp0
 set SPARK_LAUNCH_WITH_SCALA=1
-cmd /V /E /C %FWDIR%run2.cmd spark.repl.Main %*
+cmd /V /E /C %FWDIR%run2.cmd org.apache.spark.repl.Main %*
diff --git a/streaming/pom.xml b/streaming/pom.xml
index 5c0582d6fb631d80c604cab85f789d19d549a2d4..7bea069b6158a2292c47bd6f6c91bead6e9fd3f9 100644
--- a/streaming/pom.xml
+++ b/streaming/pom.xml
@@ -19,17 +19,17 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <parent>
-    <groupId>org.spark-project</groupId>
+    <groupId>org.apache.spark</groupId>
     <artifactId>spark-parent</artifactId>
     <version>0.8.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 
-  <groupId>org.spark-project</groupId>
+  <groupId>org.apache.spark</groupId>
   <artifactId>spark-streaming</artifactId>
   <packaging>jar</packaging>
   <name>Spark Project Streaming</name>
-  <url>http://spark-project.org/</url>
+  <url>http://spark.incubator.apache.org/</url>
 
   <repositories>
     <!-- A repository in the local filesystem for the Kafka JAR, which we modified for Scala 2.9 -->
@@ -41,7 +41,7 @@
 
   <dependencies>
     <dependency>
-      <groupId>org.spark-project</groupId>
+      <groupId>org.apache.spark</groupId>
       <artifactId>spark-core</artifactId>
       <version>${project.version}</version>
     </dependency>
diff --git a/streaming/src/main/scala/spark/streaming/Checkpoint.scala b/streaming/src/main/scala/org/apache/spark/streaming/Checkpoint.scala
similarity index 98%
rename from streaming/src/main/scala/spark/streaming/Checkpoint.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/Checkpoint.scala
index 070d930b5eb8f5bf0783f42007bc4cfd99813625..2d8f07262426f00373a7afc4ea0064887b60ff2f 100644
--- a/streaming/src/main/scala/spark/streaming/Checkpoint.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/Checkpoint.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.streaming
+package org.apache.spark.streaming
 
 import java.io._
 import java.util.concurrent.Executors
@@ -24,8 +24,8 @@ import java.util.concurrent.RejectedExecutionException
 import org.apache.hadoop.fs.Path
 import org.apache.hadoop.conf.Configuration
 
-import spark.Logging
-import spark.io.CompressionCodec
+import org.apache.spark.Logging
+import org.apache.spark.io.CompressionCodec
 
 
 private[streaming]
diff --git a/streaming/src/main/scala/spark/streaming/DStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/DStream.scala
similarity index 98%
rename from streaming/src/main/scala/spark/streaming/DStream.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/DStream.scala
index 684d3abb564566d0d1f56397bc97305f448a05ff..80da6bd30b4b99663f7517531e5d005f62ba8c1c 100644
--- a/streaming/src/main/scala/spark/streaming/DStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/DStream.scala
@@ -15,14 +15,17 @@
  * limitations under the License.
  */
 
-package spark.streaming
+package org.apache.spark.streaming
 
-import spark.streaming.dstream._
+import org.apache.spark.streaming.dstream._
 import StreamingContext._
+import org.apache.spark.util.MetadataCleaner
+
 //import Time._
 
-import spark.{RDD, Logging}
-import spark.storage.StorageLevel
+import org.apache.spark.Logging
+import org.apache.spark.rdd.RDD
+import org.apache.spark.storage.StorageLevel
 
 import scala.collection.mutable.ArrayBuffer
 import scala.collection.mutable.HashMap
@@ -34,7 +37,7 @@ import org.apache.hadoop.conf.Configuration
 
 /**
  * A Discretized Stream (DStream), the basic abstraction in Spark Streaming, is a continuous
- * sequence of RDDs (of the same type) representing a continuous stream of data (see [[spark.RDD]]
+ * sequence of RDDs (of the same type) representing a continuous stream of data (see [[org.apache.spark.rdd.RDD]]
 * for more details on RDDs). DStreams can either be created from live data (such as data from
 * HDFS, Kafka or Flume) or generated by transforming existing DStreams using operations
  * such as `map`, `window` and `reduceByKeyAndWindow`. While a Spark Streaming program is running, each
@@ -42,7 +45,7 @@ import org.apache.hadoop.conf.Configuration
  * by a parent DStream.
  *
  * This class contains the basic operations available on all DStreams, such as `map`, `filter` and
- * `window`. In addition, [[spark.streaming.PairDStreamFunctions]] contains operations available
+ * `window`. In addition, [[org.apache.spark.streaming.PairDStreamFunctions]] contains operations available
  * only on DStreams of key-value pairs, such as `groupByKeyAndWindow` and `join`. These operations
  * are automatically available on any DStream of the right type (e.g., DStream[(Int, Int)] through
- * implicit conversions when `spark.streaming.StreamingContext._` is imported.
+ * implicit conversions when `org.apache.spark.streaming.StreamingContext._` is imported.
@@ -209,7 +212,7 @@ abstract class DStream[T: ClassManifest] (
         checkpointDuration + "). Please set it to higher than " + checkpointDuration + "."
     )
 
-    val metadataCleanerDelay = spark.util.MetadataCleaner.getDelaySeconds
+    val metadataCleanerDelay = MetadataCleaner.getDelaySeconds
     logInfo("metadataCleanupDelay = " + metadataCleanerDelay)
     assert(
       metadataCleanerDelay < 0 || rememberDuration.milliseconds < metadataCleanerDelay * 1000,
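To ground the DStream description above under the renamed packages, a minimal word-count sketch; the host, port, and batch sizes are illustrative, and the constructor and operations are assumed from the 0.8-era streaming API:

    import org.apache.spark.streaming.{Seconds, StreamingContext}
    import org.apache.spark.streaming.StreamingContext._  // pair-DStream operations

    val ssc = new StreamingContext("local[2]", "NetworkWordCount", Seconds(1))
    val lines = ssc.socketTextStream("localhost", 9999)
    lines.flatMap(_.split(" "))
         .map(w => (w, 1))
         .reduceByKeyAndWindow(_ + _, Seconds(30))   // 30-second sliding window
         .print()
    ssc.start()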
diff --git a/streaming/src/main/scala/spark/streaming/DStreamCheckpointData.scala b/streaming/src/main/scala/org/apache/spark/streaming/DStreamCheckpointData.scala
similarity index 98%
rename from streaming/src/main/scala/spark/streaming/DStreamCheckpointData.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/DStreamCheckpointData.scala
index 399ca1c63d8b7c1644d1e8cf74fdc884609d4707..58a0da28705411a8aa64ba4d7cf44b5438878cb6 100644
--- a/streaming/src/main/scala/spark/streaming/DStreamCheckpointData.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/DStreamCheckpointData.scala
@@ -15,13 +15,13 @@
  * limitations under the License.
  */
 
-package spark.streaming
+package org.apache.spark.streaming
 
 import org.apache.hadoop.fs.Path
 import org.apache.hadoop.fs.FileSystem
 import org.apache.hadoop.conf.Configuration
 import collection.mutable.HashMap
-import spark.Logging
+import org.apache.spark.Logging
 
 
 
diff --git a/streaming/src/main/scala/spark/streaming/DStreamGraph.scala b/streaming/src/main/scala/org/apache/spark/streaming/DStreamGraph.scala
similarity index 98%
rename from streaming/src/main/scala/spark/streaming/DStreamGraph.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/DStreamGraph.scala
index c09a332d44cc5a8811ad9ac66d030787915c5fdf..b9a58fded67614d40d7a4363badb6ac30dc844fd 100644
--- a/streaming/src/main/scala/spark/streaming/DStreamGraph.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/DStreamGraph.scala
@@ -15,12 +15,12 @@
  * limitations under the License.
  */
 
-package spark.streaming
+package org.apache.spark.streaming
 
 import dstream.InputDStream
 import java.io.{ObjectInputStream, IOException, ObjectOutputStream}
 import collection.mutable.ArrayBuffer
-import spark.Logging
+import org.apache.spark.Logging
 
 final private[streaming] class DStreamGraph extends Serializable with Logging {
   initLogging()
diff --git a/streaming/src/main/scala/spark/streaming/Duration.scala b/streaming/src/main/scala/org/apache/spark/streaming/Duration.scala
similarity index 86%
rename from streaming/src/main/scala/spark/streaming/Duration.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/Duration.scala
index 12a14e233d93779cb44075583c463c5606f3b543..6bf275f5afcb2168ece712ea34fe72f2e623739c 100644
--- a/streaming/src/main/scala/spark/streaming/Duration.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/Duration.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.streaming
+package org.apache.spark.streaming
 
-import spark.Utils
+import org.apache.spark.util.Utils
 
 case class Duration (private val millis: Long) {
 
@@ -57,7 +57,7 @@ case class Duration (private val millis: Long) {
 }
 
 /**
- * Helper object that creates instance of [[spark.streaming.Duration]] representing
+ * Helper object that creates instances of [[org.apache.spark.streaming.Duration]] representing
  * a given number of milliseconds.
  */
 object Milliseconds {
@@ -65,7 +65,7 @@ object Milliseconds {
 }
 
 /**
- * Helper object that creates instance of [[spark.streaming.Duration]] representing
+ * Helper object that creates instances of [[org.apache.spark.streaming.Duration]] representing
  * a given number of seconds.
  */
 object Seconds {
@@ -73,7 +73,7 @@ object Seconds {
 }
 
 /**
- * Helper object that creates instance of [[spark.streaming.Duration]] representing
+ * Helper object that creates instances of [[org.apache.spark.streaming.Duration]] representing
  * a given number of minutes.
  */
 object Minutes {
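The helper objects documented above are thin sugar over the Duration case class; a quick sketch of the equivalences (the + operator is assumed from the 0.8-era Duration API):

    import org.apache.spark.streaming.{Milliseconds, Minutes, Seconds}

    val thirtySecs = Seconds(30)               // same Duration as Milliseconds(30000)
    val window     = Minutes(2) + Seconds(30)  // 150 seconds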
diff --git a/streaming/src/main/scala/spark/streaming/Interval.scala b/streaming/src/main/scala/org/apache/spark/streaming/Interval.scala
similarity index 98%
rename from streaming/src/main/scala/spark/streaming/Interval.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/Interval.scala
index b30cd969e9fd0706842813c32a50652dc757a571..04c994c136932a19bce2a12e088444e5feacb280 100644
--- a/streaming/src/main/scala/spark/streaming/Interval.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/Interval.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.streaming
+package org.apache.spark.streaming
 
 private[streaming]
 class Interval(val beginTime: Time, val endTime: Time) {
diff --git a/streaming/src/main/scala/spark/streaming/Job.scala b/streaming/src/main/scala/org/apache/spark/streaming/Job.scala
similarity index 97%
rename from streaming/src/main/scala/spark/streaming/Job.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/Job.scala
index ceb3f92b65b51c707830ac2700d656e12effa5a3..2128b7c7a64c27c98a8d88db6d27f801b8cf606e 100644
--- a/streaming/src/main/scala/spark/streaming/Job.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/Job.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.streaming
+package org.apache.spark.streaming
 
 import java.util.concurrent.atomic.AtomicLong
 
diff --git a/streaming/src/main/scala/spark/streaming/JobManager.scala b/streaming/src/main/scala/org/apache/spark/streaming/JobManager.scala
similarity index 96%
rename from streaming/src/main/scala/spark/streaming/JobManager.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/JobManager.scala
index a31230689f95f3c26b0657b77e692141aa50cc9c..5233129506f9e8b2cdd4b0a208d8d01972564d0d 100644
--- a/streaming/src/main/scala/spark/streaming/JobManager.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/JobManager.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.streaming
+package org.apache.spark.streaming
 
-import spark.Logging 
-import spark.SparkEnv
+import org.apache.spark.Logging
+import org.apache.spark.SparkEnv
 import java.util.concurrent.Executors
 import collection.mutable.HashMap
 import collection.mutable.ArrayBuffer
diff --git a/streaming/src/main/scala/spark/streaming/NetworkInputTracker.scala b/streaming/src/main/scala/org/apache/spark/streaming/NetworkInputTracker.scala
similarity index 95%
rename from streaming/src/main/scala/spark/streaming/NetworkInputTracker.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/NetworkInputTracker.scala
index d4cf2e568ca34062596edfe46c5be83a037591c8..aae79a4e6fefd61d199fe46c95bf73be4a2a8c4b 100644
--- a/streaming/src/main/scala/spark/streaming/NetworkInputTracker.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/NetworkInputTracker.scala
@@ -15,13 +15,13 @@
  * limitations under the License.
  */
 
-package spark.streaming
+package org.apache.spark.streaming
 
-import spark.streaming.dstream.{NetworkInputDStream, NetworkReceiver}
-import spark.streaming.dstream.{StopReceiver, ReportBlock, ReportError}
-import spark.Logging
-import spark.SparkEnv
-import spark.SparkContext._
+import org.apache.spark.streaming.dstream.{NetworkInputDStream, NetworkReceiver}
+import org.apache.spark.streaming.dstream.{StopReceiver, ReportBlock, ReportError}
+import org.apache.spark.Logging
+import org.apache.spark.SparkEnv
+import org.apache.spark.SparkContext._
 
 import scala.collection.mutable.HashMap
 import scala.collection.mutable.Queue
diff --git a/streaming/src/main/scala/spark/streaming/PairDStreamFunctions.scala b/streaming/src/main/scala/org/apache/spark/streaming/PairDStreamFunctions.scala
similarity index 95%
rename from streaming/src/main/scala/spark/streaming/PairDStreamFunctions.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/PairDStreamFunctions.scala
index 47bf07bee1318c24ba20ceec58559a6d4be74021..757bc98981ca9dcc1810a34303394afe20e9b04e 100644
--- a/streaming/src/main/scala/spark/streaming/PairDStreamFunctions.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/PairDStreamFunctions.scala
@@ -15,16 +15,17 @@
  * limitations under the License.
  */
 
-package spark.streaming
+package org.apache.spark.streaming
 
-import spark.streaming.StreamingContext._
-import spark.streaming.dstream.{ReducedWindowedDStream, StateDStream}
-import spark.streaming.dstream.{CoGroupedDStream, ShuffledDStream}
-import spark.streaming.dstream.{MapValuedDStream, FlatMapValuedDStream}
+import org.apache.spark.streaming.StreamingContext._
+import org.apache.spark.streaming.dstream.{ReducedWindowedDStream, StateDStream}
+import org.apache.spark.streaming.dstream.{CoGroupedDStream, ShuffledDStream}
+import org.apache.spark.streaming.dstream.{MapValuedDStream, FlatMapValuedDStream}
 
-import spark.{Manifests, RDD, Partitioner, HashPartitioner}
-import spark.SparkContext._
-import spark.storage.StorageLevel
+import org.apache.spark.{Partitioner, HashPartitioner}
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.{Manifests, RDD, PairRDDFunctions}
+import org.apache.spark.storage.StorageLevel
 
 import scala.collection.mutable.ArrayBuffer
 
@@ -60,7 +61,7 @@ extends Serializable {
   }
 
   /**
-   * Return a new DStream by applying `groupByKey` on each RDD. The supplied [[spark.Partitioner]]
+   * Return a new DStream by applying `groupByKey` on each RDD. The supplied [[org.apache.spark.Partitioner]]
    * is used to control the partitioning of each RDD.
    */
   def groupByKey(partitioner: Partitioner): DStream[(K, Seq[V])] = {
@@ -91,7 +92,7 @@ extends Serializable {
 
   /**
    * Return a new DStream by applying `reduceByKey` to each RDD. The values for each key are
-   * merged using the supplied reduce function. [[spark.Partitioner]] is used to control the
+   * merged using the supplied reduce function. [[org.apache.spark.Partitioner]] is used to control the
    * partitioning of each RDD.
    */
   def reduceByKey(reduceFunc: (V, V) => V, partitioner: Partitioner): DStream[(K, V)] = {
@@ -101,8 +102,8 @@ extends Serializable {
 
   /**
    * Combine elements of each key in DStream's RDDs using custom functions. This is similar to the
-   * combineByKey for RDDs. Please refer to combineByKey in [[spark.PairRDDFunctions]] for more
-   * information.
+   * combineByKey for RDDs. Please refer to combineByKey in
+   * [[org.apache.spark.rdd.PairRDDFunctions]] for more information.
    */
   def combineByKey[C: ClassManifest](
     createCombiner: V => C,
@@ -360,7 +361,7 @@ extends Serializable {
   /**
    * Create a new "state" DStream where the state for each key is updated by applying
    * the given function on the previous state of the key and the new values of the key.
-   * [[spark.Partitioner]] is used to control the partitioning of each RDD.
+   * [[org.apache.spark.Partitioner]] is used to control the partitioning of each RDD.
    * @param updateFunc State update function. If `this` function returns None, then
    *                   corresponding state key-value pair will be eliminated.
    * @param partitioner Partitioner for controlling the partitioning of each RDD in the new DStream.
@@ -379,7 +380,7 @@ extends Serializable {
   /**
    * Return a new "state" DStream where the state for each key is updated by applying
    * the given function on the previous state of the key and the new values of each key.
-   * [[spark.Partitioner]] is used to control the partitioning of each RDD.
+   * [[org.apache.spark.Partitioner]] is used to control the partitioning of each RDD.
    * @param updateFunc State update function. If `this` function returns None, then
    *                   corresponding state key-value pair will be eliminated. Note, that
    *                   this function may generate a different tuple with a different key
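
Since the operations documented above take an explicit Partitioner, here is a short sketch of how the renamed imports fit together for reduceByKey and updateStateByKey. The socket source, checkpoint directory and partition count are illustrative assumptions:

    import org.apache.spark.HashPartitioner
    import org.apache.spark.streaming.{Seconds, StreamingContext}
    import org.apache.spark.streaming.StreamingContext._  // implicit pair functions

    val ssc = new StreamingContext("local[2]", "PairOps", Seconds(1))
    ssc.checkpoint("checkpoint")  // updateStateByKey requires a checkpoint directory

    val pairs = ssc.socketTextStream("localhost", 9999).map(word => (word, 1))

    // reduceByKey with an explicit partitioner, as described above
    val counts = pairs.reduceByKey(_ + _, new HashPartitioner(4))

    // Running total per key; returning None would eliminate that key's state.
    val totals = pairs.updateStateByKey[Int] { (values: Seq[Int], state: Option[Int]) =>
      Some(values.sum + state.getOrElse(0))
    }
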
diff --git a/streaming/src/main/scala/spark/streaming/Scheduler.scala b/streaming/src/main/scala/org/apache/spark/streaming/Scheduler.scala
similarity index 95%
rename from streaming/src/main/scala/spark/streaming/Scheduler.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/Scheduler.scala
index 252cc2a30327c2945f6cad68cfa3966e953fabe6..ed892e33e6c3467f9d9aeef923dc2199a711b89b 100644
--- a/streaming/src/main/scala/spark/streaming/Scheduler.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/Scheduler.scala
@@ -15,11 +15,11 @@
  * limitations under the License.
  */
 
-package spark.streaming
+package org.apache.spark.streaming
 
 import util.{ManualClock, RecurringTimer, Clock}
-import spark.SparkEnv
-import spark.Logging
+import org.apache.spark.SparkEnv
+import org.apache.spark.Logging
 
 private[streaming]
 class Scheduler(ssc: StreamingContext) extends Logging {
@@ -34,7 +34,8 @@ class Scheduler(ssc: StreamingContext) extends Logging {
     null
   }
 
-  val clockClass = System.getProperty("spark.streaming.clock", "spark.streaming.util.SystemClock")
+  val clockClass = System.getProperty(
+    "spark.streaming.clock", "org.apache.spark.streaming.util.SystemClock")
   val clock = Class.forName(clockClass).newInstance().asInstanceOf[Clock]
   val timer = new RecurringTimer(clock, ssc.graph.batchDuration.milliseconds,
     longTime => generateJobs(new Time(longTime)))
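
The spark.streaming.clock property read here is also what the test suites use to substitute a deterministic clock, so any configuration that still names the old spark.streaming.util.SystemClock class will now fail to load. A sketch of the test-side usage, assuming the property is set before the StreamingContext is created:

    // The clock class is resolved reflectively from this property.
    System.setProperty("spark.streaming.clock", "org.apache.spark.streaming.util.ManualClock")
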
diff --git a/streaming/src/main/scala/spark/streaming/StreamingContext.scala b/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala
similarity index 97%
rename from streaming/src/main/scala/spark/streaming/StreamingContext.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala
index 62c95b573aaa3d04f75bb966680a58f43d02b9e0..878725c705db71807c567c909b1b6ab0f48f7f94 100644
--- a/streaming/src/main/scala/spark/streaming/StreamingContext.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala
@@ -15,21 +15,22 @@
  * limitations under the License.
  */
 
-package spark.streaming
+package org.apache.spark.streaming
 
 import akka.actor.Props
 import akka.actor.SupervisorStrategy
 import akka.zeromq.Subscribe
 
-import spark.streaming.dstream._
+import org.apache.spark.streaming.dstream._
 
-import spark._
-import spark.streaming.receivers.ActorReceiver
-import spark.streaming.receivers.ReceiverSupervisorStrategy
-import spark.streaming.receivers.ZeroMQReceiver
-import spark.storage.StorageLevel
-import spark.util.MetadataCleaner
-import spark.streaming.receivers.ActorReceiver
+import org.apache.spark._
+import org.apache.spark.rdd.RDD
+import org.apache.spark.streaming.receivers.ActorReceiver
+import org.apache.spark.streaming.receivers.ReceiverSupervisorStrategy
+import org.apache.spark.streaming.receivers.ZeroMQReceiver
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.util.MetadataCleaner
+import org.apache.spark.streaming.receivers.ActorReceiver
 
 import scala.collection.mutable.Queue
 import scala.collection.Map
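
For applications, the visible effect of this file's changes is the import prefix. A minimal word-count skeleton under the new layout (host, port and master are placeholders):

    import org.apache.spark.streaming.{Seconds, StreamingContext}
    import org.apache.spark.streaming.StreamingContext._

    object WordCount {
      def main(args: Array[String]) {
        val ssc = new StreamingContext("local[2]", "WordCount", Seconds(1))
        val lines = ssc.socketTextStream("localhost", 9999)
        val counts = lines.flatMap(_.split(" ")).map((_, 1)).reduceByKey(_ + _)
        counts.print()
        ssc.start()
      }
    }
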
diff --git a/streaming/src/main/scala/spark/streaming/Time.scala b/streaming/src/main/scala/org/apache/spark/streaming/Time.scala
similarity index 98%
rename from streaming/src/main/scala/spark/streaming/Time.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/Time.scala
index ad5eab9dd2aad52d5dca5a5f6bcbb7defdd5609f..2678334f53844f2c388d83e393ff305224071c66 100644
--- a/streaming/src/main/scala/spark/streaming/Time.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/Time.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.streaming
+package org.apache.spark.streaming
 
 /**
  * This is a simple class that represents an absolute instant of time.
diff --git a/streaming/src/main/scala/spark/streaming/api/java/JavaDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStream.scala
similarity index 90%
rename from streaming/src/main/scala/spark/streaming/api/java/JavaDStream.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStream.scala
index 7dcb1d713d922c70e58402c10ddf9f816e93c5d4..d1932b6b05a093fbf814f274dc6dc488bca73053 100644
--- a/streaming/src/main/scala/spark/streaming/api/java/JavaDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStream.scala
@@ -15,17 +15,17 @@
  * limitations under the License.
  */
 
-package spark.streaming.api.java
+package org.apache.spark.streaming.api.java
 
-import spark.streaming.{Duration, Time, DStream}
-import spark.api.java.function.{Function => JFunction}
-import spark.api.java.JavaRDD
-import spark.storage.StorageLevel
-import spark.RDD
+import org.apache.spark.streaming.{Duration, Time, DStream}
+import org.apache.spark.api.java.function.{Function => JFunction}
+import org.apache.spark.api.java.JavaRDD
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.rdd.RDD
 
 /**
  * A Discretized Stream (DStream), the basic abstraction in Spark Streaming, is a continuous
- * sequence of RDDs (of the same type) representing a continuous stream of data (see [[spark.RDD]]
+ * sequence of RDDs (of the same type) representing a continuous stream of data (see [[org.apache.spark.rdd.RDD]]
  * for more details on RDDs). DStreams can either be created from live data (such as, data from
 * HDFS, Kafka or Flume) or it can be generated by transforming existing DStreams using operations
  * such as `map`, `window` and `reduceByKeyAndWindow`. While a Spark Streaming program is running, each
@@ -33,7 +33,7 @@ import spark.RDD
  * by a parent DStream.
  *
  * This class contains the basic operations available on all DStreams, such as `map`, `filter` and
- * `window`. In addition, [[spark.streaming.api.java.JavaPairDStream]] contains operations available
+ * `window`. In addition, [[org.apache.spark.streaming.api.java.JavaPairDStream]] contains operations available
  * only on DStreams of key-value pairs, such as `groupByKeyAndWindow` and `join`.
  *
 * DStreams internally are characterized by a few basic properties:
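
The window operations this comment mentions keep their shapes; only the doc links move. A hedged Scala sketch of window and reduceByKeyAndWindow (source and durations are illustrative):

    import org.apache.spark.streaming.{Seconds, StreamingContext}
    import org.apache.spark.streaming.StreamingContext._

    val ssc = new StreamingContext("local[2]", "Windows", Seconds(1))
    val pairs = ssc.socketTextStream("localhost", 9999).map(w => (w, 1))

    // Every 10 seconds, look back over the last 30 seconds of data.
    val windowed = pairs.window(Seconds(30), Seconds(10))

    // The same window expressed as a per-key reduce.
    val counts = pairs.reduceByKeyAndWindow((a: Int, b: Int) => a + b, Seconds(30), Seconds(10))
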
diff --git a/streaming/src/main/scala/spark/streaming/api/java/JavaDStreamLike.scala b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala
similarity index 97%
rename from streaming/src/main/scala/spark/streaming/api/java/JavaDStreamLike.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala
index 3ab5c1fddeb297192f2be6afaa3f8705927743d0..459695b7cabab6c70da9a646d5697592ae35703b 100644
--- a/streaming/src/main/scala/spark/streaming/api/java/JavaDStreamLike.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala
@@ -15,18 +15,18 @@
  * limitations under the License.
  */
 
-package spark.streaming.api.java
+package org.apache.spark.streaming.api.java
 
 import java.util.{List => JList}
 import java.lang.{Long => JLong}
 
 import scala.collection.JavaConversions._
 
-import spark.streaming._
-import spark.api.java.{JavaPairRDD, JavaRDDLike, JavaRDD}
-import spark.api.java.function.{Function2 => JFunction2, Function => JFunction, _}
+import org.apache.spark.streaming._
+import org.apache.spark.api.java.{JavaPairRDD, JavaRDDLike, JavaRDD}
+import org.apache.spark.api.java.function.{Function2 => JFunction2, Function => JFunction, _}
 import java.util
-import spark.RDD
+import org.apache.spark.rdd.RDD
 import JavaDStream._
 
 trait JavaDStreamLike[T, This <: JavaDStreamLike[T, This, R], R <: JavaRDDLike[T, R]]
diff --git a/streaming/src/main/scala/spark/streaming/api/java/JavaPairDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaPairDStream.scala
similarity index 97%
rename from streaming/src/main/scala/spark/streaming/api/java/JavaPairDStream.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaPairDStream.scala
index ea08fb38267941807a524d3f38330798bfc0b4ff..978fca33ad03e344a355c72dd3f3eff0658acc35 100644
--- a/streaming/src/main/scala/spark/streaming/api/java/JavaPairDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaPairDStream.scala
@@ -15,24 +15,25 @@
  * limitations under the License.
  */
 
-package spark.streaming.api.java
+package org.apache.spark.streaming.api.java
 
 import java.util.{List => JList}
 import java.lang.{Long => JLong}
 
 import scala.collection.JavaConversions._
 
-import spark.streaming._
-import spark.streaming.StreamingContext._
-import spark.api.java.function.{Function => JFunction, Function2 => JFunction2}
-import spark.{RDD, Partitioner}
+import org.apache.spark.streaming._
+import org.apache.spark.streaming.StreamingContext._
+import org.apache.spark.api.java.function.{Function => JFunction, Function2 => JFunction2}
+import org.apache.spark.Partitioner
 import org.apache.hadoop.mapred.{JobConf, OutputFormat}
 import org.apache.hadoop.mapreduce.{OutputFormat => NewOutputFormat}
 import org.apache.hadoop.conf.Configuration
-import spark.api.java.{JavaUtils, JavaRDD, JavaPairRDD}
-import spark.storage.StorageLevel
+import org.apache.spark.api.java.{JavaUtils, JavaRDD, JavaPairRDD}
+import org.apache.spark.storage.StorageLevel
 import com.google.common.base.Optional
-import spark.RDD
+import org.apache.spark.rdd.RDD
+import org.apache.spark.rdd.PairRDDFunctions
 
 class JavaPairDStream[K, V](val dstream: DStream[(K, V)])(
     implicit val kManifiest: ClassManifest[K],
@@ -114,7 +115,7 @@ class JavaPairDStream[K, V](val dstream: DStream[(K, V)])(
   /**
    * Return a new DStream by applying `groupByKey` on each RDD of `this` DStream.
    * Therefore, the values for each key in `this` DStream's RDDs are grouped into a
-   * single sequence to generate the RDDs of the new DStream. [[spark.Partitioner]]
+   * single sequence to generate the RDDs of the new DStream. [[org.apache.spark.Partitioner]]
    * is used to control the partitioning of each RDD.
    */
   def groupByKey(partitioner: Partitioner): JavaPairDStream[K, JList[V]] =
@@ -138,7 +139,7 @@ class JavaPairDStream[K, V](val dstream: DStream[(K, V)])(
 
   /**
    * Return a new DStream by applying `reduceByKey` to each RDD. The values for each key are
-   * merged using the supplied reduce function. [[spark.Partitioner]] is used to control the
+   * merged using the supplied reduce function. [[org.apache.spark.Partitioner]] is used to control the
    * partitioning of each RDD.
    */
   def reduceByKey(func: JFunction2[V, V, V], partitioner: Partitioner): JavaPairDStream[K, V] = {
@@ -147,7 +148,7 @@ class JavaPairDStream[K, V](val dstream: DStream[(K, V)])(
 
   /**
    * Combine elements of each key in DStream's RDDs using custom function. This is similar to the
-   * combineByKey for RDDs. Please refer to combineByKey in [[spark.PairRDDFunctions]] for more
+   * combineByKey for RDDs. Please refer to combineByKey in [[PairRDDFunctions]] for more
    * information.
    */
   def combineByKey[C](createCombiner: JFunction[V, C],
@@ -445,7 +446,7 @@ class JavaPairDStream[K, V](val dstream: DStream[(K, V)])(
   /**
    * Create a new "state" DStream where the state for each key is updated by applying
    * the given function on the previous state of the key and the new values of the key.
-   * [[spark.Partitioner]] is used to control the partitioning of each RDD.
+   * [[org.apache.spark.Partitioner]] is used to control the partitioning of each RDD.
    * @param updateFunc State update function. If `this` function returns None, then
    *                   corresponding state key-value pair will be eliminated.
    * @param partitioner Partitioner for controlling the partitioning of each RDD in the new DStream.
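
combineByKey is the most general of the per-key operations referenced in these comments. A sketch that computes a per-key average via a (sum, count) accumulator; the comma-separated input format and partition count are assumptions for illustration:

    import org.apache.spark.HashPartitioner
    import org.apache.spark.streaming.{Seconds, StreamingContext}
    import org.apache.spark.streaming.StreamingContext._

    val ssc = new StreamingContext("local[2]", "Averages", Seconds(1))
    val nums = ssc.socketTextStream("localhost", 9999).map { line =>
      val Array(key, value) = line.split(",")
      (key, value.toInt)
    }

    // Per-key (sum, count), merged with an explicit partitioner.
    val sumAndCount = nums.combineByKey[(Int, Int)](
      (v: Int) => (v, 1),                                           // createCombiner
      (acc: (Int, Int), v: Int) => (acc._1 + v, acc._2 + 1),        // mergeValue
      (a: (Int, Int), b: (Int, Int)) => (a._1 + b._1, a._2 + b._2), // mergeCombiner
      new HashPartitioner(4))

    val averages = sumAndCount.mapValues { case (sum, count) => sum.toDouble / count }
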
diff --git a/streaming/src/main/scala/spark/streaming/api/java/JavaStreamingContext.scala b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala
similarity index 97%
rename from streaming/src/main/scala/spark/streaming/api/java/JavaStreamingContext.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala
index b7720ad0ea1b527dbdc1d99afdf90f0eb46becc5..54ba3e602590f806906f75f45ec6df22bde66da3 100644
--- a/streaming/src/main/scala/spark/streaming/api/java/JavaStreamingContext.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala
@@ -15,25 +15,29 @@
  * limitations under the License.
  */
 
-package spark.streaming.api.java
-
-import spark.streaming._
-import receivers.{ActorReceiver, ReceiverSupervisorStrategy}
-import spark.streaming.dstream._
-import spark.storage.StorageLevel
-import spark.api.java.function.{Function => JFunction, Function2 => JFunction2}
-import spark.api.java.{JavaSparkContext, JavaRDD}
+package org.apache.spark.streaming.api.java
+
+import java.lang.{Long => JLong, Integer => JInt}
+import java.io.InputStream
+import java.util.{Map => JMap}
+
+import scala.collection.JavaConversions._
+
 import org.apache.hadoop.mapreduce.{InputFormat => NewInputFormat}
 import twitter4j.Status
 import akka.actor.Props
 import akka.actor.SupervisorStrategy
 import akka.zeromq.Subscribe
-import scala.collection.JavaConversions._
-import java.lang.{Long => JLong, Integer => JInt}
-import java.io.InputStream
-import java.util.{Map => JMap}
 import twitter4j.auth.Authorization
 
+import org.apache.spark.rdd.RDD
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.api.java.function.{Function => JFunction, Function2 => JFunction2}
+import org.apache.spark.api.java.{JavaSparkContext, JavaRDD}
+import org.apache.spark.streaming._
+import org.apache.spark.streaming.dstream._
+import org.apache.spark.streaming.receivers.{ActorReceiver, ReceiverSupervisorStrategy}
+
 /**
  * A StreamingContext is the main entry point for Spark Streaming functionality. Besides the basic
  * information (such as, cluster URL and job name) to internally create a SparkContext, it provides
@@ -537,7 +541,7 @@ class JavaStreamingContext(val ssc: StreamingContext) {
   def queueStream[T](queue: java.util.Queue[JavaRDD[T]]): JavaDStream[T] = {
     implicit val cm: ClassManifest[T] =
       implicitly[ClassManifest[AnyRef]].asInstanceOf[ClassManifest[T]]
-    val sQueue = new scala.collection.mutable.Queue[spark.RDD[T]]
+    val sQueue = new scala.collection.mutable.Queue[RDD[T]]
     sQueue.enqueue(queue.map(_.rdd).toSeq: _*)
     ssc.queueStream(sQueue)
   }
@@ -554,7 +558,7 @@ class JavaStreamingContext(val ssc: StreamingContext) {
   def queueStream[T](queue: java.util.Queue[JavaRDD[T]], oneAtATime: Boolean): JavaDStream[T] = {
     implicit val cm: ClassManifest[T] =
       implicitly[ClassManifest[AnyRef]].asInstanceOf[ClassManifest[T]]
-    val sQueue = new scala.collection.mutable.Queue[spark.RDD[T]]
+    val sQueue = new scala.collection.mutable.Queue[RDD[T]]
     sQueue.enqueue(queue.map(_.rdd).toSeq: _*)
     ssc.queueStream(sQueue, oneAtATime)
   }
@@ -575,7 +579,7 @@ class JavaStreamingContext(val ssc: StreamingContext) {
       defaultRDD: JavaRDD[T]): JavaDStream[T] = {
     implicit val cm: ClassManifest[T] =
       implicitly[ClassManifest[AnyRef]].asInstanceOf[ClassManifest[T]]
-    val sQueue = new scala.collection.mutable.Queue[spark.RDD[T]]
+    val sQueue = new scala.collection.mutable.Queue[RDD[T]]
     sQueue.enqueue(queue.map(_.rdd).toSeq: _*)
     ssc.queueStream(sQueue, oneAtATime, defaultRDD.rdd)
   }
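
The three queueStream overloads above differ only in how the queue and defaults are supplied; after the rename the queue must hold org.apache.spark.rdd.RDD values. A sketch of the Scala-side equivalent (queue contents are illustrative):

    import scala.collection.mutable.Queue

    import org.apache.spark.SparkContext
    import org.apache.spark.rdd.RDD
    import org.apache.spark.streaming.{Seconds, StreamingContext}

    val sc = new SparkContext("local[2]", "QueueStream")
    val ssc = new StreamingContext(sc, Seconds(1))

    // Each dequeued RDD becomes one batch of the input stream.
    val rddQueue = new Queue[RDD[Int]]()
    val inputStream = ssc.queueStream(rddQueue, oneAtATime = true)

    rddQueue += sc.makeRDD(1 to 100, 2)
    inputStream.print()
    ssc.start()
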
diff --git a/streaming/src/main/scala/spark/streaming/dstream/CoGroupedDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/CoGroupedDStream.scala
similarity index 89%
rename from streaming/src/main/scala/spark/streaming/dstream/CoGroupedDStream.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/dstream/CoGroupedDStream.scala
index 99553d295d55b168694947e5a49f4ca02dc9d58a..4eddc755b97f46e0e055917c3815363ae8f8b789 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/CoGroupedDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/CoGroupedDStream.scala
@@ -15,11 +15,12 @@
  * limitations under the License.
  */
 
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
 
-import spark.{RDD, Partitioner}
-import spark.rdd.CoGroupedRDD
-import spark.streaming.{Time, DStream, Duration}
+import org.apache.spark.Partitioner
+import org.apache.spark.rdd.RDD
+import org.apache.spark.rdd.CoGroupedRDD
+import org.apache.spark.streaming.{Time, DStream, Duration}
 
 private[streaming]
 class CoGroupedDStream[K : ClassManifest](
diff --git a/streaming/src/main/scala/spark/streaming/dstream/ConstantInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/ConstantInputDStream.scala
similarity index 89%
rename from streaming/src/main/scala/spark/streaming/dstream/ConstantInputDStream.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/dstream/ConstantInputDStream.scala
index 095137092a96735ab91ab7283cf6c4b1e0298c6f..a9a05c9981f7436af96dc15b28bd282002c66c19 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/ConstantInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/ConstantInputDStream.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
 
-import spark.RDD
-import spark.streaming.{Time, StreamingContext}
+import org.apache.spark.rdd.RDD
+import org.apache.spark.streaming.{Time, StreamingContext}
 
 /**
  * An input stream that always returns the same RDD on each timestep. Useful for testing.
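
As the comment says, this stream is mainly a testing aid. A rough sketch of wiring one up by hand; treat the registerInputStream call as an assumption about how input streams are attached at this point in the codebase:

    import org.apache.spark.SparkContext
    import org.apache.spark.streaming.{Seconds, StreamingContext}
    import org.apache.spark.streaming.dstream.ConstantInputDStream

    val sc = new SparkContext("local[2]", "ConstantInput")
    val ssc = new StreamingContext(sc, Seconds(1))

    // The same RDD is returned on every batch interval.
    val constantStream = new ConstantInputDStream(ssc, sc.makeRDD(1 to 3))
    ssc.registerInputStream(constantStream)  // assumed registration hook
    constantStream.print()
    ssc.start()
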
diff --git a/streaming/src/main/scala/spark/streaming/dstream/FileInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FileInputDStream.scala
similarity index 97%
rename from streaming/src/main/scala/spark/streaming/dstream/FileInputDStream.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/dstream/FileInputDStream.scala
index de0536125de0e43412ca2af8f668661d07c7f4f7..fea0573b77046df0eec6f7753723ec963383105c 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/FileInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FileInputDStream.scala
@@ -15,11 +15,11 @@
  * limitations under the License.
  */
 
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
 
-import spark.RDD
-import spark.rdd.UnionRDD
-import spark.streaming.{DStreamCheckpointData, StreamingContext, Time}
+import org.apache.spark.rdd.RDD
+import org.apache.spark.rdd.UnionRDD
+import org.apache.spark.streaming.{DStreamCheckpointData, StreamingContext, Time}
 
 import org.apache.hadoop.fs.{FileSystem, Path, PathFilter}
 import org.apache.hadoop.conf.Configuration
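
FileInputDStream is normally reached through the textFileStream and fileStream helpers on StreamingContext, so for users this change is again only the import path. A sketch (the monitored directory is a placeholder):

    import org.apache.spark.streaming.{Seconds, StreamingContext}

    val ssc = new StreamingContext("local[2]", "FileInput", Seconds(30))

    // Each batch picks up files newly added to the monitored directory.
    val logs = ssc.textFileStream("hdfs://namenode:8020/incoming")
    logs.count().print()
    ssc.start()
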
diff --git a/streaming/src/main/scala/spark/streaming/dstream/FilteredDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FilteredDStream.scala
similarity index 89%
rename from streaming/src/main/scala/spark/streaming/dstream/FilteredDStream.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/dstream/FilteredDStream.scala
index 9d8c5c3175b9af7090b59026cc4c15d13d622272..91ee2c1a36fa383a42a2c7ca940fe8a913ce974b 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/FilteredDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FilteredDStream.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
 
-import spark.streaming.{Duration, DStream, Time}
-import spark.RDD
+import org.apache.spark.streaming.{Duration, DStream, Time}
+import org.apache.spark.rdd.RDD
 
 private[streaming]
 class FilteredDStream[T: ClassManifest](
diff --git a/streaming/src/main/scala/spark/streaming/dstream/FlatMapValuedDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FlatMapValuedDStream.scala
similarity index 88%
rename from streaming/src/main/scala/spark/streaming/dstream/FlatMapValuedDStream.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/dstream/FlatMapValuedDStream.scala
index 78d7117f0f71c9e5400f71ec3c33e761bbb4ea67..ca7d7ca49effd727cf04eb0629e13b6fe63b7de9 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/FlatMapValuedDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FlatMapValuedDStream.scala
@@ -15,11 +15,11 @@
  * limitations under the License.
  */
 
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
 
-import spark.streaming.{Duration, DStream, Time}
-import spark.RDD
-import spark.SparkContext._
+import org.apache.spark.streaming.{Duration, DStream, Time}
+import org.apache.spark.rdd.RDD
+import org.apache.spark.SparkContext._
 
 private[streaming]
 class FlatMapValuedDStream[K: ClassManifest, V: ClassManifest, U: ClassManifest](
diff --git a/streaming/src/main/scala/spark/streaming/dstream/FlatMappedDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FlatMappedDStream.scala
similarity index 90%
rename from streaming/src/main/scala/spark/streaming/dstream/FlatMappedDStream.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/dstream/FlatMappedDStream.scala
index d13bebb10f63f032fedf0f36a8079e3866b85f02..b37966f9a79bd94da5460aee48f085eb1041f89b 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/FlatMappedDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FlatMappedDStream.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
 
-import spark.streaming.{Duration, DStream, Time}
-import spark.RDD
+import org.apache.spark.streaming.{Duration, DStream, Time}
+import org.apache.spark.rdd.RDD
 
 private[streaming]
 class FlatMappedDStream[T: ClassManifest, U: ClassManifest](
diff --git a/streaming/src/main/scala/spark/streaming/dstream/FlumeInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FlumeInputDStream.scala
similarity index 96%
rename from streaming/src/main/scala/spark/streaming/dstream/FlumeInputDStream.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/dstream/FlumeInputDStream.scala
index 4906f503c29c65f520cb8850239a6d22d46d6e31..18de772946805607fca9a421845e87af23c0683b 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/FlumeInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FlumeInputDStream.scala
@@ -15,12 +15,13 @@
  * limitations under the License.
  */
 
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
 
-import spark.streaming.StreamingContext
+import java.net.InetSocketAddress
+import java.io.{ObjectInput, ObjectOutput, Externalizable}
+import java.nio.ByteBuffer
 
-import spark.Utils
-import spark.storage.StorageLevel
+import scala.collection.JavaConversions._
 
 import org.apache.flume.source.avro.AvroSourceProtocol
 import org.apache.flume.source.avro.AvroFlumeEvent
@@ -28,11 +29,9 @@ import org.apache.flume.source.avro.Status
 import org.apache.avro.ipc.specific.SpecificResponder
 import org.apache.avro.ipc.NettyServer
 
-import scala.collection.JavaConversions._
-
-import java.net.InetSocketAddress
-import java.io.{ObjectInput, ObjectOutput, Externalizable}
-import java.nio.ByteBuffer
+import org.apache.spark.streaming.StreamingContext
+import org.apache.spark.util.Utils
+import org.apache.spark.storage.StorageLevel
 
 private[streaming]
 class FlumeInputDStream[T: ClassManifest](
diff --git a/streaming/src/main/scala/spark/streaming/dstream/ForEachDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/ForEachDStream.scala
similarity index 90%
rename from streaming/src/main/scala/spark/streaming/dstream/ForEachDStream.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/dstream/ForEachDStream.scala
index 7df537eb560e27d6c79669d38d7aa61f39fd5a3a..e21bac460255c7e59416fc4d5f2f664654dc04a7 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/ForEachDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/ForEachDStream.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
 
-import spark.RDD
-import spark.streaming.{Duration, DStream, Job, Time}
+import org.apache.spark.rdd.RDD
+import org.apache.spark.streaming.{Duration, DStream, Job, Time}
 
 private[streaming]
 class ForEachDStream[T: ClassManifest] (
diff --git a/streaming/src/main/scala/spark/streaming/dstream/GlommedDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/GlommedDStream.scala
similarity index 89%
rename from streaming/src/main/scala/spark/streaming/dstream/GlommedDStream.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/dstream/GlommedDStream.scala
index 06fda6fe8e40f8f3ad6081ae5afe8f90930fb759..4294b07d910f14f9f7bf10b4c2d834dc55fd3597 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/GlommedDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/GlommedDStream.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
 
-import spark.streaming.{Duration, DStream, Time}
-import spark.RDD
+import org.apache.spark.streaming.{Duration, DStream, Time}
+import org.apache.spark.rdd.RDD
 
 private[streaming]
 class GlommedDStream[T: ClassManifest](parent: DStream[T])
diff --git a/streaming/src/main/scala/spark/streaming/dstream/InputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/InputDStream.scala
similarity index 95%
rename from streaming/src/main/scala/spark/streaming/dstream/InputDStream.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/dstream/InputDStream.scala
index 4dbdec459d025de6a7d45f32e4e274de08f887eb..674b27118caef7fb07cbe6c3c3a2faa90d78051f 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/InputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/InputDStream.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
 
-import spark.streaming.{Time, Duration, StreamingContext, DStream}
+import org.apache.spark.streaming.{Time, Duration, StreamingContext, DStream}
 
 /**
 * This is the abstract base class for all input streams. This class provides two methods
diff --git a/streaming/src/main/scala/spark/streaming/dstream/KafkaInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/KafkaInputDStream.scala
similarity index 96%
rename from streaming/src/main/scala/spark/streaming/dstream/KafkaInputDStream.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/dstream/KafkaInputDStream.scala
index 6ee588af15451fd7df12b957bc0d583fc973f9ab..51e913675d24acbfd04534e6d94ed2edc2cdc4af 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/KafkaInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/KafkaInputDStream.scala
@@ -15,11 +15,11 @@
  * limitations under the License.
  */
 
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
 
-import spark.Logging
-import spark.storage.StorageLevel
-import spark.streaming.{Time, DStreamCheckpointData, StreamingContext}
+import org.apache.spark.Logging
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.streaming.{Time, DStreamCheckpointData, StreamingContext}
 
 import java.util.Properties
 import java.util.concurrent.Executors
diff --git a/streaming/src/main/scala/spark/streaming/dstream/MapPartitionedDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/MapPartitionedDStream.scala
similarity index 90%
rename from streaming/src/main/scala/spark/streaming/dstream/MapPartitionedDStream.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/dstream/MapPartitionedDStream.scala
index af41a1b9acdf1b5402164b27c22ae0951035516b..5329601a6f949820c41650f1bd3f08e82b40065d 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/MapPartitionedDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/MapPartitionedDStream.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
 
-import spark.streaming.{Duration, DStream, Time}
-import spark.RDD
+import org.apache.spark.streaming.{Duration, DStream, Time}
+import org.apache.spark.rdd.RDD
 
 private[streaming]
 class MapPartitionedDStream[T: ClassManifest, U: ClassManifest](
diff --git a/streaming/src/main/scala/spark/streaming/dstream/MapValuedDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/MapValuedDStream.scala
similarity index 87%
rename from streaming/src/main/scala/spark/streaming/dstream/MapValuedDStream.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/dstream/MapValuedDStream.scala
index 8d8a6161c6750e81dc994be08026c4cceeea469c..8290df90a2894d15d22daa3950da70fb1b01d89b 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/MapValuedDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/MapValuedDStream.scala
@@ -15,11 +15,11 @@
  * limitations under the License.
  */
 
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
 
-import spark.streaming.{Duration, DStream, Time}
-import spark.RDD
-import spark.SparkContext._
+import org.apache.spark.streaming.{Duration, DStream, Time}
+import org.apache.spark.rdd.RDD
+import org.apache.spark.SparkContext._
 
 private[streaming]
 class MapValuedDStream[K: ClassManifest, V: ClassManifest, U: ClassManifest](
diff --git a/streaming/src/main/scala/spark/streaming/dstream/MappedDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/MappedDStream.scala
similarity index 89%
rename from streaming/src/main/scala/spark/streaming/dstream/MappedDStream.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/dstream/MappedDStream.scala
index 3fda84a38a189b5a289305c7c51da60e42571188..b1682afea39244d72ed4f415619ce3475a842bd1 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/MappedDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/MappedDStream.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
 
-import spark.streaming.{Duration, DStream, Time}
-import spark.RDD
+import org.apache.spark.streaming.{Duration, DStream, Time}
+import org.apache.spark.rdd.RDD
 
 private[streaming]
 class MappedDStream[T: ClassManifest, U: ClassManifest] (
diff --git a/streaming/src/main/scala/spark/streaming/dstream/NetworkInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/NetworkInputDStream.scala
similarity index 93%
rename from streaming/src/main/scala/spark/streaming/dstream/NetworkInputDStream.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/dstream/NetworkInputDStream.scala
index 1db0a69a2f109869a6a70b0d474565ef6fb66a50..31f9891560372723c27e9f488e5517d2f9599988 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/NetworkInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/NetworkInputDStream.scala
@@ -15,30 +15,29 @@
  * limitations under the License.
  */
 
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
 
-import spark.streaming.{Time, StreamingContext, AddBlocks, RegisterReceiver, DeregisterReceiver}
-
-import spark.{Logging, SparkEnv, RDD}
-import spark.rdd.BlockRDD
-import spark.storage.StorageLevel
+import java.util.concurrent.ArrayBlockingQueue
+import java.nio.ByteBuffer
 
 import scala.collection.mutable.ArrayBuffer
 
-import java.nio.ByteBuffer
-
 import akka.actor.{Props, Actor}
 import akka.pattern.ask
 import akka.dispatch.Await
 import akka.util.duration._
-import spark.streaming.util.{RecurringTimer, SystemClock}
-import java.util.concurrent.ArrayBlockingQueue
+
+import org.apache.spark.streaming.util.{RecurringTimer, SystemClock}
+import org.apache.spark.streaming._
+import org.apache.spark.{Logging, SparkEnv}
+import org.apache.spark.rdd.{RDD, BlockRDD}
+import org.apache.spark.storage.StorageLevel
 
 /**
  * Abstract class for defining any InputDStream that has to start a receiver on worker
  * nodes to receive external data. Specific implementations of NetworkInputDStream must
  * define the getReceiver() function that gets the receiver object of type
- * [[spark.streaming.dstream.NetworkReceiver]] that will be sent to the workers to receive
+ * [[org.apache.spark.streaming.dstream.NetworkReceiver]] that will be sent to the workers to receive
  * data.
  * @param ssc_ Streaming context that will execute this input stream
  * @tparam T Class type of the object of this stream
@@ -83,7 +82,7 @@ private[streaming] case class ReportError(msg: String) extends NetworkReceiverMe
 
 /**
  * Abstract class of a receiver that can be run on worker nodes to receive external data. See
- * [[spark.streaming.dstream.NetworkInputDStream]] for an explanation.
+ * [[org.apache.spark.streaming.dstream.NetworkInputDStream]] for an explanation.
  */
 abstract class NetworkReceiver[T: ClassManifest]() extends Serializable with Logging {
 
@@ -202,7 +201,7 @@ abstract class NetworkReceiver[T: ClassManifest]() extends Serializable with Log
   }
 
   /**
-   * Batches objects created by a [[spark.streaming.dstream.NetworkReceiver]] and puts them into
+   * Batches objects created by a [[org.apache.spark.streaming.dstream.NetworkReceiver]] and puts them into
    * appropriately named blocks at regular intervals. This class starts two threads,
    * one to periodically start a new batch and prepare the previous batch as a block,
    * the other to push the blocks into the block manager.
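
A rough sketch of a custom receiver against the renamed classes, patterned on the SocketReceiver changed later in this patch; the onStart()/onStop() hooks and the receiver-local BlockGenerator are the pieces this file defines, while the blocking-queue source is purely illustrative:

    import java.util.concurrent.BlockingQueue

    import org.apache.spark.storage.StorageLevel
    import org.apache.spark.streaming.dstream.NetworkReceiver

    class LineReceiver(source: BlockingQueue[String]) extends NetworkReceiver[String] {

      // Batches received objects into named blocks and pushes them to the block manager.
      protected lazy val blockGenerator = new BlockGenerator(StorageLevel.MEMORY_ONLY_SER)

      override def getLocationPreference = None  // no placement preference

      protected def onStart() {
        blockGenerator.start()
        while (true) {
          blockGenerator += source.take()  // hand each record to the batching threads
        }
      }

      protected def onStop() {
        blockGenerator.stop()
      }
    }
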
diff --git a/streaming/src/main/scala/spark/streaming/dstream/PluggableInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/PluggableInputDStream.scala
similarity index 91%
rename from streaming/src/main/scala/spark/streaming/dstream/PluggableInputDStream.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/dstream/PluggableInputDStream.scala
index 33f7cd063f3d1da6a81bc7329b08b37b7ea48c9e..15782f5c119054555d9b9d0548e49bd0e4d7c3d3 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/PluggableInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/PluggableInputDStream.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
 
-import spark.streaming.StreamingContext
+import org.apache.spark.streaming.StreamingContext
 
 private[streaming]
 class PluggableInputDStream[T: ClassManifest](
diff --git a/streaming/src/main/scala/spark/streaming/dstream/QueueInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/QueueInputDStream.scala
similarity index 90%
rename from streaming/src/main/scala/spark/streaming/dstream/QueueInputDStream.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/dstream/QueueInputDStream.scala
index b269061b7370f63f7e2b58e8df62e542b1106220..7d9f3521b1ce7d2362019320ebe60a02b6e425b9 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/QueueInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/QueueInputDStream.scala
@@ -15,14 +15,14 @@
  * limitations under the License.
  */
 
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
 
-import spark.RDD
-import spark.rdd.UnionRDD
+import org.apache.spark.rdd.RDD
+import org.apache.spark.rdd.UnionRDD
 
 import scala.collection.mutable.Queue
 import scala.collection.mutable.ArrayBuffer
-import spark.streaming.{Time, StreamingContext}
+import org.apache.spark.streaming.{Time, StreamingContext}
 
 private[streaming]
 class QueueInputDStream[T: ClassManifest](
diff --git a/streaming/src/main/scala/spark/streaming/dstream/RawInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/RawInputDStream.scala
similarity index 95%
rename from streaming/src/main/scala/spark/streaming/dstream/RawInputDStream.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/dstream/RawInputDStream.scala
index 236f74f575e9d7c284b0d5b8a1637023af6bed82..c91f12ecd7afd8f74857a1c4731718c0fca517da 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/RawInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/RawInputDStream.scala
@@ -15,11 +15,11 @@
  * limitations under the License.
  */
 
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
 
-import spark.Logging
-import spark.storage.StorageLevel
-import spark.streaming.StreamingContext
+import org.apache.spark.Logging
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.streaming.StreamingContext
 
 import java.net.InetSocketAddress
 import java.nio.ByteBuffer
diff --git a/streaming/src/main/scala/spark/streaming/dstream/ReducedWindowedDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/ReducedWindowedDStream.scala
similarity index 94%
rename from streaming/src/main/scala/spark/streaming/dstream/ReducedWindowedDStream.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/dstream/ReducedWindowedDStream.scala
index 96260501ab7d25fc0d05000832c4d200bd043fb0..b88a4db9596be7394a11d7e647dfb47c6c4d552c 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/ReducedWindowedDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/ReducedWindowedDStream.scala
@@ -15,18 +15,18 @@
  * limitations under the License.
  */
 
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
 
-import spark.streaming.StreamingContext._
+import org.apache.spark.streaming.StreamingContext._
 
-import spark.RDD
-import spark.rdd.{CoGroupedRDD, MapPartitionsRDD}
-import spark.Partitioner
-import spark.SparkContext._
-import spark.storage.StorageLevel
+import org.apache.spark.rdd.RDD
+import org.apache.spark.rdd.{CoGroupedRDD, MapPartitionsRDD}
+import org.apache.spark.Partitioner
+import org.apache.spark.SparkContext._
+import org.apache.spark.storage.StorageLevel
 
 import scala.collection.mutable.ArrayBuffer
-import spark.streaming.{Duration, Interval, Time, DStream}
+import org.apache.spark.streaming.{Duration, Interval, Time, DStream}
 
 private[streaming]
 class ReducedWindowedDStream[K: ClassManifest, V: ClassManifest](
diff --git a/streaming/src/main/scala/spark/streaming/dstream/ShuffledDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/ShuffledDStream.scala
similarity index 87%
rename from streaming/src/main/scala/spark/streaming/dstream/ShuffledDStream.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/dstream/ShuffledDStream.scala
index 83b57b27f77f36795f62397a2e98ebd70708f442..a95e66d7615ce2a06a171abda978d4d829b557bf 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/ShuffledDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/ShuffledDStream.scala
@@ -15,11 +15,12 @@
  * limitations under the License.
  */
 
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
 
-import spark.{RDD, Partitioner}
-import spark.SparkContext._
-import spark.streaming.{Duration, DStream, Time}
+import org.apache.spark.Partitioner
+import org.apache.spark.rdd.RDD
+import org.apache.spark.SparkContext._
+import org.apache.spark.streaming.{Duration, DStream, Time}
 
 private[streaming]
 class ShuffledDStream[K: ClassManifest, V: ClassManifest, C: ClassManifest](
diff --git a/streaming/src/main/scala/spark/streaming/dstream/SocketInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/SocketInputDStream.scala
similarity index 93%
rename from streaming/src/main/scala/spark/streaming/dstream/SocketInputDStream.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/dstream/SocketInputDStream.scala
index 5877b10e0e16ae0160dd5cbf3665a8a364b40d6f..e2539c73961380769d842574eaf31a2011662914 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/SocketInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/SocketInputDStream.scala
@@ -15,11 +15,11 @@
  * limitations under the License.
  */
 
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
 
-import spark.streaming.StreamingContext
-import spark.storage.StorageLevel
-import spark.util.NextIterator
+import org.apache.spark.streaming.StreamingContext
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.util.NextIterator
 
 import java.io._
 import java.net.Socket
diff --git a/streaming/src/main/scala/spark/streaming/dstream/StateDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/StateDStream.scala
similarity index 94%
rename from streaming/src/main/scala/spark/streaming/dstream/StateDStream.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/dstream/StateDStream.scala
index 4b46613d5e102d1c39b872594295e44e9c4933ce..362a6bf4cc429f033859ec0a22b1aee3f4a7b8aa 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/StateDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/StateDStream.scala
@@ -15,13 +15,13 @@
  * limitations under the License.
  */
 
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
 
-import spark.RDD
-import spark.Partitioner
-import spark.SparkContext._
-import spark.storage.StorageLevel
-import spark.streaming.{Duration, Time, DStream}
+import org.apache.spark.rdd.RDD
+import org.apache.spark.Partitioner
+import org.apache.spark.SparkContext._
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.streaming.{Duration, Time, DStream}
 
 private[streaming]
 class StateDStream[K: ClassManifest, V: ClassManifest, S: ClassManifest](
diff --git a/streaming/src/main/scala/spark/streaming/dstream/TransformedDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/TransformedDStream.scala
similarity index 90%
rename from streaming/src/main/scala/spark/streaming/dstream/TransformedDStream.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/dstream/TransformedDStream.scala
index e7fbc5bbcfa8e148655810113976dc8aa5b935c0..60485adef9594124f4dcfdb4a91115c03dbe5a63 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/TransformedDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/TransformedDStream.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
 
-import spark.RDD
-import spark.streaming.{Duration, DStream, Time}
+import org.apache.spark.rdd.RDD
+import org.apache.spark.streaming.{Duration, DStream, Time}
 
 private[streaming]
 class TransformedDStream[T: ClassManifest, U: ClassManifest] (
diff --git a/streaming/src/main/scala/spark/streaming/dstream/TwitterInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/TwitterInputDStream.scala
similarity index 97%
rename from streaming/src/main/scala/spark/streaming/dstream/TwitterInputDStream.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/dstream/TwitterInputDStream.scala
index f09a8b9f90757b44e6c91aa298c499dfaae6fffe..387e15b0e6d809f94fcbe94f3ac00ddd8026ebe8 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/TwitterInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/TwitterInputDStream.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
 
-import spark._
-import spark.streaming._
+import org.apache.spark._
+import org.apache.spark.streaming._
 import storage.StorageLevel
 import twitter4j._
 import twitter4j.auth.Authorization
diff --git a/streaming/src/main/scala/spark/streaming/dstream/UnionDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/UnionDStream.scala
similarity index 91%
rename from streaming/src/main/scala/spark/streaming/dstream/UnionDStream.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/dstream/UnionDStream.scala
index 3eaa9a7e7f4c6c469aa6f595bb29f9302a0df72b..c696bb70a8fb6602c6bd688ea61ce6cd76038b3f 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/UnionDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/UnionDStream.scala
@@ -15,12 +15,12 @@
  * limitations under the License.
  */
 
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
 
-import spark.streaming.{Duration, DStream, Time}
-import spark.RDD
+import org.apache.spark.streaming.{Duration, DStream, Time}
+import org.apache.spark.rdd.RDD
 import collection.mutable.ArrayBuffer
-import spark.rdd.UnionRDD
+import org.apache.spark.rdd.UnionRDD
 
 private[streaming]
 class UnionDStream[T: ClassManifest](parents: Array[DStream[T]])
diff --git a/streaming/src/main/scala/spark/streaming/dstream/WindowedDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/WindowedDStream.scala
similarity index 89%
rename from streaming/src/main/scala/spark/streaming/dstream/WindowedDStream.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/dstream/WindowedDStream.scala
index fd24d61730d4fa82db234f8de3ae056025aadf3d..3c5729426902cf407de53e89b4f83693702ffa05 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/WindowedDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/WindowedDStream.scala
@@ -15,12 +15,12 @@
  * limitations under the License.
  */
 
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
 
-import spark.RDD
-import spark.rdd.UnionRDD
-import spark.storage.StorageLevel
-import spark.streaming.{Duration, Interval, Time, DStream}
+import org.apache.spark.rdd.RDD
+import org.apache.spark.rdd.UnionRDD
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.streaming.{Duration, Interval, Time, DStream}
 
 private[streaming]
 class WindowedDStream[T: ClassManifest](
diff --git a/streaming/src/main/scala/spark/streaming/receivers/ActorReceiver.scala b/streaming/src/main/scala/org/apache/spark/streaming/receivers/ActorReceiver.scala
similarity index 97%
rename from streaming/src/main/scala/spark/streaming/receivers/ActorReceiver.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/receivers/ActorReceiver.scala
index abeeff11b93b52b6d5fdc185a342eb7020687a9d..4b5d8c467e64fc1332a131fe385f0798e6262a99 100644
--- a/streaming/src/main/scala/spark/streaming/receivers/ActorReceiver.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/receivers/ActorReceiver.scala
@@ -15,14 +15,14 @@
  * limitations under the License.
  */
 
-package spark.streaming.receivers
+package org.apache.spark.streaming.receivers
 
 import akka.actor.{ Actor, PoisonPill, Props, SupervisorStrategy }
 import akka.actor.{ actorRef2Scala, ActorRef }
 import akka.actor.{ PossiblyHarmful, OneForOneStrategy }
 
-import spark.storage.StorageLevel
-import spark.streaming.dstream.NetworkReceiver
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.streaming.dstream.NetworkReceiver
 
 import java.util.concurrent.atomic.AtomicInteger
 
diff --git a/streaming/src/main/scala/spark/streaming/receivers/ZeroMQReceiver.scala b/streaming/src/main/scala/org/apache/spark/streaming/receivers/ZeroMQReceiver.scala
similarity index 95%
rename from streaming/src/main/scala/spark/streaming/receivers/ZeroMQReceiver.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/receivers/ZeroMQReceiver.scala
index 22d554e7e4944312195377fa9ca83aa1e0a0e108..043bb8c8bf7959f23c07e6c5d753e4120e2d73d9 100644
--- a/streaming/src/main/scala/spark/streaming/receivers/ZeroMQReceiver.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/receivers/ZeroMQReceiver.scala
@@ -15,12 +15,12 @@
  * limitations under the License.
  */
 
-package spark.streaming.receivers
+package org.apache.spark.streaming.receivers
 
 import akka.actor.Actor
 import akka.zeromq._
 
-import spark.Logging
+import org.apache.spark.Logging
 
 /**
 * A receiver to subscribe to a ZeroMQ stream.
diff --git a/streaming/src/main/scala/spark/streaming/util/Clock.scala b/streaming/src/main/scala/org/apache/spark/streaming/util/Clock.scala
similarity index 98%
rename from streaming/src/main/scala/spark/streaming/util/Clock.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/util/Clock.scala
index d9ac722df5caebd7c9adbce555941ed943b258fc..f67bb2f6ac51a5bf90fb50ba10ac296ed6e7285f 100644
--- a/streaming/src/main/scala/spark/streaming/util/Clock.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/util/Clock.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.streaming.util
+package org.apache.spark.streaming.util
 
 private[streaming]
 trait Clock {
diff --git a/streaming/src/main/scala/spark/streaming/util/MasterFailureTest.scala b/streaming/src/main/scala/org/apache/spark/streaming/util/MasterFailureTest.scala
similarity index 98%
rename from streaming/src/main/scala/spark/streaming/util/MasterFailureTest.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/util/MasterFailureTest.scala
index 8ce5d8daf52dddc8ea15d48475039fdeed12aa8b..69779571266599826a34a02caac4b178df8b9590 100644
--- a/streaming/src/main/scala/spark/streaming/util/MasterFailureTest.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/util/MasterFailureTest.scala
@@ -15,11 +15,12 @@
  * limitations under the License.
  */
 
-package spark.streaming.util
+package org.apache.spark.streaming.util
 
-import spark.{Logging, RDD}
-import spark.streaming._
-import spark.streaming.dstream.ForEachDStream
+import org.apache.spark.Logging
+import org.apache.spark.rdd.RDD
+import org.apache.spark.streaming._
+import org.apache.spark.streaming.dstream.ForEachDStream
 import StreamingContext._
 
 import scala.util.Random
diff --git a/streaming/src/main/scala/spark/streaming/util/RawTextHelper.scala b/streaming/src/main/scala/org/apache/spark/streaming/util/RawTextHelper.scala
similarity index 96%
rename from streaming/src/main/scala/spark/streaming/util/RawTextHelper.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/util/RawTextHelper.scala
index bf04120293e17b44967cdc4ea5f4a3cc3d4db577..4e6ce6eabd7ba2fd3753b9f1b3ea72a17e6616a9 100644
--- a/streaming/src/main/scala/spark/streaming/util/RawTextHelper.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/util/RawTextHelper.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.streaming.util
+package org.apache.spark.streaming.util
 
-import spark.SparkContext
-import spark.SparkContext._
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
 import it.unimi.dsi.fastutil.objects.{Object2LongOpenHashMap => OLMap}
 import scala.collection.JavaConversions.mapAsScalaMap
 
diff --git a/streaming/src/main/scala/spark/streaming/util/RawTextSender.scala b/streaming/src/main/scala/org/apache/spark/streaming/util/RawTextSender.scala
similarity index 93%
rename from streaming/src/main/scala/spark/streaming/util/RawTextSender.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/util/RawTextSender.scala
index 5cc6ad9dee30025e7dbb29b947c0eb0601835c9e..fc8655a0839d0ece4e4cd079e8befc726eb6e3c6 100644
--- a/streaming/src/main/scala/spark/streaming/util/RawTextSender.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/util/RawTextSender.scala
@@ -15,15 +15,16 @@
  * limitations under the License.
  */
 
-package spark.streaming.util
+package org.apache.spark.streaming.util
 
 import java.nio.ByteBuffer
-import spark.util.{RateLimitedOutputStream, IntParam}
+import org.apache.spark.util.{RateLimitedOutputStream, IntParam}
 import java.net.ServerSocket
-import spark.{Logging, KryoSerializer}
+import org.apache.spark.Logging
 import it.unimi.dsi.fastutil.io.FastByteArrayOutputStream
 import scala.io.Source
 import java.io.IOException
+import org.apache.spark.serializer.KryoSerializer
 
 /**
  * A helper program that sends blocks of Kryo-serialized text strings out on a socket at a
diff --git a/streaming/src/main/scala/spark/streaming/util/RecurringTimer.scala b/streaming/src/main/scala/org/apache/spark/streaming/util/RecurringTimer.scala
similarity index 98%
rename from streaming/src/main/scala/spark/streaming/util/RecurringTimer.scala
rename to streaming/src/main/scala/org/apache/spark/streaming/util/RecurringTimer.scala
index 7ecc44236d4168d8375061a780a74b6878b07b34..d644240405caa478f6b838473a8d7f7475615942 100644
--- a/streaming/src/main/scala/spark/streaming/util/RecurringTimer.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/util/RecurringTimer.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.streaming.util
+package org.apache.spark.streaming.util
 
 private[streaming]
 class RecurringTimer(val clock: Clock, val period: Long, val callback: (Long) => Unit) {
diff --git a/streaming/src/test/java/spark/streaming/JavaAPISuite.java b/streaming/src/test/java/org/apache/spark/streaming/JavaAPISuite.java
similarity index 98%
rename from streaming/src/test/java/spark/streaming/JavaAPISuite.java
rename to streaming/src/test/java/org/apache/spark/streaming/JavaAPISuite.java
index 3b93790baa8ecb07c22f24692a5d4ba55e0b80d4..c0d729ff8767373549991e872906a2b63d5fd05d 100644
--- a/streaming/src/test/java/spark/streaming/JavaAPISuite.java
+++ b/streaming/src/test/java/org/apache/spark/streaming/JavaAPISuite.java
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.streaming;
+package org.apache.spark.streaming;
 
 import com.google.common.base.Optional;
 import com.google.common.collect.Lists;
@@ -28,20 +28,20 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import scala.Tuple2;
-import spark.HashPartitioner;
-import spark.api.java.JavaPairRDD;
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaRDDLike;
-import spark.api.java.JavaPairRDD;
-import spark.api.java.JavaSparkContext;
-import spark.api.java.function.*;
-import spark.storage.StorageLevel;
-import spark.streaming.api.java.JavaDStream;
-import spark.streaming.api.java.JavaPairDStream;
-import spark.streaming.api.java.JavaStreamingContext;
-import spark.streaming.JavaTestUtils;
-import spark.streaming.JavaCheckpointTestUtils;
-import spark.streaming.InputStreamsSuite;
+import org.apache.spark.HashPartitioner;
+import org.apache.spark.api.java.JavaPairRDD;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaRDDLike;
+import org.apache.spark.api.java.JavaPairRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.*;
+import org.apache.spark.storage.StorageLevel;
+import org.apache.spark.streaming.api.java.JavaDStream;
+import org.apache.spark.streaming.api.java.JavaPairDStream;
+import org.apache.spark.streaming.api.java.JavaStreamingContext;
+import org.apache.spark.streaming.JavaTestUtils;
+import org.apache.spark.streaming.JavaCheckpointTestUtils;
+import org.apache.spark.streaming.InputStreamsSuite;
 
 import java.io.*;
 import java.util.*;
@@ -59,7 +59,7 @@ public class JavaAPISuite implements Serializable {
 
   @Before
   public void setUp() {
-      System.setProperty("spark.streaming.clock", "spark.streaming.util.ManualClock");
+      System.setProperty("spark.streaming.clock", "org.apache.spark.streaming.util.ManualClock");
       ssc = new JavaStreamingContext("local[2]", "test", new Duration(1000));
     ssc.checkpoint("checkpoint");
   }
diff --git a/streaming/src/test/java/spark/streaming/JavaTestUtils.scala b/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala
similarity index 84%
rename from streaming/src/test/java/spark/streaming/JavaTestUtils.scala
rename to streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala
index f9d25db8da669d3052509a460192d0dc68da6c9a..8a6604904de57d39626eac6ec5f2b19405dad833 100644
--- a/streaming/src/test/java/spark/streaming/JavaTestUtils.scala
+++ b/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala
@@ -15,20 +15,21 @@
  * limitations under the License.
  */
 
-package spark.streaming
+package org.apache.spark.streaming
 
 import collection.mutable.{SynchronizedBuffer, ArrayBuffer}
 import java.util.{List => JList}
-import spark.streaming.api.java.{JavaPairDStream, JavaDStreamLike, JavaDStream, JavaStreamingContext}
-import spark.streaming._
+import org.apache.spark.streaming.api.java.{JavaPairDStream, JavaDStreamLike, JavaDStream, JavaStreamingContext}
+import org.apache.spark.streaming._
 import java.util.ArrayList
 import collection.JavaConversions._
+import org.apache.spark.api.java.JavaRDDLike
 
 /** Exposes streaming test functionality in a Java-friendly way. */
 trait JavaTestBase extends TestSuiteBase {
 
   /**
-   * Create a [[spark.streaming.TestInputStream]] and attach it to the supplied context.
+   * Create a [[org.apache.spark.streaming.TestInputStream]] and attach it to the supplied context.
    * The stream will be derived from the supplied lists of Java objects.
    **/
   def attachTestInputStream[T](
@@ -46,11 +47,11 @@ trait JavaTestBase extends TestSuiteBase {
 
   /**
    * Attach a provided stream to its associated StreamingContext as a
-   * [[spark.streaming.TestOutputStream]].
+   * [[org.apache.spark.streaming.TestOutputStream]].
    **/
-  def attachTestOutputStream[T, This <: spark.streaming.api.java.JavaDStreamLike[T, This, R],
-      R <: spark.api.java.JavaRDDLike[T, R]](
-    dstream: JavaDStreamLike[T, This, R]) = {
+  def attachTestOutputStream[T, This <: JavaDStreamLike[T, This, R], R <: JavaRDDLike[T, R]](
+    dstream: JavaDStreamLike[T, This, R]) =
+  {
     implicit val cm: ClassManifest[T] =
       implicitly[ClassManifest[AnyRef]].asInstanceOf[ClassManifest[T]]
     val ostream = new TestOutputStream(dstream.dstream,
diff --git a/streaming/src/test/scala/spark/streaming/BasicOperationsSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/BasicOperationsSuite.scala
similarity index 98%
rename from streaming/src/test/scala/spark/streaming/BasicOperationsSuite.scala
rename to streaming/src/test/scala/org/apache/spark/streaming/BasicOperationsSuite.scala
index 67e3e0cd30c57a7544bce9a57c9bef44c0efbd2a..11586f72b6ddb081be0e4bc9e29f989ada9e7876 100644
--- a/streaming/src/test/scala/spark/streaming/BasicOperationsSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/BasicOperationsSuite.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.streaming
+package org.apache.spark.streaming
 
-import spark.streaming.StreamingContext._
+import org.apache.spark.streaming.StreamingContext._
 import scala.runtime.RichInt
 import util.ManualClock
 
@@ -26,7 +26,7 @@ class BasicOperationsSuite extends TestSuiteBase {
   override def framework() = "BasicOperationsSuite"
 
   before {
-    System.setProperty("spark.streaming.clock", "spark.streaming.util.ManualClock")
+    System.setProperty("spark.streaming.clock", "org.apache.spark.streaming.util.ManualClock")
   }
 
   after {
diff --git a/streaming/src/test/scala/spark/streaming/CheckpointSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/CheckpointSuite.scala
similarity index 98%
rename from streaming/src/test/scala/spark/streaming/CheckpointSuite.scala
rename to streaming/src/test/scala/org/apache/spark/streaming/CheckpointSuite.scala
index 8c639648f00971278cdd650f9559a5d8547e8e1e..a327de80b3eb3a0f98bcbd2486e0580225b93a8a 100644
--- a/streaming/src/test/scala/spark/streaming/CheckpointSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/CheckpointSuite.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.streaming
+package org.apache.spark.streaming
 
 import dstream.FileInputDStream
-import spark.streaming.StreamingContext._
+import org.apache.spark.streaming.StreamingContext._
 import java.io.File
 import runtime.RichInt
 import org.scalatest.BeforeAndAfter
@@ -36,7 +36,7 @@ import com.google.common.io.Files
  */
 class CheckpointSuite extends TestSuiteBase with BeforeAndAfter {
 
-  System.setProperty("spark.streaming.clock", "spark.streaming.util.ManualClock")
+  System.setProperty("spark.streaming.clock", "org.apache.spark.streaming.util.ManualClock")
 
   before {
     FileUtils.deleteDirectory(new File(checkpointDir))
@@ -63,7 +63,7 @@ class CheckpointSuite extends TestSuiteBase with BeforeAndAfter {
 
     assert(batchDuration === Milliseconds(500), "batchDuration for this test must be 500 milliseconds")
 
-    System.setProperty("spark.streaming.clock", "spark.streaming.util.ManualClock")
+    System.setProperty("spark.streaming.clock", "org.apache.spark.streaming.util.ManualClock")
 
     val stateStreamCheckpointInterval = Seconds(1)
 
diff --git a/streaming/src/test/scala/spark/streaming/FailureSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/FailureSuite.scala
similarity index 93%
rename from streaming/src/test/scala/spark/streaming/FailureSuite.scala
rename to streaming/src/test/scala/org/apache/spark/streaming/FailureSuite.scala
index 7fc649fe2705f409100e70c711ca05d2088c62af..6337c5359c3dcac1d8206d8881b6b28864484522 100644
--- a/streaming/src/test/scala/spark/streaming/FailureSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/FailureSuite.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.streaming
+package org.apache.spark.streaming
 
-import spark.Logging
-import spark.streaming.util.MasterFailureTest
+import org.apache.spark.Logging
+import org.apache.spark.streaming.util.MasterFailureTest
 import StreamingContext._
 
 import org.scalatest.{FunSuite, BeforeAndAfter}
diff --git a/streaming/src/test/scala/spark/streaming/InputStreamsSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/InputStreamsSuite.scala
similarity index 97%
rename from streaming/src/test/scala/spark/streaming/InputStreamsSuite.scala
rename to streaming/src/test/scala/org/apache/spark/streaming/InputStreamsSuite.scala
index 1c5419b16dab12780d3924751fa989cda19588b5..42e3e51e3fa15b18de981fe46f40fe999786a276 100644
--- a/streaming/src/test/scala/spark/streaming/InputStreamsSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/InputStreamsSuite.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.streaming
+package org.apache.spark.streaming
 
 import akka.actor.Actor
 import akka.actor.IO
@@ -29,9 +29,9 @@ import java.io.{File, BufferedWriter, OutputStreamWriter}
 import java.util.concurrent.{TimeUnit, ArrayBlockingQueue}
 import collection.mutable.{SynchronizedBuffer, ArrayBuffer}
 import util.ManualClock
-import spark.storage.StorageLevel
-import spark.streaming.receivers.Receiver
-import spark.Logging
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.streaming.receivers.Receiver
+import org.apache.spark.Logging
 import scala.util.Random
 import org.apache.commons.io.FileUtils
 import org.scalatest.BeforeAndAfter
@@ -52,7 +52,7 @@ class InputStreamsSuite extends TestSuiteBase with BeforeAndAfter {
   override def checkpointDir = "checkpoint"
 
   before {
-    System.setProperty("spark.streaming.clock", "spark.streaming.util.ManualClock")
+    System.setProperty("spark.streaming.clock", "org.apache.spark.streaming.util.ManualClock")
   }
 
   after {
@@ -207,7 +207,7 @@ class InputStreamsSuite extends TestSuiteBase with BeforeAndAfter {
     FileUtils.deleteDirectory(testDir)
 
     // Re-enable the manual clock for other tests
-    System.setProperty("spark.streaming.clock", "spark.streaming.util.ManualClock")
+    System.setProperty("spark.streaming.clock", "org.apache.spark.streaming.util.ManualClock")
   }
 
 
diff --git a/streaming/src/test/scala/spark/streaming/TestSuiteBase.scala b/streaming/src/test/scala/org/apache/spark/streaming/TestSuiteBase.scala
similarity index 98%
rename from streaming/src/test/scala/spark/streaming/TestSuiteBase.scala
rename to streaming/src/test/scala/org/apache/spark/streaming/TestSuiteBase.scala
index cb34b5a7cc48a3fded3ba91b0489f448b3a796ce..37dd9c4cc61d29d28f247bbdf9044b80e8fce76b 100644
--- a/streaming/src/test/scala/spark/streaming/TestSuiteBase.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/TestSuiteBase.scala
@@ -15,12 +15,10 @@
  * limitations under the License.
  */
 
-package spark.streaming
+package org.apache.spark.streaming
 
-import spark.streaming.dstream.{InputDStream, ForEachDStream}
-import spark.streaming.util.ManualClock
-
-import spark.{RDD, Logging}
+import org.apache.spark.streaming.dstream.{InputDStream, ForEachDStream}
+import org.apache.spark.streaming.util.ManualClock
 
 import collection.mutable.ArrayBuffer
 import collection.mutable.SynchronizedBuffer
@@ -29,6 +27,9 @@ import java.io.{ObjectInputStream, IOException}
 
 import org.scalatest.{BeforeAndAfter, FunSuite}
 
+import org.apache.spark.Logging
+import org.apache.spark.rdd.RDD
+
 /**
  * This is an input stream just for the test suites. It is equivalent to a checkpointable,
  * replayable, reliable message queue like Kafka. It requires a sequence as input, and
diff --git a/streaming/src/test/scala/spark/streaming/WindowOperationsSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/WindowOperationsSuite.scala
similarity index 98%
rename from streaming/src/test/scala/spark/streaming/WindowOperationsSuite.scala
rename to streaming/src/test/scala/org/apache/spark/streaming/WindowOperationsSuite.scala
index 894b765fc625f68b458ab4ccd6649829f3720b09..f50e05c0d883b97efbd0a92e7a3b9aedb9526fbb 100644
--- a/streaming/src/test/scala/spark/streaming/WindowOperationsSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/WindowOperationsSuite.scala
@@ -15,14 +15,14 @@
  * limitations under the License.
  */
 
-package spark.streaming
+package org.apache.spark.streaming
 
-import spark.streaming.StreamingContext._
+import org.apache.spark.streaming.StreamingContext._
 import collection.mutable.ArrayBuffer
 
 class WindowOperationsSuite extends TestSuiteBase {
 
-  System.setProperty("spark.streaming.clock", "spark.streaming.util.ManualClock")
+  System.setProperty("spark.streaming.clock", "org.apache.spark.streaming.util.ManualClock")
 
   override def framework = "WindowOperationsSuite"
 
diff --git a/tools/pom.xml b/tools/pom.xml
index 95b5e80e5b1bc8c9839afc15747aa7940420cb11..77646a68165adea8bb810259f91ff368367f652d 100644
--- a/tools/pom.xml
+++ b/tools/pom.xml
@@ -18,26 +18,26 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <parent>
-    <groupId>org.spark-project</groupId>
+    <groupId>org.apache.spark</groupId>
     <artifactId>spark-parent</artifactId>
     <version>0.8.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 
-  <groupId>org.spark-project</groupId>
+  <groupId>org.apache.spark</groupId>
   <artifactId>spark-tools</artifactId>
   <packaging>jar</packaging>
   <name>Spark Project Tools</name>
-  <url>http://spark-project.org/</url>
+  <url>http://spark.incubator.apache.org/</url>
 
   <dependencies>
     <dependency>
-      <groupId>org.spark-project</groupId>
+      <groupId>org.apache.spark</groupId>
       <artifactId>spark-core</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
-      <groupId>org.spark-project</groupId>
+      <groupId>org.apache.spark</groupId>
       <artifactId>spark-streaming</artifactId>
       <version>${project.version}</version>
     </dependency>
diff --git a/tools/src/main/scala/spark/tools/JavaAPICompletenessChecker.scala b/tools/src/main/scala/org/apache/spark/tools/JavaAPICompletenessChecker.scala
similarity index 64%
rename from tools/src/main/scala/spark/tools/JavaAPICompletenessChecker.scala
rename to tools/src/main/scala/org/apache/spark/tools/JavaAPICompletenessChecker.scala
index f45d0b281cf0a1a38def95993c6c6569f00abc42..f824c472aee62bff4b985b1aecf1ca81d0ca109c 100644
--- a/tools/src/main/scala/spark/tools/JavaAPICompletenessChecker.scala
+++ b/tools/src/main/scala/org/apache/spark/tools/JavaAPICompletenessChecker.scala
@@ -15,17 +15,17 @@
  * limitations under the License.
  */
 
-package spark.tools
+package org.apache.spark.tools
 
 import java.lang.reflect.Method
 
 import scala.collection.mutable.ArrayBuffer
 
-import spark._
-import spark.api.java._
-import spark.rdd.OrderedRDDFunctions
-import spark.streaming.{PairDStreamFunctions, DStream, StreamingContext}
-import spark.streaming.api.java.{JavaPairDStream, JavaDStream, JavaStreamingContext}
+import org.apache.spark._
+import org.apache.spark.api.java._
+import org.apache.spark.rdd.{RDD, DoubleRDDFunctions, PairRDDFunctions, OrderedRDDFunctions}
+import org.apache.spark.streaming.{PairDStreamFunctions, DStream, StreamingContext}
+import org.apache.spark.streaming.api.java.{JavaPairDStream, JavaDStream, JavaStreamingContext}
 
 
 private[spark] abstract class SparkType(val name: String)
@@ -129,7 +129,7 @@ object JavaAPICompletenessChecker {
       // TODO: the JavaStreamingContext API accepts Array arguments
       // instead of Lists, so this isn't a trivial translation / sub:
       "scala.collection.Seq" -> "java.util.List",
-      "scala.Function2" -> "spark.api.java.function.Function2",
+      "scala.Function2" -> "org.apache.spark.api.java.function.Function2",
       "scala.collection.Iterator" -> "java.util.Iterator",
       "scala.collection.mutable.Queue" -> "java.util.Queue",
       "double" -> "java.lang.Double"
@@ -139,7 +139,7 @@ object JavaAPICompletenessChecker {
       scalaType match {
         case ParameterizedType(name, parameters, typebounds) =>
           name match {
-            case "spark.RDD" =>
+            case "org.apache.spark.rdd.RDD" =>
               if (parameters(0).name == classOf[Tuple2[_, _]].getName) {
                 val tupleParams =
                   parameters(0).asInstanceOf[ParameterizedType].parameters.map(applySubs)
@@ -147,13 +147,13 @@ object JavaAPICompletenessChecker {
               } else {
                 ParameterizedType(classOf[JavaRDD[_]].getName, parameters.map(applySubs))
               }
-            case "spark.streaming.DStream" =>
+            case "org.apache.spark.streaming.DStream" =>
               if (parameters(0).name == classOf[Tuple2[_, _]].getName) {
                 val tupleParams =
                   parameters(0).asInstanceOf[ParameterizedType].parameters.map(applySubs)
-                ParameterizedType("spark.streaming.api.java.JavaPairDStream", tupleParams)
+                ParameterizedType("org.apache.spark.streaming.api.java.JavaPairDStream", tupleParams)
               } else {
-                ParameterizedType("spark.streaming.api.java.JavaDStream",
+                ParameterizedType("org.apache.spark.streaming.api.java.JavaDStream",
                   parameters.map(applySubs))
               }
             case "scala.Option" => {
@@ -167,14 +167,14 @@ object JavaAPICompletenessChecker {
               val firstParamName = parameters.last.name
               if (firstParamName.startsWith("scala.collection.Traversable") ||
                 firstParamName.startsWith("scala.collection.Iterator")) {
-                ParameterizedType("spark.api.java.function.FlatMapFunction",
+                ParameterizedType("org.apache.spark.api.java.function.FlatMapFunction",
                   Seq(parameters(0),
                     parameters.last.asInstanceOf[ParameterizedType].parameters(0)).map(applySubs))
               } else if (firstParamName == "scala.runtime.BoxedUnit") {
-                ParameterizedType("spark.api.java.function.VoidFunction",
+                ParameterizedType("org.apache.spark.api.java.function.VoidFunction",
                   parameters.dropRight(1).map(applySubs))
               } else {
-                ParameterizedType("spark.api.java.function.Function", parameters.map(applySubs))
+                ParameterizedType("org.apache.spark.api.java.function.Function", parameters.map(applySubs))
               }
             case _ =>
               ParameterizedType(renameSubstitutions.getOrElse(name, name),
@@ -211,85 +211,85 @@ object JavaAPICompletenessChecker {
     // This list also includes a few methods that are only used by the web UI or other
     // internal Spark components.
     val excludedNames = Seq(
-      "spark.RDD.origin",
-      "spark.RDD.elementClassManifest",
-      "spark.RDD.checkpointData",
-      "spark.RDD.partitioner",
-      "spark.RDD.partitions",
-      "spark.RDD.firstParent",
-      "spark.RDD.doCheckpoint",
-      "spark.RDD.markCheckpointed",
-      "spark.RDD.clearDependencies",
-      "spark.RDD.getDependencies",
-      "spark.RDD.getPartitions",
-      "spark.RDD.dependencies",
-      "spark.RDD.getPreferredLocations",
-      "spark.RDD.collectPartitions",
-      "spark.RDD.computeOrReadCheckpoint",
-      "spark.PairRDDFunctions.getKeyClass",
-      "spark.PairRDDFunctions.getValueClass",
-      "spark.SparkContext.stringToText",
-      "spark.SparkContext.makeRDD",
-      "spark.SparkContext.runJob",
-      "spark.SparkContext.runApproximateJob",
-      "spark.SparkContext.clean",
-      "spark.SparkContext.metadataCleaner",
-      "spark.SparkContext.ui",
-      "spark.SparkContext.newShuffleId",
-      "spark.SparkContext.newRddId",
-      "spark.SparkContext.cleanup",
-      "spark.SparkContext.receiverJobThread",
-      "spark.SparkContext.getRDDStorageInfo",
-      "spark.SparkContext.addedFiles",
-      "spark.SparkContext.addedJars",
-      "spark.SparkContext.persistentRdds",
-      "spark.SparkContext.executorEnvs",
-      "spark.SparkContext.checkpointDir",
-      "spark.SparkContext.getSparkHome",
-      "spark.SparkContext.executorMemoryRequested",
-      "spark.SparkContext.getExecutorStorageStatus",
-      "spark.streaming.DStream.generatedRDDs",
-      "spark.streaming.DStream.zeroTime",
-      "spark.streaming.DStream.rememberDuration",
-      "spark.streaming.DStream.storageLevel",
-      "spark.streaming.DStream.mustCheckpoint",
-      "spark.streaming.DStream.checkpointDuration",
-      "spark.streaming.DStream.checkpointData",
-      "spark.streaming.DStream.graph",
-      "spark.streaming.DStream.isInitialized",
-      "spark.streaming.DStream.parentRememberDuration",
-      "spark.streaming.DStream.initialize",
-      "spark.streaming.DStream.validate",
-      "spark.streaming.DStream.setContext",
-      "spark.streaming.DStream.setGraph",
-      "spark.streaming.DStream.remember",
-      "spark.streaming.DStream.getOrCompute",
-      "spark.streaming.DStream.generateJob",
-      "spark.streaming.DStream.clearOldMetadata",
-      "spark.streaming.DStream.addMetadata",
-      "spark.streaming.DStream.updateCheckpointData",
-      "spark.streaming.DStream.restoreCheckpointData",
-      "spark.streaming.DStream.isTimeValid",
-      "spark.streaming.StreamingContext.nextNetworkInputStreamId",
-      "spark.streaming.StreamingContext.networkInputTracker",
-      "spark.streaming.StreamingContext.checkpointDir",
-      "spark.streaming.StreamingContext.checkpointDuration",
-      "spark.streaming.StreamingContext.receiverJobThread",
-      "spark.streaming.StreamingContext.scheduler",
-      "spark.streaming.StreamingContext.initialCheckpoint",
-      "spark.streaming.StreamingContext.getNewNetworkStreamId",
-      "spark.streaming.StreamingContext.validate",
-      "spark.streaming.StreamingContext.createNewSparkContext",
-      "spark.streaming.StreamingContext.rddToFileName",
-      "spark.streaming.StreamingContext.getSparkCheckpointDir",
-      "spark.streaming.StreamingContext.env",
-      "spark.streaming.StreamingContext.graph",
-      "spark.streaming.StreamingContext.isCheckpointPresent"
+      "org.apache.spark.rdd.RDD.origin",
+      "org.apache.spark.rdd.RDD.elementClassManifest",
+      "org.apache.spark.rdd.RDD.checkpointData",
+      "org.apache.spark.rdd.RDD.partitioner",
+      "org.apache.spark.rdd.RDD.partitions",
+      "org.apache.spark.rdd.RDD.firstParent",
+      "org.apache.spark.rdd.RDD.doCheckpoint",
+      "org.apache.spark.rdd.RDD.markCheckpointed",
+      "org.apache.spark.rdd.RDD.clearDependencies",
+      "org.apache.spark.rdd.RDD.getDependencies",
+      "org.apache.spark.rdd.RDD.getPartitions",
+      "org.apache.spark.rdd.RDD.dependencies",
+      "org.apache.spark.rdd.RDD.getPreferredLocations",
+      "org.apache.spark.rdd.RDD.collectPartitions",
+      "org.apache.spark.rdd.RDD.computeOrReadCheckpoint",
+      "org.apache.spark.rdd.PairRDDFunctions.getKeyClass",
+      "org.apache.spark.rdd.PairRDDFunctions.getValueClass",
+      "org.apache.spark.SparkContext.stringToText",
+      "org.apache.spark.SparkContext.makeRDD",
+      "org.apache.spark.SparkContext.runJob",
+      "org.apache.spark.SparkContext.runApproximateJob",
+      "org.apache.spark.SparkContext.clean",
+      "org.apache.spark.SparkContext.metadataCleaner",
+      "org.apache.spark.SparkContext.ui",
+      "org.apache.spark.SparkContext.newShuffleId",
+      "org.apache.spark.SparkContext.newRddId",
+      "org.apache.spark.SparkContext.cleanup",
+      "org.apache.spark.SparkContext.receiverJobThread",
+      "org.apache.spark.SparkContext.getRDDStorageInfo",
+      "org.apache.spark.SparkContext.addedFiles",
+      "org.apache.spark.SparkContext.addedJars",
+      "org.apache.spark.SparkContext.persistentRdds",
+      "org.apache.spark.SparkContext.executorEnvs",
+      "org.apache.spark.SparkContext.checkpointDir",
+      "org.apache.spark.SparkContext.getSparkHome",
+      "org.apache.spark.SparkContext.executorMemoryRequested",
+      "org.apache.spark.SparkContext.getExecutorStorageStatus",
+      "org.apache.spark.streaming.DStream.generatedRDDs",
+      "org.apache.spark.streaming.DStream.zeroTime",
+      "org.apache.spark.streaming.DStream.rememberDuration",
+      "org.apache.spark.streaming.DStream.storageLevel",
+      "org.apache.spark.streaming.DStream.mustCheckpoint",
+      "org.apache.spark.streaming.DStream.checkpointDuration",
+      "org.apache.spark.streaming.DStream.checkpointData",
+      "org.apache.spark.streaming.DStream.graph",
+      "org.apache.spark.streaming.DStream.isInitialized",
+      "org.apache.spark.streaming.DStream.parentRememberDuration",
+      "org.apache.spark.streaming.DStream.initialize",
+      "org.apache.spark.streaming.DStream.validate",
+      "org.apache.spark.streaming.DStream.setContext",
+      "org.apache.spark.streaming.DStream.setGraph",
+      "org.apache.spark.streaming.DStream.remember",
+      "org.apache.spark.streaming.DStream.getOrCompute",
+      "org.apache.spark.streaming.DStream.generateJob",
+      "org.apache.spark.streaming.DStream.clearOldMetadata",
+      "org.apache.spark.streaming.DStream.addMetadata",
+      "org.apache.spark.streaming.DStream.updateCheckpointData",
+      "org.apache.spark.streaming.DStream.restoreCheckpointData",
+      "org.apache.spark.streaming.DStream.isTimeValid",
+      "org.apache.spark.streaming.StreamingContext.nextNetworkInputStreamId",
+      "org.apache.spark.streaming.StreamingContext.networkInputTracker",
+      "org.apache.spark.streaming.StreamingContext.checkpointDir",
+      "org.apache.spark.streaming.StreamingContext.checkpointDuration",
+      "org.apache.spark.streaming.StreamingContext.receiverJobThread",
+      "org.apache.spark.streaming.StreamingContext.scheduler",
+      "org.apache.spark.streaming.StreamingContext.initialCheckpoint",
+      "org.apache.spark.streaming.StreamingContext.getNewNetworkStreamId",
+      "org.apache.spark.streaming.StreamingContext.validate",
+      "org.apache.spark.streaming.StreamingContext.createNewSparkContext",
+      "org.apache.spark.streaming.StreamingContext.rddToFileName",
+      "org.apache.spark.streaming.StreamingContext.getSparkCheckpointDir",
+      "org.apache.spark.streaming.StreamingContext.env",
+      "org.apache.spark.streaming.StreamingContext.graph",
+      "org.apache.spark.streaming.StreamingContext.isCheckpointPresent"
     )
     val excludedPatterns = Seq(
-      """^spark\.SparkContext\..*To.*Functions""",
-      """^spark\.SparkContext\..*WritableConverter""",
-      """^spark\.SparkContext\..*To.*Writable"""
+      """^org\.apache\.spark\.SparkContext\..*To.*Functions""",
+      """^org\.apache\.spark\.SparkContext\..*WritableConverter""",
+      """^org\.apache\.spark\.SparkContext\..*To.*Writable"""
     ).map(_.r)
     lazy val excludedByPattern =
       !excludedPatterns.map(_.findFirstIn(name)).filter(_.isDefined).isEmpty
@@ -298,7 +298,7 @@ object JavaAPICompletenessChecker {
 
   private def isExcludedByInterface(method: Method): Boolean = {
     val excludedInterfaces =
-      Set("spark.Logging", "org.apache.hadoop.mapreduce.HadoopMapReduceUtil")
+      Set("org.apache.spark.Logging", "org.apache.hadoop.mapreduce.HadoopMapReduceUtil")
     def toComparisionKey(method: Method) =
       (method.getReturnType, method.getName, method.getGenericReturnType)
     val interfaces = method.getDeclaringClass.getInterfaces.filter { i =>
diff --git a/yarn/pom.xml b/yarn/pom.xml
index 07dd170eae54fcbe58d97cbe50acc78e6814a244..fcacdac9eb861ab319214e087f4be348fc21e0b6 100644
--- a/yarn/pom.xml
+++ b/yarn/pom.xml
@@ -18,17 +18,17 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <parent>
-    <groupId>org.spark-project</groupId>
+    <groupId>org.apache.spark</groupId>
     <artifactId>spark-parent</artifactId>
     <version>0.8.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 
-  <groupId>org.spark-project</groupId>
+  <groupId>org.apache.spark</groupId>
   <artifactId>spark-yarn</artifactId>
   <packaging>jar</packaging>
   <name>Spark Project YARN Support</name>
-  <url>http://spark-project.org/</url>
+  <url>http://spark.incubator.apache.org/</url>
 
   <build>
     <outputDirectory>target/scala-${scala.version}/classes</outputDirectory>
@@ -81,7 +81,7 @@
       <id>hadoop2-yarn</id>
       <dependencies>
         <dependency>
-          <groupId>org.spark-project</groupId>
+          <groupId>org.apache.spark</groupId>
           <artifactId>spark-core</artifactId>
           <version>${project.version}</version>
         </dependency>
diff --git a/yarn/src/main/scala/spark/deploy/yarn/ApplicationMaster.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
similarity index 99%
rename from yarn/src/main/scala/spark/deploy/yarn/ApplicationMaster.scala
rename to yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
index 0f3b6bc1a65d0c77e6d84d330529705ff5fd1238..139a977a0311f13384b948bac273c48b5a267254 100644
--- a/yarn/src/main/scala/spark/deploy/yarn/ApplicationMaster.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.deploy.yarn
+package org.apache.spark.deploy.yarn
 
 import java.net.Socket
 import java.util.concurrent.CopyOnWriteArrayList
@@ -29,7 +29,7 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration
 import org.apache.hadoop.yarn.ipc.YarnRPC
 import org.apache.hadoop.yarn.util.{ConverterUtils, Records}
 import scala.collection.JavaConversions._
-import spark.{SparkContext, Logging, Utils}
+import org.apache.spark.{SparkContext, Logging, Utils}
 import org.apache.hadoop.security.UserGroupInformation
 import java.security.PrivilegedExceptionAction
 
diff --git a/yarn/src/main/scala/spark/deploy/yarn/ApplicationMasterArguments.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMasterArguments.scala
similarity index 97%
rename from yarn/src/main/scala/spark/deploy/yarn/ApplicationMasterArguments.scala
rename to yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMasterArguments.scala
index 8de44b1f6617dabc7994acd26ae66c47b5fb3b4d..f47e23b63f4eaaae9f2b54213a1e68ef3af8d6a2 100644
--- a/yarn/src/main/scala/spark/deploy/yarn/ApplicationMasterArguments.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMasterArguments.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.deploy.yarn
+package org.apache.spark.deploy.yarn
 
-import spark.util.IntParam
+import org.apache.spark.util.IntParam
 import collection.mutable.ArrayBuffer
 
 class ApplicationMasterArguments(val args: Array[String]) {
diff --git a/yarn/src/main/scala/spark/deploy/yarn/Client.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
similarity index 99%
rename from yarn/src/main/scala/spark/deploy/yarn/Client.scala
rename to yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
index eb2a8cc6420765c88d9c87957f55f8540080f59f..48e737ed79774a38adc4dd408e38947c5283c74d 100644
--- a/yarn/src/main/scala/spark/deploy/yarn/Client.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.deploy.yarn
+package org.apache.spark.deploy.yarn
 
 import java.net.{InetSocketAddress, URI}
 import java.nio.ByteBuffer
@@ -33,10 +33,10 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration
 import org.apache.hadoop.yarn.ipc.YarnRPC
 import scala.collection.mutable.HashMap
 import scala.collection.JavaConversions._
-import spark.{Logging, Utils}
+import org.apache.spark.{Logging, Utils}
 import org.apache.hadoop.yarn.util.{Apps, Records, ConverterUtils}
 import org.apache.hadoop.yarn.api.ApplicationConstants.Environment
-import spark.deploy.SparkHadoopUtil
+import org.apache.spark.deploy.SparkHadoopUtil
 
 class Client(conf: Configuration, args: ClientArguments) extends YarnClientImpl with Logging {
   
diff --git a/yarn/src/main/scala/spark/deploy/yarn/ClientArguments.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ClientArguments.scala
similarity index 95%
rename from yarn/src/main/scala/spark/deploy/yarn/ClientArguments.scala
rename to yarn/src/main/scala/org/apache/spark/deploy/yarn/ClientArguments.scala
index 67aff03781cb40527edcf3f962c34b698896edf9..6cbfadc23be93d609fb349c8ba135038c9f7ac70 100644
--- a/yarn/src/main/scala/spark/deploy/yarn/ClientArguments.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ClientArguments.scala
@@ -15,12 +15,12 @@
  * limitations under the License.
  */
 
-package spark.deploy.yarn
+package org.apache.spark.deploy.yarn
 
-import spark.util.MemoryParam
-import spark.util.IntParam
+import org.apache.spark.util.MemoryParam
+import org.apache.spark.util.IntParam
 import collection.mutable.{ArrayBuffer, HashMap}
-import spark.scheduler.{InputFormatInfo, SplitInfo}
+import org.apache.spark.scheduler.{InputFormatInfo, SplitInfo}
 
 // TODO: Add code and support for ensuring that yarn resource 'asks' are location aware!
 class ClientArguments(val args: Array[String]) {
diff --git a/yarn/src/main/scala/spark/deploy/yarn/WorkerRunnable.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/WorkerRunnable.scala
similarity index 98%
rename from yarn/src/main/scala/spark/deploy/yarn/WorkerRunnable.scala
rename to yarn/src/main/scala/org/apache/spark/deploy/yarn/WorkerRunnable.scala
index 0e1fd9b680e3987a5f46280af4a7697de25648b1..72dcf7178eb304eedea6fe9738fa901d0c13eb78 100644
--- a/yarn/src/main/scala/spark/deploy/yarn/WorkerRunnable.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/WorkerRunnable.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package spark.deploy.yarn
+package org.apache.spark.deploy.yarn
 
 import java.net.URI
 import java.nio.ByteBuffer
@@ -37,7 +37,7 @@ import org.apache.hadoop.yarn.api.ApplicationConstants.Environment
 import scala.collection.JavaConversions._
 import scala.collection.mutable.HashMap
 
-import spark.{Logging, Utils}
+import org.apache.spark.{Logging, Utils}
 
 class WorkerRunnable(container: Container, conf: Configuration, masterAddress: String,
     slaveId: String, hostname: String, workerMemory: Int, workerCores: Int) 
@@ -119,7 +119,7 @@ class WorkerRunnable(container: Container, conf: Configuration, masterAddress: S
       // TODO: If the OOM is not recoverable by rescheduling it on a different node, then do 'something' to fail the job ... akin to blacklisting trackers in mapred?
       " -XX:OnOutOfMemoryError='kill %p' " +
       JAVA_OPTS +
-      " spark.executor.StandaloneExecutorBackend " +
+      " org.apache.spark.executor.StandaloneExecutorBackend " +
       masterAddress + " " +
       slaveId + " " +
       hostname + " " +
diff --git a/yarn/src/main/scala/spark/deploy/yarn/YarnAllocationHandler.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocationHandler.scala
similarity index 99%
rename from yarn/src/main/scala/spark/deploy/yarn/YarnAllocationHandler.scala
rename to yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocationHandler.scala
index b0af8baf08c152293e96cd87aef2cda27e39bba9..26ff214e122420314938f82dd634d719a7d99817 100644
--- a/yarn/src/main/scala/spark/deploy/yarn/YarnAllocationHandler.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocationHandler.scala
@@ -15,13 +15,13 @@
  * limitations under the License.
  */
 
-package spark.deploy.yarn
+package org.apache.spark.deploy.yarn
 
-import spark.{Logging, Utils}
-import spark.scheduler.SplitInfo
+import org.apache.spark.{Logging, Utils}
+import org.apache.spark.scheduler.SplitInfo
 import scala.collection
 import org.apache.hadoop.yarn.api.records.{AMResponse, ApplicationAttemptId, ContainerId, Priority, Resource, ResourceRequest, ContainerStatus, Container}
-import spark.scheduler.cluster.{ClusterScheduler, StandaloneSchedulerBackend}
+import org.apache.spark.scheduler.cluster.{ClusterScheduler, StandaloneSchedulerBackend}
 import org.apache.hadoop.yarn.api.protocolrecords.{AllocateRequest, AllocateResponse}
 import org.apache.hadoop.yarn.util.{RackResolver, Records}
 import java.util.concurrent.{CopyOnWriteArrayList, ConcurrentHashMap}
diff --git a/yarn/src/main/scala/spark/deploy/yarn/YarnSparkHadoopUtil.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala
similarity index 95%
rename from yarn/src/main/scala/spark/deploy/yarn/YarnSparkHadoopUtil.scala
rename to yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala
index 77c4ee7f3f67f9afec64bc3aa978c09ce43e35be..ca2f1e2565b9a8a7956ffe00e2b453779956853a 100644
--- a/yarn/src/main/scala/spark/deploy/yarn/YarnSparkHadoopUtil.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala
@@ -15,9 +15,9 @@
  * limitations under the License.
  */
 
-package spark.deploy.yarn
+package org.apache.spark.deploy.yarn
 
-import spark.deploy.SparkHadoopUtil
+import org.apache.spark.deploy.SparkHadoopUtil
 import collection.mutable.HashMap
 import org.apache.hadoop.mapred.JobConf
 import org.apache.hadoop.security.UserGroupInformation
diff --git a/yarn/src/main/scala/spark/scheduler/cluster/YarnClusterScheduler.scala b/yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnClusterScheduler.scala
similarity index 93%
rename from yarn/src/main/scala/spark/scheduler/cluster/YarnClusterScheduler.scala
rename to yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnClusterScheduler.scala
index bb58353e0cc68e657b20c5e06478d12545fb08ee..3828ddfc4f14afe0d2bd0665b3476c79ad0cb648 100644
--- a/yarn/src/main/scala/spark/scheduler/cluster/YarnClusterScheduler.scala
+++ b/yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnClusterScheduler.scala
@@ -15,10 +15,10 @@
  * limitations under the License.
  */
 
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
 
-import spark._
-import spark.deploy.yarn.{ApplicationMaster, YarnAllocationHandler}
+import org.apache.spark._
+import org.apache.spark.deploy.yarn.{ApplicationMaster, YarnAllocationHandler}
 import org.apache.hadoop.conf.Configuration
 
 /**