Commit d4cd3233 authored by Raymond Liu

Some fixes for previous master merge commits

parent a60620b7
...@@ -21,7 +21,6 @@ import java.util.concurrent.TimeoutException ...@@ -21,7 +21,6 @@ import java.util.concurrent.TimeoutException
import scala.concurrent.duration._ import scala.concurrent.duration._
import scala.concurrent.Await import scala.concurrent.Await
import scala.concurrent.ExecutionContext.Implicits.global
import akka.actor._ import akka.actor._
import akka.actor.Terminated import akka.actor.Terminated
...@@ -84,6 +83,7 @@ private[spark] class Client( ...@@ -84,6 +83,7 @@ private[spark] class Client(
def registerWithMaster() { def registerWithMaster() {
tryRegisterAllMasters() tryRegisterAllMasters()
import context.dispatcher
var retries = 0 var retries = 0
lazy val retryTimer: Cancellable = lazy val retryTimer: Cancellable =
context.system.scheduler.schedule(REGISTRATION_TIMEOUT, REGISTRATION_TIMEOUT) { context.system.scheduler.schedule(REGISTRATION_TIMEOUT, REGISTRATION_TIMEOUT) {
......
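The change above drops the file-level scala.concurrent.ExecutionContext.Implicits.global import and pulls in the actor's own context.dispatcher instead, so the registration retry timer runs on the actor system's dispatcher rather than Scala's global thread pool. A minimal sketch of the pattern follows; the actor name, retry limit, and 10-second interval are illustrative, not the actual Spark Client code.

import scala.concurrent.duration._
import akka.actor._

// Minimal sketch only: an actor that schedules a periodic retry on its own dispatcher.
class RegistrationRetryActor extends Actor {
  // scheduler.schedule needs an implicit ExecutionContext; importing
  // context.dispatcher keeps the scheduled work on the actor's dispatcher
  // instead of ExecutionContext.Implicits.global.
  import context.dispatcher

  var retries = 0

  // lazy so the closure can cancel the timer through its own val,
  // mirroring the retryTimer pattern in the hunk above
  lazy val retryTimer: Cancellable =
    context.system.scheduler.schedule(10.seconds, 10.seconds) {
      retries += 1
      if (retries >= 3) retryTimer.cancel()
    }

  override def preStart() {
    retryTimer // force initialization to start the timer
  }

  override def receive = {
    case msg => // handle registration replies here
  }
}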
...@@ -24,7 +24,6 @@ import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet} ...@@ -24,7 +24,6 @@ import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
import scala.concurrent.Await import scala.concurrent.Await
import scala.concurrent.duration._ import scala.concurrent.duration._
import scala.concurrent.duration.{ Duration, FiniteDuration } import scala.concurrent.duration.{ Duration, FiniteDuration }
import scala.concurrent.ExecutionContext.Implicits.global
import akka.actor._ import akka.actor._
import akka.pattern.ask import akka.pattern.ask
...@@ -58,6 +57,8 @@ import java.util.concurrent.TimeUnit ...@@ -58,6 +57,8 @@ import java.util.concurrent.TimeUnit
private[spark] class Master(host: String, port: Int, webUiPort: Int) extends Actor with Logging { private[spark] class Master(host: String, port: Int, webUiPort: Int) extends Actor with Logging {
import context.dispatcher
val DATE_FORMAT = new SimpleDateFormat("yyyyMMddHHmmss") // For application IDs val DATE_FORMAT = new SimpleDateFormat("yyyyMMddHHmmss") // For application IDs
val WORKER_TIMEOUT = System.getProperty("spark.worker.timeout", "60").toLong * 1000 val WORKER_TIMEOUT = System.getProperty("spark.worker.timeout", "60").toLong * 1000
val RETAINED_APPLICATIONS = System.getProperty("spark.deploy.retainedApplications", "200").toInt val RETAINED_APPLICATIONS = System.getProperty("spark.deploy.retainedApplications", "200").toInt
......
...@@ -23,7 +23,6 @@ import java.io.File ...@@ -23,7 +23,6 @@ import java.io.File
import scala.collection.mutable.HashMap import scala.collection.mutable.HashMap
import scala.concurrent.duration._ import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global
import akka.actor._ import akka.actor._
import akka.remote.{RemotingLifecycleEvent, AssociationErrorEvent, DisassociatedEvent} import akka.remote.{RemotingLifecycleEvent, AssociationErrorEvent, DisassociatedEvent}
...@@ -61,6 +60,7 @@ private[spark] class Worker( ...@@ -61,6 +60,7 @@ private[spark] class Worker(
masterUrls: Array[String], masterUrls: Array[String],
workDirPath: String = null) workDirPath: String = null)
extends Actor with Logging { extends Actor with Logging {
import context.dispatcher
Utils.checkHost(host, "Expected hostname") Utils.checkHost(host, "Expected hostname")
assert (port > 0) assert (port > 0)
...@@ -175,8 +175,6 @@ private[spark] class Worker( ...@@ -175,8 +175,6 @@ private[spark] class Worker(
retryTimer // start timer retryTimer // start timer
} }
import context.dispatcher
override def receive = { override def receive = {
case RegisteredWorker(masterUrl, masterWebUiUrl) => case RegisteredWorker(masterUrl, masterWebUiUrl) =>
logInfo("Successfully registered with master " + masterUrl) logInfo("Successfully registered with master " + masterUrl)
......
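In the Worker the same dispatcher import also moves from the middle of the class (old line 177) up to just after the class header. A Scala import is only visible from the point where it appears to the end of the enclosing scope, so once the global ExecutionContext import is gone, context.dispatcher has to precede the earlier scheduler calls, such as the registration retry timer, that need an implicit ExecutionContext.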
...@@ -227,6 +227,7 @@ object SparkBuild extends Build { ...@@ -227,6 +227,7 @@ object SparkBuild extends Build {
"org.apache.hadoop" % "hadoop-client" % hadoopVersion excludeAll(excludeJackson, excludeNetty, excludeAsm, excludeCglib), "org.apache.hadoop" % "hadoop-client" % hadoopVersion excludeAll(excludeJackson, excludeNetty, excludeAsm, excludeCglib),
"org.apache.avro" % "avro" % "1.7.4", "org.apache.avro" % "avro" % "1.7.4",
"org.apache.avro" % "avro-ipc" % "1.7.4" excludeAll(excludeNetty), "org.apache.avro" % "avro-ipc" % "1.7.4" excludeAll(excludeNetty),
"org.apache.zookeeper" % "zookeeper" % "3.4.5" excludeAll(excludeNetty),
"com.codahale.metrics" % "metrics-core" % "3.0.0", "com.codahale.metrics" % "metrics-core" % "3.0.0",
"com.codahale.metrics" % "metrics-jvm" % "3.0.0", "com.codahale.metrics" % "metrics-jvm" % "3.0.0",
"com.codahale.metrics" % "metrics-json" % "3.0.0", "com.codahale.metrics" % "metrics-json" % "3.0.0",
......
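The build change adds ZooKeeper 3.4.5 with the same Netty exclusion that the neighbouring Hadoop and Avro dependencies already use. A rough sketch of how that exclusion pattern reads in an sbt build of this era follows; the object name and the standalone settings sequence are illustrative, while in Spark's SparkBuild the rule and dependency live inside the existing project definitions.

import sbt._
import Keys._

// Illustrative only: define a rule that strips the old org.jboss.netty artifact
// wherever it is pulled in transitively, then apply it to the new ZooKeeper
// dependency so it cannot clash with the Netty version already on the classpath.
object ZooKeeperDependencyExample {
  val excludeNetty = ExclusionRule(organization = "org.jboss.netty")

  val settings = Seq(
    libraryDependencies += "org.apache.zookeeper" % "zookeeper" % "3.4.5" excludeAll(excludeNetty)
  )
}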