diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
index bf449afae695fbead432956090d9fcf6f5340b25..153eee3bc5889e0a3b7998b8192c01b3327c6e7e 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -381,16 +381,19 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
 object SparkSubmitArguments {
   /** Load properties present in the given file. */
   def getPropertiesFromFile(file: File): Seq[(String, String)] = {
-    require(file.exists(), s"Properties file ${file.getName} does not exist")
+    require(file.exists(), s"Properties file $file does not exist")
+    require(file.isFile(), s"Properties file $file is not a normal file")
     val inputStream = new FileInputStream(file)
-    val properties = new Properties()
     try {
+      val properties = new Properties()
       properties.load(inputStream)
+      properties.stringPropertyNames().toSeq.map(k => (k, properties(k).trim))
     } catch {
       case e: IOException =>
-        val message = s"Failed when loading Spark properties file ${file.getName}"
+        val message = s"Failed when loading Spark properties file $file"
         throw new SparkException(message, e)
+    } finally {
+      inputStream.close()
     }
-    properties.stringPropertyNames().toSeq.map(k => (k, properties(k).trim))
   }
 }