diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/AbstractParams.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/AbstractParams.scala
index ae6057758d6fcc53d307fe8175632234641c1346..8985c8565c5317e039a1d4a6b1e9059f49dd1fce 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/AbstractParams.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/AbstractParams.scala
@@ -38,7 +38,7 @@ abstract class AbstractParams[T: TypeTag] {
    */
   override def toString: String = {
     val tpe = tag.tpe
-    val allAccessors = tpe.declarations.collect {
+    val allAccessors = tpe.decls.collect {
       case m: MethodSymbol if m.isCaseAccessor => m
     }
     val mirror = runtimeMirror(getClass.getClassLoader)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
index 02cb2d9a2b118ebebd309805270ce2615bddf31f..4f1911c77905aadea5593e5e81e995c776d23e17 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
@@ -774,9 +774,9 @@ trait ScalaReflection {
   }

   protected def constructParams(tpe: Type): Seq[Symbol] = {
-    val constructorSymbol = tpe.member(nme.CONSTRUCTOR)
+    val constructorSymbol = tpe.member(termNames.CONSTRUCTOR)
     val params = if (constructorSymbol.isMethod) {
-      constructorSymbol.asMethod.paramss
+      constructorSymbol.asMethod.paramLists
     } else {
       // Find the primary constructor, and use its parameter ordering.
       val primaryConstructorSymbol: Option[Symbol] = constructorSymbol.asTerm.alternatives.find(
@@ -784,7 +784,7 @@
       if (primaryConstructorSymbol.isEmpty) {
         sys.error("Internal SQL error: Product object did not have a primary constructor.")
       } else {
-        primaryConstructorSymbol.get.asMethod.paramss
+        primaryConstructorSymbol.get.asMethod.paramLists
       }
     }
     params.flatten
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/package.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/package.scala
index 41128fe389d464a9be818446175ab53b8e05f919..382c718f2d7ebb4d0089a41389d1315023573a15 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/package.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/package.scala
@@ -51,7 +51,7 @@ package object codegen {
       val classLoader =
         generatedClass
           .getClassLoader
-          .asInstanceOf[scala.tools.nsc.interpreter.AbstractFileClassLoader]
+          .asInstanceOf[scala.reflect.internal.util.AbstractFileClassLoader]
       val generatedBytes = classLoader.classBytes(generatedClass.getName)

       val packageDir = new java.io.File(dumpDirectory, generatedClass.getPackage.getName)
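Note: the three changes above are the mechanical part of moving to the Scala 2.11 reflection API: `Type.declarations` becomes `Type.decls`, `nme` becomes `termNames`, `MethodSymbol.paramss` becomes `MethodSymbol.paramLists`, and `AbstractFileClassLoader` moved from `scala.tools.nsc.interpreter` to `scala.reflect.internal.util`. For anyone migrating similar code, a minimal self-contained sketch of the renamed API follows; the `Point` case class and `ReflectionDemo` object are hypothetical, not part of this patch:

```scala
import scala.reflect.runtime.universe._

// Hypothetical case class, used only to exercise the 2.11 reflection API.
case class Point(x: Int, y: Int)

object ReflectionDemo extends App {
  val tpe = typeOf[Point]

  // 2.11: `decls` replaces the deprecated `declarations`.
  val accessors = tpe.decls.collect {
    case m: MethodSymbol if m.isCaseAccessor => m
  }
  println(accessors.map(_.name).mkString(", "))  // x, y

  // 2.11: `termNames` replaces `nme`, and `paramLists` replaces `paramss`.
  val ctor = tpe.member(termNames.CONSTRUCTOR).asMethod
  println(ctor.paramLists.flatten.map(_.name))   // List(x, y)
}
```

The old names still compile under 2.11 but emit deprecation warnings; the new ones are drop-in replacements, so the hunks above do not change behavior.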
diff --git a/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala b/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala
index a947fac1d751df9ca6983259bf9215d28b18f0be..3eed6aee65bb9eed5b17a978f623a1d604b2ec88 100644
--- a/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala
+++ b/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala
@@ -44,10 +44,10 @@ object GenerateMIMAIgnore {

   private def isDeveloperApi(sym: unv.Symbol) =
-    sym.annotations.exists(_.tpe =:= unv.typeOf[org.apache.spark.annotation.DeveloperApi])
+    sym.annotations.exists(_.tree.tpe =:= unv.typeOf[org.apache.spark.annotation.DeveloperApi])

   private def isExperimental(sym: unv.Symbol) =
-    sym.annotations.exists(_.tpe =:= unv.typeOf[org.apache.spark.annotation.Experimental])
+    sym.annotations.exists(_.tree.tpe =:= unv.typeOf[org.apache.spark.annotation.Experimental])

   private def isPackagePrivate(sym: unv.Symbol) =
diff --git a/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMasterArguments.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMasterArguments.scala
index 5af3941c6023e31fdb15896994c882d9fc40b072..87f50875e366f2efbbd1bb8d292f9317386cc664 100644
--- a/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMasterArguments.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMasterArguments.scala
@@ -87,7 +87,7 @@ class ApplicationMasterArguments(val args: Array[String]) {
       System.exit(-1)
     }

-    userArgs = userArgsBuffer.readOnly
+    userArgs = userArgsBuffer.toList
   }

   def printUsageAndExit(exitCode: Int, unknownParam: Any = null) {
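The same pattern applies to the last two hunks: Scala 2.11 deprecates `Annotation.tpe` in favor of reading the type off the annotation's tree (`_.tree.tpe`), and `Buffer.readOnly` in favor of converting to an immutable collection explicitly. A small sketch under those assumptions; the `demoApi` annotation and `AnnotationDemo` object are hypothetical stand-ins for Spark's `DeveloperApi` check, not part of this patch:

```scala
import scala.reflect.runtime.universe._
import scala.collection.mutable.ArrayBuffer

// Hypothetical stand-in for org.apache.spark.annotation.DeveloperApi.
class demoApi extends scala.annotation.StaticAnnotation

@demoApi class Tagged

object AnnotationDemo extends App {
  val sym = typeOf[Tagged].typeSymbol

  // 2.11: Annotation.tpe is deprecated; the annotation's type lives on its tree.
  println(sym.annotations.exists(_.tree.tpe =:= typeOf[demoApi]))  // true

  // `readOnly` is deprecated; `toList` yields a plain immutable List.
  val userArgsBuffer = ArrayBuffer("--arg", "value")
  val userArgs: List[String] = userArgsBuffer.toList
  println(userArgs)  // List(--arg, value)
}
```

One behavioral nuance worth noting: `readOnly` effectively retyped the mutable buffer as a read-only `Seq` that still reflected later mutations, whereas `toList` takes an immutable snapshot, which is the safer choice once argument parsing has finished.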