diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala
index f3b5455574d1ad752a9040d4181f2a25588001ca..cef92abbdc586485bce1360676d0b7edf8db923a 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala
@@ -37,7 +37,9 @@ private[sql] object JDBCRDD extends Logging {
    * @param sqlType - A field of java.sql.Types
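+   * @param precision - The column precision reported by the JDBC metadata (0 when not reported)
+   * @param scale - The column scale reported by the JDBC metadata (0 when not reported)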
    * @return The Catalyst type corresponding to sqlType.
    */
-  private def getCatalystType(sqlType: Int): DataType = {
+  private def getCatalystType(sqlType: Int, precision: Int, scale: Int): DataType = {
     val answer = sqlType match {
       case java.sql.Types.ARRAY         => null
       case java.sql.Types.BIGINT        => LongType
@@ -49,6 +51,8 @@ private[sql] object JDBCRDD extends Logging {
       case java.sql.Types.CLOB          => StringType
       case java.sql.Types.DATALINK      => null
       case java.sql.Types.DATE          => DateType
+      case java.sql.Types.DECIMAL
+        if precision != 0 || scale != 0 => DecimalType(precision, scale)
       case java.sql.Types.DECIMAL       => DecimalType.Unlimited
       case java.sql.Types.DISTINCT      => null
       case java.sql.Types.DOUBLE        => DoubleType
@@ -61,6 +65,8 @@ private[sql] object JDBCRDD extends Logging {
       case java.sql.Types.NCHAR         => StringType
       case java.sql.Types.NCLOB         => StringType
       case java.sql.Types.NULL          => null
+      case java.sql.Types.NUMERIC
+        if precision != 0 || scale != 0 => DecimalType(precision, scale)
       case java.sql.Types.NUMERIC       => DecimalType.Unlimited
       case java.sql.Types.NVARCHAR      => StringType
       case java.sql.Types.OTHER         => null
@@ -109,10 +115,11 @@ private[sql] object JDBCRDD extends Logging {
           val dataType = rsmd.getColumnType(i + 1)
           val typeName = rsmd.getColumnTypeName(i + 1)
           val fieldSize = rsmd.getPrecision(i + 1)
+          val fieldScale = rsmd.getScale(i + 1)
           val nullable = rsmd.isNullable(i + 1) != ResultSetMetaData.columnNoNulls
           val metadata = new MetadataBuilder().putString("name", columnName)
           var columnType = quirks.getCatalystType(dataType, typeName, fieldSize, metadata)
-          if (columnType == null) columnType = getCatalystType(dataType)
+          if (columnType == null) columnType = getCatalystType(dataType, fieldSize, fieldScale)
           fields(i) = StructField(columnName, columnType, nullable, metadata.build())
           i = i + 1
         }
@@ -307,6 +314,7 @@ private[sql] class JDBCRDD(
       case BooleanType           => BooleanConversion
       case DateType              => DateConversion
       case DecimalType.Unlimited => DecimalConversion
+      case DecimalType.Fixed(_, _) => DecimalConversion
       case DoubleType            => DoubleConversion
       case FloatType             => FloatConversion
       case IntegerType           => IntegerConversion
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
index 856a806781ec42b7232f20814c2a8e8359e75cdd..b165ab2b1deb3868d4fcfc19e28c748df58ba770 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
@@ -22,6 +22,7 @@ import java.sql.DriverManager
 import java.util.{Calendar, GregorianCalendar, Properties}
 
 import org.apache.spark.sql.test._
+import org.apache.spark.sql.types._
 import org.h2.jdbc.JdbcSQLException
 import org.scalatest.{FunSuite, BeforeAndAfter}
 import TestSQLContext._
@@ -271,6 +272,7 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
     assert(rows(0).getDouble(1) === 1.00000011920928955) // Yes, I meant ==.
     assert(rows(0).getAs[BigDecimal](2)
         .equals(new BigDecimal("123456789012345.54321543215432100000")))
+    assert(rows(0).schema.fields(2).dataType === DecimalType(40, 20))
   }
 
   test("SQL query as table name") {