diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
index 353fbf8a0b48d..2bfec1dbf0906 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
@@ -727,9 +727,6 @@ case class Cast(
   private[this] def timestampToDouble(ts: Long): Double = {
     ts / MICROS_PER_SECOND.toDouble
   }
-  private[this] def timeToDouble(timeNanos: Long): Double = {
-    timeNanos / NANOS_PER_SECOND.toDouble
-  }
   private[this] def timeToLong(timeNanos: Long): Long = {
     Math.floorDiv(timeNanos, NANOS_PER_SECOND)
   }
@@ -1043,11 +1040,15 @@ case class Cast(
         b => toPrecision(if (b) Decimal.ONE else Decimal.ZERO, target, getContextOrNull()))
       case DateType =>
         buildCast[Int](_, d => null) // date can't cast to decimal in Hive
-      case TimestampType =>
-        // Note that we lose precision here.
-        buildCast[Long](_, t => changePrecision(Decimal(timestampToDouble(t)), target))
-      case _: TimeType =>
-        buildCast[Long](_, t => changePrecision(Decimal(timeToDouble(t)), target))
+      case TimestampType => buildCast[Long](_, t => changePrecision(
+        // 19 digits is enough to represent any TIMESTAMP value in Long.
+        // 6 digits of scale is for microseconds precision of TIMESTAMP values.
+        Decimal.apply(t, 19, 6), target))
+      case _: TimeType => buildCast[Long](_, t => changePrecision(
+        // 14 digits is enough to cover the full range of TIME value [0, 24:00) which is
+        // [0, 24 * 60 * 60 * 1000 * 1000 * 1000) = [0, 86400000000000).
+        // 9 digits of scale is for nanoseconds precision of TIME values.
+        Decimal.apply(t, precision = 14, scale = 9), target))
       case dt: DecimalType =>
         b => toPrecision(b.asInstanceOf[Decimal], target, getContextOrNull())
       case t: IntegralType =>
@@ -1506,18 +1507,15 @@ case class Cast(
         // date can't cast to decimal in Hive
         (c, evPrim, evNull) => code"$evNull = true;"
       case TimestampType =>
-        // Note that we lose precision here.
         (c, evPrim, evNull) =>
           code"""
-            Decimal $tmp = Decimal.apply(
-              scala.math.BigDecimal.valueOf(${timestampToDoubleCode(c)}));
+            Decimal $tmp = Decimal.apply($c, 19, 6);
             ${changePrecision(tmp, target, evPrim, evNull, canNullSafeCast, ctx)}
           """
       case _: TimeType =>
         (c, evPrim, evNull) =>
           code"""
-            Decimal $tmp = Decimal.apply(
-              scala.math.BigDecimal.valueOf(${timeToDoubleCode(c)}));
+            Decimal $tmp = Decimal.apply($c, 14, 9);
             ${changePrecision(tmp, target, evPrim, evNull, canNullSafeCast, ctx)}
           """
       case DecimalType() =>
@@ -1767,8 +1765,6 @@ case class Cast(
 
   private[this] def timestampToDoubleCode(ts: ExprValue): Block =
     code"$ts / (double)$MICROS_PER_SECOND"
-  private[this] def timeToDoubleCode(ts: ExprValue): Block =
-    code"$ts / (double)$NANOS_PER_SECOND"
   private[this] def timeToLongCode(timeValue: ExprValue): Block =
     code"Math.floorDiv($timeValue, ${NANOS_PER_SECOND}L)"
 
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastWithAnsiOnSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastWithAnsiOnSuite.scala
index db50621a5275c..d9a3e30449838 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastWithAnsiOnSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastWithAnsiOnSuite.scala
@@ -810,7 +810,7 @@ class CastWithAnsiOnSuite extends CastSuiteBase with QueryErrorsBase {
       ),
       condition = "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION",
       parameters = Map(
-        "value" -> "86399.123456",
+        "value" -> "86399.123456000",
         "precision" -> "2",
         "scale" -> "0",
         "config" -> """"spark.sql.ansi.enabled""""
diff --git a/sql/core/src/test/resources/sql-tests/results/cast.sql.out b/sql/core/src/test/resources/sql-tests/results/cast.sql.out
index 7ac7b72e43332..ca2f739113f13 100644
--- a/sql/core/src/test/resources/sql-tests/results/cast.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/cast.sql.out
@@ -2305,7 +2305,7 @@ org.apache.spark.SparkArithmeticException
     "config" : "\"spark.sql.ansi.enabled\"",
     "precision" : "1",
     "scale" : "0",
-    "value" : "60.0"
+    "value" : "60.000000000"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -2330,7 +2330,7 @@ org.apache.spark.SparkArithmeticException
     "config" : "\"spark.sql.ansi.enabled\"",
     "precision" : "3",
     "scale" : "0",
-    "value" : "3600.0"
+    "value" : "3600.000000000"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -2355,7 +2355,7 @@ org.apache.spark.SparkArithmeticException
     "config" : "\"spark.sql.ansi.enabled\"",
     "precision" : "5",
     "scale" : "2",
-    "value" : "36000.0"
+    "value" : "36000.000000000"
   },
   "queryContext" : [ {
     "objectType" : "",
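
For context, a minimal standalone sketch of the behavior this patch targets: building the decimal directly from the raw Long (unscaled value plus a fixed scale) keeps every micro/nanosecond digit and a stable scale, whereas the old detour through Double can round large timestamps and reports values with whatever scale the Double's string form happens to carry. The sketch uses plain scala.math.BigDecimal rather than Spark's Decimal, and the literal values are hypothetical except 86399.123456, which appears in the updated test expectation above.

// Standalone sketch (plain Scala, not Spark code); values are hypothetical unless noted.
object CastToDecimalSketch {
  def main(args: Array[String]): Unit = {
    // TIME is stored as nanoseconds since midnight (86399.123456 s, from the test above).
    val timeNanos = 86399123456000L
    // Old path (conceptually): nanos -> Double seconds -> decimal; scale follows the Double.
    val timeViaDouble = BigDecimal(java.math.BigDecimal.valueOf(timeNanos / 1e9))
    // New path (conceptually): the Long is the unscaled value with scale 9,
    // mirroring Decimal.apply(t, precision = 14, scale = 9) in the patch.
    val timeDirect = BigDecimal(timeNanos, 9)
    println(timeViaDouble)  // 86399.123456
    println(timeDirect)     // 86399.123456000 -- the scale now seen in the error messages

    // TIMESTAMP is stored as microseconds since the epoch; a far-future value shows the
    // precision loss the removed "we lose precision here" comment referred to.
    val micros = 9999999999999999L  // > 2^53, so the Long -> Double step already rounds
    val tsViaDouble = BigDecimal(java.math.BigDecimal.valueOf(micros / 1e6))
    val tsDirect = BigDecimal(micros, 6)  // unscaled Long, scale 6, like Decimal.apply(t, 19, 6)
    println(tsViaDouble)  // 1.0E+10 -- the microsecond digits are gone
    println(tsDirect)     // 9999999999.999999 -- exact
  }
}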