
Commit fac4433

dengziming authored and MaxGekk committed
[SPARK-52779][SQL][CONNECT] Support TimeType literal in Connect
### What changes were proposed in this pull request?
Support TimeType literal in Spark Connect.

### Why are the changes needed?
Part of SPARK-51162 to introduce TimeType.

### Does this PR introduce _any_ user-facing change?
Yes, TimeType literals are supported in Connect; we can use functions such as `org.apache.spark.sql.functions.lit()` to pass a time literal.

### How was this patch tested?
Added some unit tests.

### Was this patch authored or co-authored using generative AI tooling?
No.

Closes #51462 from dengziming/SPARK-51162.

Authored-by: dengziming <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
1 parent 5cd50eb commit fac4433
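
For illustration, a minimal sketch of the user-facing change, based on the literals exercised in `PlanGenerationTestSuite` below; the `spark` session value is assumed and is not part of this diff:

```scala
import java.time.LocalTime
import org.apache.spark.sql.functions.{lit, typedLit}

// A LocalTime value can now be sent over Connect as a TimeType literal,
// mirroring the fn.lit / fn.typedLit calls added in PlanGenerationTestSuite.
val df = spark.range(1).select(
  lit(LocalTime.of(23, 59, 59, 999999999)),
  typedLit(LocalTime.of(23, 59, 59, 999999999)))
df.show()
```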

File tree: 13 files changed, +226 -66 lines


python/pyspark/sql/connect/proto/expressions_pb2.py

Lines changed: 65 additions & 63 deletions
Large diffs are not rendered by default.

python/pyspark/sql/connect/proto/expressions_pb2.pyi

Lines changed: 39 additions & 0 deletions
@@ -654,6 +654,36 @@ class Expression(google.protobuf.message.Message):
                 | None
             ): ...
 
+        class Time(google.protobuf.message.Message):
+            DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+            NANO_FIELD_NUMBER: builtins.int
+            PRECISION_FIELD_NUMBER: builtins.int
+            nano: builtins.int
+            precision: builtins.int
+            """The precision of this time, if omitted, uses the default value of MICROS_PRECISION."""
+            def __init__(
+                self,
+                *,
+                nano: builtins.int = ...,
+                precision: builtins.int | None = ...,
+            ) -> None: ...
+            def HasField(
+                self,
+                field_name: typing_extensions.Literal[
+                    "_precision", b"_precision", "precision", b"precision"
+                ],
+            ) -> builtins.bool: ...
+            def ClearField(
+                self,
+                field_name: typing_extensions.Literal[
+                    "_precision", b"_precision", "nano", b"nano", "precision", b"precision"
+                ],
+            ) -> None: ...
+            def WhichOneof(
+                self, oneof_group: typing_extensions.Literal["_precision", b"_precision"]
+            ) -> typing_extensions.Literal["precision"] | None: ...
+
         NULL_FIELD_NUMBER: builtins.int
         BINARY_FIELD_NUMBER: builtins.int
         BOOLEAN_FIELD_NUMBER: builtins.int
@@ -675,6 +705,7 @@ class Expression(google.protobuf.message.Message):
         MAP_FIELD_NUMBER: builtins.int
         STRUCT_FIELD_NUMBER: builtins.int
         SPECIALIZED_ARRAY_FIELD_NUMBER: builtins.int
+        TIME_FIELD_NUMBER: builtins.int
         @property
         def null(self) -> pyspark.sql.connect.proto.types_pb2.DataType: ...
         binary: builtins.bytes
@@ -706,6 +737,8 @@ class Expression(google.protobuf.message.Message):
        def struct(self) -> global___Expression.Literal.Struct: ...
        @property
        def specialized_array(self) -> global___Expression.Literal.SpecializedArray: ...
+       @property
+       def time(self) -> global___Expression.Literal.Time: ...
        def __init__(
            self,
            *,
@@ -730,6 +763,7 @@ class Expression(google.protobuf.message.Message):
            map: global___Expression.Literal.Map | None = ...,
            struct: global___Expression.Literal.Struct | None = ...,
            specialized_array: global___Expression.Literal.SpecializedArray | None = ...,
+           time: global___Expression.Literal.Time | None = ...,
        ) -> None: ...
        def HasField(
            self,
@@ -772,6 +806,8 @@ class Expression(google.protobuf.message.Message):
                "string",
                b"string",
                "struct",
                b"struct",
+               "time",
+               b"time",
                "timestamp",
                b"timestamp",
                "timestamp_ntz",
@@ -821,6 +857,8 @@ class Expression(google.protobuf.message.Message):
                "string",
                b"string",
                "struct",
                b"struct",
+               "time",
+               b"time",
                "timestamp",
                b"timestamp",
                "timestamp_ntz",
@@ -854,6 +892,7 @@ class Expression(google.protobuf.message.Message):
                "map",
                "struct",
                "specialized_array",
+               "time",
            ]
            | None
        ): ...

sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/PlanGenerationTestSuite.scala

Lines changed: 2 additions & 0 deletions
@@ -3319,6 +3319,7 @@ class PlanGenerationTestSuite
       fn.lit(java.sql.Date.valueOf("2023-02-23")),
       fn.lit(java.time.Duration.ofSeconds(200L)),
       fn.lit(java.time.Period.ofDays(100)),
+      fn.lit(java.time.LocalTime.of(23, 59, 59, 999999999)),
       fn.lit(new CalendarInterval(2, 20, 100L)))
   }
 
@@ -3389,6 +3390,7 @@ class PlanGenerationTestSuite
       fn.typedLit(java.sql.Date.valueOf("2023-02-23")),
       fn.typedLit(java.time.Duration.ofSeconds(200L)),
       fn.typedLit(java.time.Period.ofDays(100)),
+      fn.typedLit(java.time.LocalTime.of(23, 59, 59, 999999999)),
      fn.typedLit(new CalendarInterval(2, 20, 100L)),

      // Handle parameterized scala types e.g.: List, Seq and Map.

sql/connect/common/src/main/protobuf/spark/connect/expressions.proto

Lines changed: 7 additions & 0 deletions
@@ -195,6 +195,7 @@ message Expression {
       Struct struct = 24;
 
       SpecializedArray specialized_array = 25;
+      Time time = 26;
     }
 
     message Decimal {
@@ -240,6 +241,12 @@ message Expression {
         Strings strings = 6;
       }
     }
+
+    message Time {
+      int64 nano = 1;
+      // The precision of this time, if omitted, uses the default value of MICROS_PRECISION.
+      optional int32 precision = 2;
+    }
   }
 
   // An unresolved attribute that is not explicitly bound to a specific column, but the column
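
As the converter change and the golden JSON files below suggest, `nano` carries the nanosecond-of-day value of the local time, and `precision` falls back to microseconds when omitted. A quick sketch with plain `java.time` (no Spark APIs) of how the value in the golden files is derived:

```scala
import java.time.LocalTime

// 23:59:59.999999999 as nanoseconds since midnight:
// 86399 s * 1_000_000_000 + 999_999_999 = 86_399_999_999_999,
// matching the "nano": "86399999999999" entries in function_lit.json / function_typedLit.json.
val nanoOfDay: Long = LocalTime.of(23, 59, 59, 999999999).toNanoOfDay
```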

sql/connect/common/src/main/scala/org/apache/spark/sql/connect/common/LiteralValueProtoConverter.scala

Lines changed: 10 additions & 0 deletions
@@ -96,6 +96,11 @@ object LiteralValueProtoConverter {
       case v: Date => builder.setDate(SparkDateTimeUtils.fromJavaDate(v))
       case v: Duration => builder.setDayTimeInterval(SparkIntervalUtils.durationToMicros(v))
       case v: Period => builder.setYearMonthInterval(SparkIntervalUtils.periodToMonths(v))
+      case v: LocalTime =>
+        builder.setTime(
+          builder.getTimeBuilder
+            .setNano(SparkDateTimeUtils.localTimeToNanos(v))
+            .setPrecision(TimeType.DEFAULT_PRECISION))
       case v: Array[_] => builder.setArray(arrayBuilder(v))
       case v: CalendarInterval =>
         builder.setCalendarInterval(calendarIntervalBuilder(v.months, v.days, v.microseconds))
@@ -181,6 +186,11 @@ object LiteralValueProtoConverter {
         } else {
           builder.setNull(toConnectProtoType(dataType))
         }
+      case (v: LocalTime, timeType: TimeType) =>
+        builder.setTime(
+          builder.getTimeBuilder
+            .setNano(SparkDateTimeUtils.localTimeToNanos(v))
+            .setPrecision(timeType.precision))
       case _ => toLiteralProtoBuilder(literal)
     }
   }
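
A minimal sketch of what the new `LocalTime` cases build, written against the generated proto API directly; the `org.apache.spark.connect.proto` package name and the hard-coded precision value 6 are assumptions for illustration (the converter itself goes through `SparkDateTimeUtils.localTimeToNanos` and `TimeType`, as shown above):

```scala
import java.time.LocalTime
import org.apache.spark.connect.proto.Expression

// Populate the new `time` field of Expression.Literal, mirroring the added cases.
val builder = Expression.Literal.newBuilder()
builder.setTime(
  builder.getTimeBuilder
    .setNano(LocalTime.of(23, 59, 59, 999999999).toNanoOfDay) // nanosecond-of-day
    .setPrecision(6)) // MICROS precision, the default when omitted
val literal: Expression.Literal = builder.build()
```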
Lines changed: 1 addition & 1 deletion
@@ -1,2 +1,2 @@
-Project [id#0L, id#0L, true AS true#0, 68 AS 68#0, 9872 AS 9872#0, -8726532 AS -8726532#0, 7834609328726532 AS 7834609328726532#0L, 2.718281828459045 AS 2.718281828459045#0, -0.8 AS -0.8#0, 89.97620 AS 89.97620#0, 89889.7667231 AS 89889.7667231#0, connect! AS connect!#0, T AS T#0, ABCDEFGHIJ AS ABCDEFGHIJ#0, 0x78797A7B7C7D7E7F808182838485868788898A8B8C8D8E AS X'78797A7B7C7D7E7F808182838485868788898A8B8C8D8E'#0, 0x0806 AS X'0806'#0, [8,6] AS ARRAY(8, 6)#0, null AS NULL#0, 2020-10-10 AS DATE '2020-10-10'#0, 8.997620 AS 8.997620#0, 2023-02-23 04:31:59.808 AS TIMESTAMP '2023-02-23 04:31:59.808'#0, 1969-12-31 16:00:12.345 AS TIMESTAMP '1969-12-31 16:00:12.345'#0, 2023-02-23 20:36:00 AS TIMESTAMP_NTZ '2023-02-23 20:36:00'#0, 2023-02-23 AS DATE '2023-02-23'#0, INTERVAL '0 00:03:20' DAY TO SECOND AS INTERVAL '0 00:03:20' DAY TO SECOND#0, ... 2 more fields]
+Project [id#0L, id#0L, true AS true#0, 68 AS 68#0, 9872 AS 9872#0, -8726532 AS -8726532#0, 7834609328726532 AS 7834609328726532#0L, 2.718281828459045 AS 2.718281828459045#0, -0.8 AS -0.8#0, 89.97620 AS 89.97620#0, 89889.7667231 AS 89889.7667231#0, connect! AS connect!#0, T AS T#0, ABCDEFGHIJ AS ABCDEFGHIJ#0, 0x78797A7B7C7D7E7F808182838485868788898A8B8C8D8E AS X'78797A7B7C7D7E7F808182838485868788898A8B8C8D8E'#0, 0x0806 AS X'0806'#0, [8,6] AS ARRAY(8, 6)#0, null AS NULL#0, 2020-10-10 AS DATE '2020-10-10'#0, 8.997620 AS 8.997620#0, 2023-02-23 04:31:59.808 AS TIMESTAMP '2023-02-23 04:31:59.808'#0, 1969-12-31 16:00:12.345 AS TIMESTAMP '1969-12-31 16:00:12.345'#0, 2023-02-23 20:36:00 AS TIMESTAMP_NTZ '2023-02-23 20:36:00'#0, 2023-02-23 AS DATE '2023-02-23'#0, INTERVAL '0 00:03:20' DAY TO SECOND AS INTERVAL '0 00:03:20' DAY TO SECOND#0, ... 3 more fields]
 +- LocalRelation <empty>, [id#0L, a#0, b#0]
Lines changed: 1 addition & 1 deletion
@@ -1,2 +1,2 @@
-Project [id#0L, id#0L, 1 AS 1#0, null AS NULL#0, true AS true#0, 68 AS 68#0, 9872 AS 9872#0, -8726532 AS -8726532#0, 7834609328726532 AS 7834609328726532#0L, 2.718281828459045 AS 2.718281828459045#0, -0.8 AS -0.8#0, 89.97620 AS 89.97620#0, 89889.7667231 AS 89889.7667231#0, connect! AS connect!#0, T AS T#0, ABCDEFGHIJ AS ABCDEFGHIJ#0, 0x78797A7B7C7D7E7F808182838485868788898A8B8C8D8E AS X'78797A7B7C7D7E7F808182838485868788898A8B8C8D8E'#0, 0x0806 AS X'0806'#0, [8,6] AS ARRAY(8, 6)#0, null AS NULL#0, 2020-10-10 AS DATE '2020-10-10'#0, 8.997620 AS 8.997620#0, 2023-02-23 04:31:59.808 AS TIMESTAMP '2023-02-23 04:31:59.808'#0, 1969-12-31 16:00:12.345 AS TIMESTAMP '1969-12-31 16:00:12.345'#0, 2023-02-23 20:36:00 AS TIMESTAMP_NTZ '2023-02-23 20:36:00'#0, ... 18 more fields]
+Project [id#0L, id#0L, 1 AS 1#0, null AS NULL#0, true AS true#0, 68 AS 68#0, 9872 AS 9872#0, -8726532 AS -8726532#0, 7834609328726532 AS 7834609328726532#0L, 2.718281828459045 AS 2.718281828459045#0, -0.8 AS -0.8#0, 89.97620 AS 89.97620#0, 89889.7667231 AS 89889.7667231#0, connect! AS connect!#0, T AS T#0, ABCDEFGHIJ AS ABCDEFGHIJ#0, 0x78797A7B7C7D7E7F808182838485868788898A8B8C8D8E AS X'78797A7B7C7D7E7F808182838485868788898A8B8C8D8E'#0, 0x0806 AS X'0806'#0, [8,6] AS ARRAY(8, 6)#0, null AS NULL#0, 2020-10-10 AS DATE '2020-10-10'#0, 8.997620 AS 8.997620#0, 2023-02-23 04:31:59.808 AS TIMESTAMP '2023-02-23 04:31:59.808'#0, 1969-12-31 16:00:12.345 AS TIMESTAMP '1969-12-31 16:00:12.345'#0, 2023-02-23 20:36:00 AS TIMESTAMP_NTZ '2023-02-23 20:36:00'#0, ... 19 more fields]
 +- LocalRelation <empty>, [id#0L, a#0, b#0]

sql/connect/common/src/test/resources/query-tests/queries/function_lit.json

Lines changed: 24 additions & 0 deletions
@@ -582,6 +582,30 @@
     }
    }
  }
+}, {
+  "literal": {
+    "time": {
+      "nano": "86399999999999",
+      "precision": 6
+    }
+  },
+  "common": {
+    "origin": {
+      "jvmOrigin": {
+        "stackTrace": [{
+          "classLoaderName": "app",
+          "declaringClass": "org.apache.spark.sql.functions$",
+          "methodName": "lit",
+          "fileName": "functions.scala"
+        }, {
+          "classLoaderName": "app",
+          "declaringClass": "org.apache.spark.sql.PlanGenerationTestSuite",
+          "methodName": "~~trimmed~anonfun~~",
+          "fileName": "PlanGenerationTestSuite.scala"
+        }]
+      }
+    }
+  }
 }, {
   "literal": {
     "calendarInterval": {
195 Bytes, binary file not shown.

sql/connect/common/src/test/resources/query-tests/queries/function_typedLit.json

Lines changed: 24 additions & 0 deletions
@@ -627,6 +627,30 @@
     }
    }
  }
+}, {
+  "literal": {
+    "time": {
+      "nano": "86399999999999",
+      "precision": 6
+    }
+  },
+  "common": {
+    "origin": {
+      "jvmOrigin": {
+        "stackTrace": [{
+          "classLoaderName": "app",
+          "declaringClass": "org.apache.spark.sql.functions$",
+          "methodName": "typedLit",
+          "fileName": "functions.scala"
+        }, {
+          "classLoaderName": "app",
+          "declaringClass": "org.apache.spark.sql.PlanGenerationTestSuite",
+          "methodName": "~~trimmed~anonfun~~",
+          "fileName": "PlanGenerationTestSuite.scala"
+        }]
+      }
+    }
+  }
 }, {
   "literal": {
     "calendarInterval": {
