@@ -479,23 +479,27 @@ class Expression(google.protobuf.message.Message):
    def element_type(self) -> pyspark.sql.connect.proto.types_pb2.DataType:
        """(Deprecated) The element type of the array.

-       This field is deprecated since Spark 4.1+ and should only be set
-       if the data_type field is not set. Use data_type field instead.
+       This field is deprecated since Spark 4.1+. Use data_type field instead.
        """
    @property
    def elements(
        self,
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        global___Expression.Literal
    ]:
-       """The literal values that make up the array elements."""
+       """The literal values that make up the array elements.
+
+       For inferring the data_type.element_type, only the first element needs to
+       contain the type information.
+       """
    @property
    def data_type(self) -> pyspark.sql.connect.proto.types_pb2.DataType.Array:
-       """The type of the array.
+       """The type of the array. You don't need to set this field if the type information is not needed.

        If the element type can be inferred from the first element of the elements field,
-       then you don't need to set data_type.element_type to save space. On the other hand,
-       redundant type information is also acceptable.
+       then you don't need to set data_type.element_type to save space.
+
+       On the other hand, redundant type information is also acceptable.
        """
    def __init__(
        self,
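As a rough illustration of the scheme these docstrings describe, the sketch below builds an array literal both ways: letting the element type be inferred from the first element, and spelling it out through the new data_type field. The elements, data_type, and element_type names come from the stubs above; the integer/string literal fields and the DataType.Integer/String/Array messages are assumed from types.proto, and the snippet presumes a Spark build that already includes the new field.

# Sketch only: assumes a pyspark build whose expressions_pb2 already has the
# new Expression.Literal.Array.data_type field described above.
from pyspark.sql.connect.proto import expressions_pb2, types_pb2

Literal = expressions_pb2.Expression.Literal

# Compact form: the element type is inferred from the first element,
# so data_type.element_type is left unset to save space.
compact = Literal(
    array=Literal.Array(
        elements=[Literal(integer=1), Literal(integer=2), Literal(integer=3)],
    )
)

# Redundant but acceptable: the same array with explicit type information.
explicit = Literal(
    array=Literal.Array(
        elements=[Literal(integer=1), Literal(integer=2), Literal(integer=3)],
        data_type=types_pb2.DataType.Array(
            element_type=types_pb2.DataType(integer=types_pb2.DataType.Integer()),
        ),
    )
)

# An empty array has no first element to infer from, so data_type is required.
empty = Literal(
    array=Literal.Array(
        data_type=types_pb2.DataType.Array(
            element_type=types_pb2.DataType(string=types_pb2.DataType.String()),
        ),
    )
)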
@@ -534,8 +538,7 @@ class Expression(google.protobuf.message.Message):
    def key_type(self) -> pyspark.sql.connect.proto.types_pb2.DataType:
        """(Deprecated) The key type of the map.

-       This field is deprecated since Spark 4.1+ and should only be set
-       if the data_type field is not set. Use data_type field instead.
+       This field is deprecated since Spark 4.1+. Use data_type field instead.
        """
    @property
    def value_type(self) -> pyspark.sql.connect.proto.types_pb2.DataType:
@@ -550,20 +553,29 @@ class Expression(google.protobuf.message.Message):
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        global___Expression.Literal
    ]:
-       """The literal keys that make up the map."""
+       """The literal keys that make up the map.
+
+       For inferring the data_type.key_type, only the first key needs to
+       contain the type information.
+       """
    @property
    def values(
        self,
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        global___Expression.Literal
    ]:
-       """The literal values that make up the map."""
+       """The literal values that make up the map.
+
+       For inferring the data_type.value_type, only the first value needs to
+       contain the type information.
+       """
    @property
    def data_type(self) -> pyspark.sql.connect.proto.types_pb2.DataType.Map:
-       """The type of the map.
+       """The type of the map. You don't need to set this field if the type information is not needed.

        If the key/value types can be inferred from the first element of the keys/values fields,
        then you don't need to set data_type.key_type/data_type.value_type to save space.
+
        On the other hand, redundant type information is also acceptable.
        """
    def __init__(
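The same idea applies to map literals, sketched under the same assumptions as the array example: the key and value types can ride along on the first key/value pair, and the data_type field only becomes necessary when there is nothing to infer from, such as an empty map.

# Sketch only, same assumptions as the array example above.
from pyspark.sql.connect.proto import expressions_pb2, types_pb2

Literal = expressions_pb2.Expression.Literal

# Key/value types inferred from the first key and the first value.
compact_map = Literal(
    map=Literal.Map(
        keys=[Literal(string="a"), Literal(string="b")],
        values=[Literal(integer=1), Literal(integer=2)],
    )
)

# An empty map carries its types through data_type instead.
empty_map = Literal(
    map=Literal.Map(
        data_type=types_pb2.DataType.Map(
            key_type=types_pb2.DataType(string=types_pb2.DataType.String()),
            value_type=types_pb2.DataType(integer=types_pb2.DataType.Integer()),
        ),
    )
)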
@@ -608,8 +620,7 @@ class Expression(google.protobuf.message.Message):
        """(Deprecated) The type of the struct.

        This field is deprecated since Spark 4.1+ because using DataType as the type of a struct
-       is ambiguous. This field should only be set if the data_type_struct field is not set.
-       Use data_type_struct field instead.
+       is ambiguous. Use data_type_struct field instead.
        """
    @property
    def elements(
@@ -620,7 +631,7 @@ class Expression(google.protobuf.message.Message):
        """(Required) The literal values that make up the struct elements."""
    @property
    def data_type_struct(self) -> pyspark.sql.connect.proto.types_pb2.DataType.Struct:
-       """The type of the struct.
+       """The type of the struct. You don't need to set this field if the type information is not needed.

        Whether data_type_struct.fields.data_type should be set depends on
        whether each field's type can be inferred from the elements field.
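For struct literals the shape is slightly different: data_type_struct can carry the field names, while each field's data_type may be dropped whenever the matching element already implies it. A hedged sketch, assuming the null literal field and DataType.StructField from the existing protos:

# Sketch only, same assumptions as above; the null literal field and
# DataType.StructField are taken from the existing proto definitions.
from pyspark.sql.connect.proto import expressions_pb2, types_pb2

Literal = expressions_pb2.Expression.Literal

struct_lit = Literal(
    struct=Literal.Struct(
        elements=[
            # The integer element implies its own type.
            Literal(integer=7),
            # A typed null carries its type on the element itself.
            Literal(null=types_pb2.DataType(string=types_pb2.DataType.String())),
        ],
        data_type_struct=types_pb2.DataType.Struct(
            fields=[
                # data_type omitted: inferable from the first element.
                types_pb2.DataType.StructField(name="id"),
                # data_type set anyway: redundant type information is acceptable.
                types_pb2.DataType.StructField(
                    name="label",
                    data_type=types_pb2.DataType(string=types_pb2.DataType.String()),
                ),
            ],
        ),
    )
)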