
Commit 37d61e7

[SPARK-54043] Update Spark Connect-generated Swift source code with 4.1.0-preview3 RC1
1 parent 8816b7c commit 37d61e7

File tree

11 files changed: +1959 −256 lines changed


Sources/SparkConnect/ArrowData.swift

Lines changed: 4 additions & 4 deletions
@@ -39,19 +39,19 @@ public class ArrowData {
   ) throws {
     let infoType = arrowType.info
     switch infoType {
-    case let .primitiveInfo(typeId):
+    case .primitiveInfo(let typeId):
       if typeId == ArrowTypeId.unknown {
         throw ArrowError.unknownType("Unknown primitive type for data")
       }
-    case let .variableInfo(typeId):
+    case .variableInfo(let typeId):
       if typeId == ArrowTypeId.unknown {
         throw ArrowError.unknownType("Unknown variable type for data")
       }
-    case let .timeInfo(typeId):
+    case .timeInfo(let typeId):
       if typeId == ArrowTypeId.unknown {
         throw ArrowError.unknownType("Unknown time type for data")
       }
-    case let .complexInfo(typeId):
+    case .complexInfo(let typeId):
       if typeId == ArrowTypeId.unknown {
         throw ArrowError.unknownType("Unknown complex type for data")
       }
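This hunk only moves the `let` from the case keyword into the associated-value pattern; the two spellings are interchangeable in Swift, and the new form simply matches the project's formatter. A minimal sketch with a hypothetical enum (not from this repository) showing both forms matching the same value:

// Hypothetical enum, for illustration only.
enum Info {
  case primitiveInfo(Int)
}

let info = Info.primitiveInfo(7)

// Binding declared on the whole pattern (style removed by this commit).
if case let .primitiveInfo(id) = info { print(id) }  // prints 7

// Binding attached to the associated value (style introduced by this commit).
if case .primitiveInfo(let id) = info { print(id) }  // prints 7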

Sources/SparkConnect/Extension.swift

Lines changed: 7 additions & 6 deletions
@@ -182,14 +182,15 @@ extension String {
     }
   }

-  var toDatasetType: DatasetType {
+  var toOutputType: OutputType {
     let mode =
       switch self {
-      case "unspecified": DatasetType.unspecified
-      case "materializedView": DatasetType.materializedView
-      case "table": DatasetType.table
-      case "temporaryView": DatasetType.temporaryView
-      default: DatasetType.UNRECOGNIZED(-1)
+      case "unspecified": OutputType.unspecified
+      case "materializedView": OutputType.materializedView
+      case "table": OutputType.table
+      case "temporaryView": OutputType.temporaryView
+      case "sink": OutputType.sink
+      default: OutputType.UNRECOGNIZED(-1)
       }
     return mode
   }
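A rough sketch of how the renamed `toOutputType` helper could be exercised, assuming the generated `OutputType` enum exposes exactly the cases listed in the hunk above; the sample strings and the loop are illustrative, not code from the repository:

// Illustrative only; relies on the OutputType cases shown in the diff.
let kinds = ["unspecified", "materializedView", "table", "temporaryView", "sink", "other"]
for kind in kinds {
  // Unknown strings fall through to UNRECOGNIZED(-1).
  print("\(kind) -> \(kind.toOutputType)")
}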

Sources/SparkConnect/SparkConnectClient.swift

Lines changed: 12 additions & 12 deletions
@@ -145,8 +145,8 @@ public actor SparkConnectClient {
       throw SparkConnectError.InvalidViewName
     case let m where m.contains("DATA_SOURCE_NOT_FOUND"):
       throw SparkConnectError.DataSourceNotFound
-    case let m where m.contains("DATASET_TYPE_UNSPECIFIED"):
-      throw SparkConnectError.DatasetTypeUnspecified
+    case let m where m.contains("OUTPUT_TYPE_UNSPECIFIED"):
+      throw SparkConnectError.OutputTypeUnspecified
     default:
       throw error
     }
@@ -1240,27 +1240,27 @@ public actor SparkConnectClient {
   }

   @discardableResult
-  func defineDataset(
+  func defineOutput(
     _ dataflowGraphID: String,
-    _ datasetName: String,
-    _ datasetType: String,
+    _ outputName: String,
+    _ outputType: String,
     _ comment: String? = nil
   ) async throws -> Bool {
     try await withGPRC { client in
       if UUID(uuidString: dataflowGraphID) == nil {
         throw SparkConnectError.InvalidArgument
       }

-      var defineDataset = Spark_Connect_PipelineCommand.DefineDataset()
-      defineDataset.dataflowGraphID = dataflowGraphID
-      defineDataset.datasetName = datasetName
-      defineDataset.datasetType = datasetType.toDatasetType
+      var defineOutput = Spark_Connect_PipelineCommand.DefineOutput()
+      defineOutput.dataflowGraphID = dataflowGraphID
+      defineOutput.outputName = outputName
+      defineOutput.outputType = outputType.toOutputType
       if let comment {
-        defineDataset.comment = comment
+        defineOutput.comment = comment
       }

       var pipelineCommand = Spark_Connect_PipelineCommand()
-      pipelineCommand.commandType = .defineDataset(defineDataset)
+      pipelineCommand.commandType = .defineOutput(defineOutput)

       var command = Spark_Connect_Command()
       command.commandType = .pipelineCommand(pipelineCommand)
@@ -1288,7 +1288,7 @@ public actor SparkConnectClient {
       defineFlow.dataflowGraphID = dataflowGraphID
       defineFlow.flowName = flowName
       defineFlow.targetDatasetName = targetDatasetName
-      defineFlow.relation = relation
+      // defineFlow.relation = relation

       var pipelineCommand = Spark_Connect_PipelineCommand()
       pipelineCommand.commandType = .defineFlow(defineFlow)
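A hedged sketch of how the renamed `defineOutput` API might be driven; `client` and `graphID` are placeholders rather than names from the repository, and it assumes the caller already holds a `SparkConnectClient` actor and a dataflow graph ID obtained elsewhere:

// Hypothetical call site for the renamed API.
let graphID = "3f1c2a9e-7b64-4c55-9d2e-0a1b2c3d4e5f"  // must parse as a UUID
// Builds a DefineOutput pipeline command for a table-typed output;
// a non-UUID graph ID throws SparkConnectError.InvalidArgument before any RPC is made.
try await client.defineOutput(graphID, "daily_sales", "table", "aggregated sales")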

Sources/SparkConnect/SparkConnectError.swift

Lines changed: 1 addition & 1 deletion
@@ -22,11 +22,11 @@ public enum SparkConnectError: Error {
   case CatalogNotFound
   case ColumnNotFound
   case DataSourceNotFound
-  case DatasetTypeUnspecified
   case InvalidArgument
   case InvalidSessionID
   case InvalidType
   case InvalidViewName
+  case OutputTypeUnspecified
   case ParseSyntaxError
   case SchemaNotFound
   case SessionClosed
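With `DatasetTypeUnspecified` removed, any caller that matched the old case needs to match `OutputTypeUnspecified` instead. A hedged sketch of such a catch site; the surrounding call, and the assumption that the server reports OUTPUT_TYPE_UNSPECIFIED when the output type is left unspecified, are illustrative only:

// Hypothetical catch site updated for the renamed error case.
do {
  try await client.defineOutput(graphID, "daily_sales", "unspecified")
} catch SparkConnectError.OutputTypeUnspecified {
  print("An output type must be specified for this pipeline output.")
}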

Sources/SparkConnect/TypeAliases.swift

Lines changed: 1 addition & 1 deletion
@@ -23,7 +23,6 @@ typealias AnalyzePlanResponse = Spark_Connect_AnalyzePlanResponse
 typealias Command = Spark_Connect_Command
 typealias ConfigRequest = Spark_Connect_ConfigRequest
 typealias DataSource = Spark_Connect_Read.DataSource
-typealias DatasetType = Spark_Connect_DatasetType
 typealias DataType = Spark_Connect_DataType
 typealias DayTimeInterval = Spark_Connect_DataType.DayTimeInterval
 typealias Drop = Spark_Connect_Drop
@@ -45,6 +44,7 @@ typealias MergeIntoTableCommand = Spark_Connect_MergeIntoTableCommand
 typealias NamedTable = Spark_Connect_Read.NamedTable
 typealias OneOf_Analyze = AnalyzePlanRequest.OneOf_Analyze
 typealias OneOf_CatType = Spark_Connect_Catalog.OneOf_CatType
+typealias OutputType = Spark_Connect_OutputType
 typealias Plan = Spark_Connect_Plan
 typealias Project = Spark_Connect_Project
 typealias Range = Spark_Connect_Range

0 commit comments
