Sources/SparkConnect/base.grpc.swift (26 changes: 13 additions & 13 deletions)
@@ -31,13 +31,13 @@ import GRPCProtobuf

/// Namespace containing generated types for the "spark.connect.SparkConnectService" service.
@available(macOS 15.0, iOS 18.0, watchOS 11.0, tvOS 18.0, visionOS 2.0, *)
-internal enum Spark_Connect_SparkConnectService {
+internal enum Spark_Connect_SparkConnectService: Sendable {
/// Service descriptor for the "spark.connect.SparkConnectService" service.
internal static let descriptor = GRPCCore.ServiceDescriptor(fullyQualifiedService: "spark.connect.SparkConnectService")
/// Namespace for method metadata.
-internal enum Method {
+internal enum Method: Sendable {
/// Namespace for "ExecutePlan" metadata.
-internal enum ExecutePlan {
+internal enum ExecutePlan: Sendable {
/// Request type for "ExecutePlan".
internal typealias Input = Spark_Connect_ExecutePlanRequest
/// Response type for "ExecutePlan".
@@ -49,7 +49,7 @@ internal enum Spark_Connect_SparkConnectService {
)
}
/// Namespace for "AnalyzePlan" metadata.
-internal enum AnalyzePlan {
+internal enum AnalyzePlan: Sendable {
/// Request type for "AnalyzePlan".
internal typealias Input = Spark_Connect_AnalyzePlanRequest
/// Response type for "AnalyzePlan".
@@ -61,7 +61,7 @@ internal enum Spark_Connect_SparkConnectService {
)
}
/// Namespace for "Config" metadata.
-internal enum Config {
+internal enum Config: Sendable {
/// Request type for "Config".
internal typealias Input = Spark_Connect_ConfigRequest
/// Response type for "Config".
@@ -73,7 +73,7 @@ internal enum Spark_Connect_SparkConnectService {
)
}
/// Namespace for "AddArtifacts" metadata.
-internal enum AddArtifacts {
+internal enum AddArtifacts: Sendable {
/// Request type for "AddArtifacts".
internal typealias Input = Spark_Connect_AddArtifactsRequest
/// Response type for "AddArtifacts".
@@ -85,7 +85,7 @@ internal enum Spark_Connect_SparkConnectService {
)
}
/// Namespace for "ArtifactStatus" metadata.
-internal enum ArtifactStatus {
+internal enum ArtifactStatus: Sendable {
/// Request type for "ArtifactStatus".
internal typealias Input = Spark_Connect_ArtifactStatusesRequest
/// Response type for "ArtifactStatus".
@@ -97,7 +97,7 @@ internal enum Spark_Connect_SparkConnectService {
)
}
/// Namespace for "Interrupt" metadata.
-internal enum Interrupt {
+internal enum Interrupt: Sendable {
/// Request type for "Interrupt".
internal typealias Input = Spark_Connect_InterruptRequest
/// Response type for "Interrupt".
@@ -109,7 +109,7 @@ internal enum Spark_Connect_SparkConnectService {
)
}
/// Namespace for "ReattachExecute" metadata.
-internal enum ReattachExecute {
+internal enum ReattachExecute: Sendable {
/// Request type for "ReattachExecute".
internal typealias Input = Spark_Connect_ReattachExecuteRequest
/// Response type for "ReattachExecute".
@@ -121,7 +121,7 @@ internal enum Spark_Connect_SparkConnectService {
)
}
/// Namespace for "ReleaseExecute" metadata.
-internal enum ReleaseExecute {
+internal enum ReleaseExecute: Sendable {
/// Request type for "ReleaseExecute".
internal typealias Input = Spark_Connect_ReleaseExecuteRequest
/// Response type for "ReleaseExecute".
@@ -133,7 +133,7 @@ internal enum Spark_Connect_SparkConnectService {
)
}
/// Namespace for "ReleaseSession" metadata.
-internal enum ReleaseSession {
+internal enum ReleaseSession: Sendable {
/// Request type for "ReleaseSession".
internal typealias Input = Spark_Connect_ReleaseSessionRequest
/// Response type for "ReleaseSession".
@@ -145,7 +145,7 @@ internal enum Spark_Connect_SparkConnectService {
)
}
/// Namespace for "FetchErrorDetails" metadata.
-internal enum FetchErrorDetails {
+internal enum FetchErrorDetails: Sendable {
/// Request type for "FetchErrorDetails".
internal typealias Input = Spark_Connect_FetchErrorDetailsRequest
/// Response type for "FetchErrorDetails".
@@ -157,7 +157,7 @@ internal enum Spark_Connect_SparkConnectService {
)
}
/// Namespace for "CloneSession" metadata.
-internal enum CloneSession {
+internal enum CloneSession: Sendable {
/// Request type for "CloneSession".
internal typealias Input = Spark_Connect_CloneSessionRequest
/// Response type for "CloneSession".
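The `Sendable` sweep above is mechanical: these namespace enums are caseless, so the compiler could already infer `Sendable` for internal types, and the generated code now simply states the guarantee explicitly for strict-concurrency builds. A minimal sketch of what that buys, assuming module-internal code and grpc-swift-2's `GRPCCore.ServiceDescriptor` (itself `Sendable`):

```swift
import GRPCCore

// The descriptor constant hangs off a Sendable namespace enum, so a
// detached task can capture it without strict-concurrency diagnostics.
let descriptor = Spark_Connect_SparkConnectService.descriptor
Task.detached {
  print(descriptor.fullyQualifiedService)  // "spark.connect.SparkConnectService"
}
```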
Sources/SparkConnect/base.pb.swift (172 changes: 168 additions & 4 deletions)
@@ -37,10 +37,46 @@ fileprivate struct _GeneratedWithProtocGenSwiftVersion: SwiftProtobuf.ProtobufAP
typealias Version = _2
}

/// Compression codec for plan compression.
enum Spark_Connect_CompressionCodec: SwiftProtobuf.Enum, Swift.CaseIterable {
typealias RawValue = Int
case unspecified // = 0
case zstd // = 1
case UNRECOGNIZED(Int)

init() {
self = .unspecified
}

init?(rawValue: Int) {
switch rawValue {
case 0: self = .unspecified
case 1: self = .zstd
default: self = .UNRECOGNIZED(rawValue)
}
}

var rawValue: Int {
switch self {
case .unspecified: return 0
case .zstd: return 1
case .UNRECOGNIZED(let i): return i
}
}

// The compiler won't synthesize support with the UNRECOGNIZED case.
static let allCases: [Spark_Connect_CompressionCodec] = [
.unspecified,
.zstd,
]

}
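Like every SwiftProtobuf-generated enum, `Spark_Connect_CompressionCodec` is an open enum: raw values this client doesn't know yet decode into `UNRECOGNIZED` instead of failing. A small sketch of that behavior (module-internal, since the type is `internal`):

```swift
// Known raw values round-trip losslessly.
let codec = Spark_Connect_CompressionCodec(rawValue: 1)
assert(codec == .zstd && codec?.rawValue == 1)

// A codec added in a later proto revision still decodes; its wire value
// is preserved rather than dropped.
if case .UNRECOGNIZED(let raw)? = Spark_Connect_CompressionCodec(rawValue: 7) {
  print("unknown codec \(raw)")  // unknown codec 7
}
```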

/// A [[Plan]] is the structure that carries the runtime information for the execution from the
-/// client to the server. A [[Plan]] can either be of the type [[Relation]] which is a reference
-/// to the underlying logical plan or it can be of the [[Command]] type that is used to execute
-/// commands on the server.
+/// client to the server. A [[Plan]] can be one of the following:
+/// - [[Relation]]: a reference to the underlying logical plan.
+/// - [[Command]]: used to execute commands on the server.
+/// - [[CompressedOperation]]: a compressed representation of either a Relation or a Command.
struct Spark_Connect_Plan: Sendable {
// SwiftProtobuf.Message conformance is added in an extension below. See the
// `Message` and `Message+*Additions` files in the SwiftProtobuf library for
@@ -64,12 +100,75 @@ struct Spark_Connect_Plan: Sendable {
set {opType = .command(newValue)}
}

var compressedOperation: Spark_Connect_Plan.CompressedOperation {
get {
if case .compressedOperation(let v)? = opType {return v}
return Spark_Connect_Plan.CompressedOperation()
}
set {opType = .compressedOperation(newValue)}
}

var unknownFields = SwiftProtobuf.UnknownStorage()

enum OneOf_OpType: Equatable, Sendable {
case root(Spark_Connect_Relation)
case command(Spark_Connect_Command)
case compressedOperation(Spark_Connect_Plan.CompressedOperation)

}

struct CompressedOperation: Sendable {
// SwiftProtobuf.Message conformance is added in an extension below. See the
// `Message` and `Message+*Additions` files in the SwiftProtobuf library for
// methods supported on all messages.

var data: Data = Data()

var opType: Spark_Connect_Plan.CompressedOperation.OpType = .unspecified

var compressionCodec: Spark_Connect_CompressionCodec = .unspecified

var unknownFields = SwiftProtobuf.UnknownStorage()

enum OpType: SwiftProtobuf.Enum, Swift.CaseIterable {
typealias RawValue = Int
case unspecified // = 0
case relation // = 1
case command // = 2
case UNRECOGNIZED(Int)

init() {
self = .unspecified
}

init?(rawValue: Int) {
switch rawValue {
case 0: self = .unspecified
case 1: self = .relation
case 2: self = .command
default: self = .UNRECOGNIZED(rawValue)
}
}

var rawValue: Int {
switch self {
case .unspecified: return 0
case .relation: return 1
case .command: return 2
case .UNRECOGNIZED(let i): return i
}
}

// The compiler won't synthesize support with the UNRECOGNIZED case.
static let allCases: [Spark_Connect_Plan.CompressedOperation.OpType] = [
.unspecified,
.relation,
.command,
]

}

init() {}
}

init() {}
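Putting the new pieces together, a client wraps a pre-compressed plan like this. A hedged sketch, module-internal, using the zstd frame magic as a stand-in payload; real callers would supply actual zstd-compressed `Relation` or `Command` bytes, since this message is only the envelope:

```swift
import Foundation

var op = Spark_Connect_Plan.CompressedOperation()
op.data = Data([0x28, 0xb5, 0x2f, 0xfd])  // zstd frame magic, stand-in only
op.opType = .relation
op.compressionCodec = .zstd

var plan = Spark_Connect_Plan()
plan.compressedOperation = op  // the setter selects the oneof case

// Reads go back through the oneof, so at most one branch is ever set.
if case .compressedOperation(let c)? = plan.opType {
  assert(c.compressionCodec == .zstd)
}
```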
@@ -3246,9 +3345,13 @@ struct Spark_Connect_CloneSessionResponse: Sendable {

fileprivate let _protobuf_package = "spark.connect"

extension Spark_Connect_CompressionCodec: SwiftProtobuf._ProtoNameProviding {
static let _protobuf_nameMap = SwiftProtobuf._NameMap(bytecode: "\0\u{2}\0COMPRESSION_CODEC_UNSPECIFIED\0\u{1}COMPRESSION_CODEC_ZSTD\0")
}

extension Spark_Connect_Plan: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".Plan"
static let _protobuf_nameMap = SwiftProtobuf._NameMap(bytecode: "\0\u{1}root\0\u{1}command\0")
static let _protobuf_nameMap = SwiftProtobuf._NameMap(bytecode: "\0\u{1}root\0\u{1}command\0\u{3}compressed_operation\0")

mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
Expand Down Expand Up @@ -3282,6 +3385,19 @@ extension Spark_Connect_Plan: SwiftProtobuf.Message, SwiftProtobuf._MessageImple
self.opType = .command(v)
}
}()
case 3: try {
var v: Spark_Connect_Plan.CompressedOperation?
var hadOneofValue = false
if let current = self.opType {
hadOneofValue = true
if case .compressedOperation(let m) = current {v = m}
}
try decoder.decodeSingularMessageField(value: &v)
if let v = v {
if hadOneofValue {try decoder.handleConflictingOneOf()}
self.opType = .compressedOperation(v)
}
}()
default: break
}
}
@@ -3301,6 +3417,10 @@ extension Spark_Connect_Plan: SwiftProtobuf.Message, SwiftProtobuf._MessageImple
guard case .command(let v)? = self.opType else { preconditionFailure() }
try visitor.visitSingularMessageField(value: v, fieldNumber: 2)
}()
case .compressedOperation?: try {
guard case .compressedOperation(let v)? = self.opType else { preconditionFailure() }
try visitor.visitSingularMessageField(value: v, fieldNumber: 3)
}()
case nil: break
}
try unknownFields.traverse(visitor: &visitor)
@@ -3313,6 +3433,50 @@ extension Spark_Connect_Plan: SwiftProtobuf.Message, SwiftProtobuf._MessageImple
}
}

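A binary round-trip is the quickest way to see the new `case 3` plumbing end to end; a sketch assuming SwiftProtobuf's standard `serializedData()` / `init(serializedData:)` API:

```swift
import Foundation
import SwiftProtobuf

func roundTrip() throws {
  var plan = Spark_Connect_Plan()
  plan.compressedOperation.data = Data([0x28, 0xb5, 0x2f, 0xfd])  // stand-in bytes
  plan.compressedOperation.opType = .command
  plan.compressedOperation.compressionCodec = .zstd

  // Field 3 is written by traverse(visitor:) and read back by decodeMessage.
  let wire = try plan.serializedData()
  let decoded = try Spark_Connect_Plan(serializedData: wire)
  assert(decoded == plan)
  assert(decoded.compressedOperation.opType == .command)
}
```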
extension Spark_Connect_Plan.CompressedOperation: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = Spark_Connect_Plan.protoMessageName + ".CompressedOperation"
static let _protobuf_nameMap = SwiftProtobuf._NameMap(bytecode: "\0\u{1}data\0\u{3}op_type\0\u{3}compression_codec\0")

mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularBytesField(value: &self.data) }()
case 2: try { try decoder.decodeSingularEnumField(value: &self.opType) }()
case 3: try { try decoder.decodeSingularEnumField(value: &self.compressionCodec) }()
default: break
}
}
}

func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
if !self.data.isEmpty {
try visitor.visitSingularBytesField(value: self.data, fieldNumber: 1)
}
if self.opType != .unspecified {
try visitor.visitSingularEnumField(value: self.opType, fieldNumber: 2)
}
if self.compressionCodec != .unspecified {
try visitor.visitSingularEnumField(value: self.compressionCodec, fieldNumber: 3)
}
try unknownFields.traverse(visitor: &visitor)
}

static func ==(lhs: Spark_Connect_Plan.CompressedOperation, rhs: Spark_Connect_Plan.CompressedOperation) -> Bool {
if lhs.data != rhs.data {return false}
if lhs.opType != rhs.opType {return false}
if lhs.compressionCodec != rhs.compressionCodec {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}

extension Spark_Connect_Plan.CompressedOperation.OpType: SwiftProtobuf._ProtoNameProviding {
static let _protobuf_nameMap = SwiftProtobuf._NameMap(bytecode: "\0\u{2}\0OP_TYPE_UNSPECIFIED\0\u{1}OP_TYPE_RELATION\0\u{1}OP_TYPE_COMMAND\0")
}
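The `_NameMap` bytecode strings above are compact encodings of the names from the .proto file; they are what SwiftProtobuf consults for text-format and JSON output. A sketch of the effect:

```swift
var op = Spark_Connect_Plan.CompressedOperation()
op.opType = .relation
op.compressionCodec = .zstd
print(op.textFormatString())
// op_type: OP_TYPE_RELATION
// compression_codec: COMPRESSION_CODEC_ZSTD
```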

extension Spark_Connect_UserContext: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".UserContext"
static let _protobuf_nameMap = SwiftProtobuf._NameMap(bytecode: "\0\u{3}user_id\0\u{3}user_name\0\u{2}e\u{f}extensions\0")
Sources/SparkConnect/expressions.pb.swift (2 changes: 1 addition & 1 deletion)
@@ -2994,7 +2994,7 @@ extension Spark_Connect_Expression.Cast.EvalMode: SwiftProtobuf._ProtoNameProvid

extension Spark_Connect_Expression.Literal: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = Spark_Connect_Expression.protoMessageName + ".Literal"
static let _protobuf_nameMap = SwiftProtobuf._NameMap(bytecode: "\0\u{1}null\0\u{1}binary\0\u{1}boolean\0\u{1}byte\0\u{1}short\0\u{1}integer\0\u{1}long\0\u{2}\u{3}float\0\u{1}double\0\u{1}decimal\0\u{1}string\0\u{2}\u{3}date\0\u{1}timestamp\0\u{3}timestamp_ntz\0\u{3}calendar_interval\0\u{3}year_month_interval\0\u{3}day_time_interval\0\u{1}array\0\u{1}map\0\u{1}struct\0\u{3}specialized_array\0\u{1}time\0\u{4}J\u{1}data_type\0")
static let _protobuf_nameMap = SwiftProtobuf._NameMap(bytecode: "\0\u{1}null\0\u{1}binary\0\u{1}boolean\0\u{1}byte\0\u{1}short\0\u{1}integer\0\u{1}long\0\u{2}\u{3}float\0\u{1}double\0\u{1}decimal\0\u{1}string\0\u{2}\u{3}date\0\u{1}timestamp\0\u{3}timestamp_ntz\0\u{3}calendar_interval\0\u{3}year_month_interval\0\u{3}day_time_interval\0\u{1}array\0\u{1}map\0\u{1}struct\0\u{3}specialized_array\0\u{1}time\0\u{4}J\u{1}data_type\0\u{c}\u{1b}\u{1}\u{c}\u{1c}\u{1}")

mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {