@@ -17,7 +17,7 @@

package org.apache.spark.sql

import java.util.Locale
import java.util.{Locale, Properties}

import scala.collection.JavaConverters._

@@ -345,6 +345,44 @@ final class DataFrameWriter[T] private[sql] (ds: Dataset[T]) {
})
}

/**
* Saves the content of the `DataFrame` to an external database table via JDBC. If the table
* already exists in the external database, the behavior of this function depends on the save
* mode, specified by the `mode` function (the default is to throw an exception).
*
* Don't create too many partitions in parallel on a large cluster; otherwise Spark might crash
* your external database systems.
*
* JDBC-specific options and parameters for storing tables via JDBC are documented under <a
* href="https://spark.apache.org/docs/latest/sql-data-sources-jdbc.html#data-source-option">
* Data Source Option</a> for the version you use.
*
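* A minimal usage sketch (illustrative only; `df`, the URL, table name, and credentials below
* are placeholders):
* {{{
*   val props = new java.util.Properties()
*   props.setProperty("user", "sa")
*   props.setProperty("password", "")
*   df.write.mode("append").jdbc("jdbc:h2:mem:testdb", "TEST.PEOPLE", props)
* }}}
*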
* @param url
* JDBC database url of the form `jdbc:subprotocol:subname`.
* @param table
* Name of the table in the external database.
* @param connectionProperties
* JDBC database connection arguments, a list of arbitrary string tag/value pairs. Normally at
* least a "user" and "password" property should be included. "batchsize" can be used to control
* the number of rows per insert. "isolationLevel" can be one of "NONE", "READ_COMMITTED",
* "READ_UNCOMMITTED", "REPEATABLE_READ", or "SERIALIZABLE", corresponding to the standard
* transaction isolation levels defined by JDBC's Connection object, with a default of
* "READ_UNCOMMITTED".
* @since 3.4.0
*/
def jdbc(url: String, table: String, connectionProperties: Properties): Unit = {
Contributor:

I think we can remove ProblemFilters.exclude[Problem]("org.apache.spark.sql.DataFrameWriter.jdbc") from CheckConnectJvmClientCompatibility in this PR.

Contributor (author):

Thank you for the reminder.
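(For context, a hedged sketch of how such an exclusion rule appears in the compatibility check; only the quoted rule comes from the comment above, the surrounding list is illustrative:)

import com.typesafe.tools.mima.core.{Problem, ProblemFilters}

// CheckConnectJvmClientCompatibility keeps a list of MiMa exclusion rules for APIs the
// Connect client does not yet implement; the suggestion is that this entry can be dropped
// now that DataFrameWriter.jdbc exists in the client.
val excludeRules = Seq(
  // ...other exclusions kept as-is...
  ProblemFilters.exclude[Problem]("org.apache.spark.sql.DataFrameWriter.jdbc"))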

// connectionProperties should override settings in extraOptions.
Contributor:

I have a question @hvanhovell @beliefer: for the Connect client API, should we verify the parameters on the client side or on the server side?

Contributor (author):

I think verifying parameters on the server side is the more robust approach. That said, doing some of the work on the client side would reduce the pressure on the server.

Contributor:

Server side. There are a couple of reasons for this:

  • The server cannot trust the client to implement the verification properly. I am sure we will get it right for Scala and Python, but there are potentially a plethora of other frontends that need to do the same.
  • Keeping the client simple and reducing duplication. If we need to do this for every client, we'll end up with a lot of duplication and increased client complexity.
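(Illustration only: a hedged sketch of the kind of server-side check being discussed. The helper name and error type below are assumptions, not the actual SparkConnectPlanner code.)

// Hypothetical server-side validation in the planner: reject malformed requests once,
// for every client, instead of re-implementing the check in each frontend.
private def validateSaveTable(saveTable: proto.WriteOperation.SaveTable): Unit = {
  if (saveTable.getTableName.isEmpty) {
    throw new IllegalArgumentException(
      "WriteOperation.SaveTable requires a non-empty table name")
  }
}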

this.extraOptions ++= connectionProperties.asScala
// explicit url and dbtable should override all
this.extraOptions ++= Seq("url" -> url, "dbtable" -> table)
format("jdbc")
executeWriteOperation(builder => {
builder.setTable(
proto.WriteOperation.SaveTable
.newBuilder()
.setTableName(table)
.setSaveMethod(proto.WriteOperation.SaveTable.TableSaveMethod.TABLE_SAVE_METHOD_SAVE))
})
}

/**
* Saves the content of the `DataFrame` in JSON format (<a href="http://jsonlines.org/"> JSON
* Lines text format or newline-delimited JSON</a>) at the specified path. This is equivalent
@@ -18,6 +18,7 @@ package org.apache.spark.sql

import java.io.{ByteArrayOutputStream, PrintStream}
import java.nio.file.Files
import java.util.Properties

import scala.collection.JavaConverters._

@@ -611,6 +612,19 @@ class ClientE2ETestSuite extends RemoteSparkSession with SQLHelper {
session.createDataFrame(list.asScala.toSeq))
}

test("write jdbc") {
val rows = java.util.Arrays.asList[Row](Row("dave", 42), Row("mary", 222))
val schema = StructType(
StructField("name", StringType) ::
StructField("id", IntegerType) :: Nil)
val df = spark.createDataFrame(rows, schema)
df.write.jdbc("jdbc:h2:mem:testdb2", "TEST.BASICCREATETEST", new Properties())

checkSameResult(
rows.asScala.map(row => Row(row.getString(0), row.getInt(1))),
spark.read.jdbc("jdbc:h2:mem:testdb2", "TEST.BASICCREATETEST", new Properties()))
}

test("SparkSession newSession") {
val oldId = spark.sql("SELECT 1").analyze.getSessionId
val newId = spark.newSession().sql("SELECT 1").analyze.getSessionId
@@ -119,6 +119,7 @@ message WriteOperation {
TABLE_SAVE_METHOD_UNSPECIFIED = 0;
TABLE_SAVE_METHOD_SAVE_AS_TABLE = 1;
TABLE_SAVE_METHOD_INSERT_INTO = 2;
TABLE_SAVE_METHOD_SAVE = 3;
}
}

11 changes: 5 additions & 6 deletions connector/connect/server/pom.xml
@@ -181,6 +181,11 @@
<artifactId>grpc-stub</artifactId>
<version>${io.grpc.version}</version>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<version>2.1.214</version>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-codec-http2</artifactId>
@@ -215,12 +220,6 @@
<artifactId>mockito-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<version>2.1.214</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
@@ -17,6 +17,9 @@

package org.apache.spark.sql.connect.planner

import java.sql.DriverManager
import java.util.Properties

import scala.collection.JavaConverters._
import scala.collection.mutable

@@ -1741,6 +1744,9 @@ class SparkConnectPlanner(val session: SparkSession) {
w.saveAsTable(tableName)
case proto.WriteOperation.SaveTable.TableSaveMethod.TABLE_SAVE_METHOD_INSERT_INTO =>
w.insertInto(tableName)
case proto.WriteOperation.SaveTable.TableSaveMethod.TABLE_SAVE_METHOD_SAVE =>
// Make sure the in-memory H2 schema used by the JDBC end-to-end test exists before saving.
createJDBCTestSchema()
w.save()
case _ =>
throw new UnsupportedOperationException(
"WriteOperation:SaveTable:TableSaveMethod not supported "
@@ -1753,6 +1759,15 @@
}
}

private def createJDBCTestSchema(): Unit = {
// Load the H2 driver and create the TEST schema in the in-memory database
// (jdbc:h2:mem:testdb2) that the "write jdbc" end-to-end test writes to.
Utils.classForName("org.h2.Driver")

val conn = DriverManager.getConnection("jdbc:h2:mem:testdb2")
conn.prepareStatement("create schema test").executeUpdate()
conn.commit()
conn.close()
}

/**
* Transforms the write operation and executes it.
*
30 changes: 15 additions & 15 deletions python/pyspark/sql/connect/proto/commands_pb2.py
@@ -35,7 +35,7 @@


DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
b'\n\x1cspark/connect/commands.proto\x12\rspark.connect\x1a\x19google/protobuf/any.proto\x1a\x1fspark/connect/expressions.proto\x1a\x1dspark/connect/relations.proto"\xe9\x03\n\x07\x43ommand\x12]\n\x11register_function\x18\x01 \x01(\x0b\x32..spark.connect.CommonInlineUserDefinedFunctionH\x00R\x10registerFunction\x12H\n\x0fwrite_operation\x18\x02 \x01(\x0b\x32\x1d.spark.connect.WriteOperationH\x00R\x0ewriteOperation\x12_\n\x15\x63reate_dataframe_view\x18\x03 \x01(\x0b\x32).spark.connect.CreateDataFrameViewCommandH\x00R\x13\x63reateDataframeView\x12O\n\x12write_operation_v2\x18\x04 \x01(\x0b\x32\x1f.spark.connect.WriteOperationV2H\x00R\x10writeOperationV2\x12<\n\x0bsql_command\x18\x05 \x01(\x0b\x32\x19.spark.connect.SqlCommandH\x00R\nsqlCommand\x12\x35\n\textension\x18\xe7\x07 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00R\textensionB\x0e\n\x0c\x63ommand_type"\x90\x01\n\nSqlCommand\x12\x10\n\x03sql\x18\x01 \x01(\tR\x03sql\x12\x37\n\x04\x61rgs\x18\x02 \x03(\x0b\x32#.spark.connect.SqlCommand.ArgsEntryR\x04\x61rgs\x1a\x37\n\tArgsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01"\x96\x01\n\x1a\x43reateDataFrameViewCommand\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12\x1b\n\tis_global\x18\x03 \x01(\x08R\x08isGlobal\x12\x18\n\x07replace\x18\x04 \x01(\x08R\x07replace"\x9b\x08\n\x0eWriteOperation\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x1b\n\x06source\x18\x02 \x01(\tH\x01R\x06source\x88\x01\x01\x12\x14\n\x04path\x18\x03 \x01(\tH\x00R\x04path\x12?\n\x05table\x18\x04 \x01(\x0b\x32\'.spark.connect.WriteOperation.SaveTableH\x00R\x05table\x12:\n\x04mode\x18\x05 \x01(\x0e\x32&.spark.connect.WriteOperation.SaveModeR\x04mode\x12*\n\x11sort_column_names\x18\x06 \x03(\tR\x0fsortColumnNames\x12\x31\n\x14partitioning_columns\x18\x07 \x03(\tR\x13partitioningColumns\x12\x43\n\tbucket_by\x18\x08 \x01(\x0b\x32&.spark.connect.WriteOperation.BucketByR\x08\x62ucketBy\x12\x44\n\x07options\x18\t \x03(\x0b\x32*.spark.connect.WriteOperation.OptionsEntryR\x07options\x1a:\n\x0cOptionsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1a\x82\x02\n\tSaveTable\x12\x1d\n\ntable_name\x18\x01 \x01(\tR\ttableName\x12X\n\x0bsave_method\x18\x02 \x01(\x0e\x32\x37.spark.connect.WriteOperation.SaveTable.TableSaveMethodR\nsaveMethod"|\n\x0fTableSaveMethod\x12!\n\x1dTABLE_SAVE_METHOD_UNSPECIFIED\x10\x00\x12#\n\x1fTABLE_SAVE_METHOD_SAVE_AS_TABLE\x10\x01\x12!\n\x1dTABLE_SAVE_METHOD_INSERT_INTO\x10\x02\x1a[\n\x08\x42ucketBy\x12.\n\x13\x62ucket_column_names\x18\x01 \x03(\tR\x11\x62ucketColumnNames\x12\x1f\n\x0bnum_buckets\x18\x02 \x01(\x05R\nnumBuckets"\x89\x01\n\x08SaveMode\x12\x19\n\x15SAVE_MODE_UNSPECIFIED\x10\x00\x12\x14\n\x10SAVE_MODE_APPEND\x10\x01\x12\x17\n\x13SAVE_MODE_OVERWRITE\x10\x02\x12\x1d\n\x19SAVE_MODE_ERROR_IF_EXISTS\x10\x03\x12\x14\n\x10SAVE_MODE_IGNORE\x10\x04\x42\x0b\n\tsave_typeB\t\n\x07_source"\xad\x06\n\x10WriteOperationV2\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x1d\n\ntable_name\x18\x02 \x01(\tR\ttableName\x12\x1f\n\x08provider\x18\x03 \x01(\tH\x00R\x08provider\x88\x01\x01\x12L\n\x14partitioning_columns\x18\x04 \x03(\x0b\x32\x19.spark.connect.ExpressionR\x13partitioningColumns\x12\x46\n\x07options\x18\x05 \x03(\x0b\x32,.spark.connect.WriteOperationV2.OptionsEntryR\x07options\x12_\n\x10table_properties\x18\x06 
\x03(\x0b\x32\x34.spark.connect.WriteOperationV2.TablePropertiesEntryR\x0ftableProperties\x12\x38\n\x04mode\x18\x07 \x01(\x0e\x32$.spark.connect.WriteOperationV2.ModeR\x04mode\x12J\n\x13overwrite_condition\x18\x08 \x01(\x0b\x32\x19.spark.connect.ExpressionR\x12overwriteCondition\x1a:\n\x0cOptionsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1a\x42\n\x14TablePropertiesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01"\x9f\x01\n\x04Mode\x12\x14\n\x10MODE_UNSPECIFIED\x10\x00\x12\x0f\n\x0bMODE_CREATE\x10\x01\x12\x12\n\x0eMODE_OVERWRITE\x10\x02\x12\x1d\n\x19MODE_OVERWRITE_PARTITIONS\x10\x03\x12\x0f\n\x0bMODE_APPEND\x10\x04\x12\x10\n\x0cMODE_REPLACE\x10\x05\x12\x1a\n\x16MODE_CREATE_OR_REPLACE\x10\x06\x42\x0b\n\t_providerB"\n\x1eorg.apache.spark.connect.protoP\x01\x62\x06proto3'
b'\n\x1cspark/connect/commands.proto\x12\rspark.connect\x1a\x19google/protobuf/any.proto\x1a\x1fspark/connect/expressions.proto\x1a\x1dspark/connect/relations.proto"\xe9\x03\n\x07\x43ommand\x12]\n\x11register_function\x18\x01 \x01(\x0b\x32..spark.connect.CommonInlineUserDefinedFunctionH\x00R\x10registerFunction\x12H\n\x0fwrite_operation\x18\x02 \x01(\x0b\x32\x1d.spark.connect.WriteOperationH\x00R\x0ewriteOperation\x12_\n\x15\x63reate_dataframe_view\x18\x03 \x01(\x0b\x32).spark.connect.CreateDataFrameViewCommandH\x00R\x13\x63reateDataframeView\x12O\n\x12write_operation_v2\x18\x04 \x01(\x0b\x32\x1f.spark.connect.WriteOperationV2H\x00R\x10writeOperationV2\x12<\n\x0bsql_command\x18\x05 \x01(\x0b\x32\x19.spark.connect.SqlCommandH\x00R\nsqlCommand\x12\x35\n\textension\x18\xe7\x07 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00R\textensionB\x0e\n\x0c\x63ommand_type"\x90\x01\n\nSqlCommand\x12\x10\n\x03sql\x18\x01 \x01(\tR\x03sql\x12\x37\n\x04\x61rgs\x18\x02 \x03(\x0b\x32#.spark.connect.SqlCommand.ArgsEntryR\x04\x61rgs\x1a\x37\n\tArgsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01"\x96\x01\n\x1a\x43reateDataFrameViewCommand\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12\x1b\n\tis_global\x18\x03 \x01(\x08R\x08isGlobal\x12\x18\n\x07replace\x18\x04 \x01(\x08R\x07replace"\xb8\x08\n\x0eWriteOperation\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x1b\n\x06source\x18\x02 \x01(\tH\x01R\x06source\x88\x01\x01\x12\x14\n\x04path\x18\x03 \x01(\tH\x00R\x04path\x12?\n\x05table\x18\x04 \x01(\x0b\x32\'.spark.connect.WriteOperation.SaveTableH\x00R\x05table\x12:\n\x04mode\x18\x05 \x01(\x0e\x32&.spark.connect.WriteOperation.SaveModeR\x04mode\x12*\n\x11sort_column_names\x18\x06 \x03(\tR\x0fsortColumnNames\x12\x31\n\x14partitioning_columns\x18\x07 \x03(\tR\x13partitioningColumns\x12\x43\n\tbucket_by\x18\x08 \x01(\x0b\x32&.spark.connect.WriteOperation.BucketByR\x08\x62ucketBy\x12\x44\n\x07options\x18\t \x03(\x0b\x32*.spark.connect.WriteOperation.OptionsEntryR\x07options\x1a:\n\x0cOptionsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1a\x9f\x02\n\tSaveTable\x12\x1d\n\ntable_name\x18\x01 \x01(\tR\ttableName\x12X\n\x0bsave_method\x18\x02 \x01(\x0e\x32\x37.spark.connect.WriteOperation.SaveTable.TableSaveMethodR\nsaveMethod"\x98\x01\n\x0fTableSaveMethod\x12!\n\x1dTABLE_SAVE_METHOD_UNSPECIFIED\x10\x00\x12#\n\x1fTABLE_SAVE_METHOD_SAVE_AS_TABLE\x10\x01\x12!\n\x1dTABLE_SAVE_METHOD_INSERT_INTO\x10\x02\x12\x1a\n\x16TABLE_SAVE_METHOD_SAVE\x10\x03\x1a[\n\x08\x42ucketBy\x12.\n\x13\x62ucket_column_names\x18\x01 \x03(\tR\x11\x62ucketColumnNames\x12\x1f\n\x0bnum_buckets\x18\x02 \x01(\x05R\nnumBuckets"\x89\x01\n\x08SaveMode\x12\x19\n\x15SAVE_MODE_UNSPECIFIED\x10\x00\x12\x14\n\x10SAVE_MODE_APPEND\x10\x01\x12\x17\n\x13SAVE_MODE_OVERWRITE\x10\x02\x12\x1d\n\x19SAVE_MODE_ERROR_IF_EXISTS\x10\x03\x12\x14\n\x10SAVE_MODE_IGNORE\x10\x04\x42\x0b\n\tsave_typeB\t\n\x07_source"\xad\x06\n\x10WriteOperationV2\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x1d\n\ntable_name\x18\x02 \x01(\tR\ttableName\x12\x1f\n\x08provider\x18\x03 \x01(\tH\x00R\x08provider\x88\x01\x01\x12L\n\x14partitioning_columns\x18\x04 \x03(\x0b\x32\x19.spark.connect.ExpressionR\x13partitioningColumns\x12\x46\n\x07options\x18\x05 \x03(\x0b\x32,.spark.connect.WriteOperationV2.OptionsEntryR\x07options\x12_\n\x10table_properties\x18\x06 
\x03(\x0b\x32\x34.spark.connect.WriteOperationV2.TablePropertiesEntryR\x0ftableProperties\x12\x38\n\x04mode\x18\x07 \x01(\x0e\x32$.spark.connect.WriteOperationV2.ModeR\x04mode\x12J\n\x13overwrite_condition\x18\x08 \x01(\x0b\x32\x19.spark.connect.ExpressionR\x12overwriteCondition\x1a:\n\x0cOptionsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1a\x42\n\x14TablePropertiesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01"\x9f\x01\n\x04Mode\x12\x14\n\x10MODE_UNSPECIFIED\x10\x00\x12\x0f\n\x0bMODE_CREATE\x10\x01\x12\x12\n\x0eMODE_OVERWRITE\x10\x02\x12\x1d\n\x19MODE_OVERWRITE_PARTITIONS\x10\x03\x12\x0f\n\x0bMODE_APPEND\x10\x04\x12\x10\n\x0cMODE_REPLACE\x10\x05\x12\x1a\n\x16MODE_CREATE_OR_REPLACE\x10\x06\x42\x0b\n\t_providerB"\n\x1eorg.apache.spark.connect.protoP\x01\x62\x06proto3'
)


@@ -193,23 +193,23 @@
_CREATEDATAFRAMEVIEWCOMMAND._serialized_start = 778
_CREATEDATAFRAMEVIEWCOMMAND._serialized_end = 928
_WRITEOPERATION._serialized_start = 931
_WRITEOPERATION._serialized_end = 1982
_WRITEOPERATION._serialized_end = 2011
_WRITEOPERATION_OPTIONSENTRY._serialized_start = 1406
_WRITEOPERATION_OPTIONSENTRY._serialized_end = 1464
_WRITEOPERATION_SAVETABLE._serialized_start = 1467
_WRITEOPERATION_SAVETABLE._serialized_end = 1725
_WRITEOPERATION_SAVETABLE_TABLESAVEMETHOD._serialized_start = 1601
_WRITEOPERATION_SAVETABLE_TABLESAVEMETHOD._serialized_end = 1725
_WRITEOPERATION_BUCKETBY._serialized_start = 1727
_WRITEOPERATION_BUCKETBY._serialized_end = 1818
_WRITEOPERATION_SAVEMODE._serialized_start = 1821
_WRITEOPERATION_SAVEMODE._serialized_end = 1958
_WRITEOPERATIONV2._serialized_start = 1985
_WRITEOPERATIONV2._serialized_end = 2798
_WRITEOPERATION_SAVETABLE._serialized_end = 1754
_WRITEOPERATION_SAVETABLE_TABLESAVEMETHOD._serialized_start = 1602
_WRITEOPERATION_SAVETABLE_TABLESAVEMETHOD._serialized_end = 1754
_WRITEOPERATION_BUCKETBY._serialized_start = 1756
_WRITEOPERATION_BUCKETBY._serialized_end = 1847
_WRITEOPERATION_SAVEMODE._serialized_start = 1850
_WRITEOPERATION_SAVEMODE._serialized_end = 1987
_WRITEOPERATIONV2._serialized_start = 2014
_WRITEOPERATIONV2._serialized_end = 2827
_WRITEOPERATIONV2_OPTIONSENTRY._serialized_start = 1406
_WRITEOPERATIONV2_OPTIONSENTRY._serialized_end = 1464
_WRITEOPERATIONV2_TABLEPROPERTIESENTRY._serialized_start = 2557
_WRITEOPERATIONV2_TABLEPROPERTIESENTRY._serialized_end = 2623
_WRITEOPERATIONV2_MODE._serialized_start = 2626
_WRITEOPERATIONV2_MODE._serialized_end = 2785
_WRITEOPERATIONV2_TABLEPROPERTIESENTRY._serialized_start = 2586
_WRITEOPERATIONV2_TABLEPROPERTIESENTRY._serialized_end = 2652
_WRITEOPERATIONV2_MODE._serialized_start = 2655
_WRITEOPERATIONV2_MODE._serialized_end = 2814
# @@protoc_insertion_point(module_scope)
2 changes: 2 additions & 0 deletions python/pyspark/sql/connect/proto/commands_pb2.pyi
@@ -296,11 +296,13 @@ class WriteOperation(google.protobuf.message.Message):
TABLE_SAVE_METHOD_UNSPECIFIED: WriteOperation.SaveTable._TableSaveMethod.ValueType # 0
TABLE_SAVE_METHOD_SAVE_AS_TABLE: WriteOperation.SaveTable._TableSaveMethod.ValueType # 1
TABLE_SAVE_METHOD_INSERT_INTO: WriteOperation.SaveTable._TableSaveMethod.ValueType # 2
TABLE_SAVE_METHOD_SAVE: WriteOperation.SaveTable._TableSaveMethod.ValueType # 3

class TableSaveMethod(_TableSaveMethod, metaclass=_TableSaveMethodEnumTypeWrapper): ...
TABLE_SAVE_METHOD_UNSPECIFIED: WriteOperation.SaveTable.TableSaveMethod.ValueType # 0
TABLE_SAVE_METHOD_SAVE_AS_TABLE: WriteOperation.SaveTable.TableSaveMethod.ValueType # 1
TABLE_SAVE_METHOD_INSERT_INTO: WriteOperation.SaveTable.TableSaveMethod.ValueType # 2
TABLE_SAVE_METHOD_SAVE: WriteOperation.SaveTable.TableSaveMethod.ValueType # 3

TABLE_NAME_FIELD_NUMBER: builtins.int
SAVE_METHOD_FIELD_NUMBER: builtins.int