trait WriteOperationOrBuilder extends MessageOrBuilder
- Alphabetic
- By Inheritance
- WriteOperationOrBuilder
- MessageOrBuilder
- MessageLiteOrBuilder
- AnyRef
- Any
- Hide All
- Show All
- Public
- Protected
Abstract Value Members
- abstract def containsOptions(key: String): Boolean
(Optional) A list of configuration options.
map<string, string> options = 9;
- abstract def findInitializationErrors(): List[String]
- Definition Classes
- MessageOrBuilder
- abstract def getAllFields(): Map[FieldDescriptor, AnyRef]
- Definition Classes
- MessageOrBuilder
- abstract def getBucketBy(): BucketBy
(Optional) Bucketing specification. Bucketing must set the number of buckets and the columns to bucket by.
.spark.connect.WriteOperation.BucketBy bucket_by = 8;
- returns
The bucketBy.
- abstract def getBucketByOrBuilder(): BucketByOrBuilder
(Optional) Bucketing specification. Bucketing must set the number of buckets and the columns to bucket by.
.spark.connect.WriteOperation.BucketBy bucket_by = 8;
- abstract def getDefaultInstanceForType(): Message
- Definition Classes
- MessageOrBuilder → MessageLiteOrBuilder
- abstract def getDescriptorForType(): Descriptor
- Definition Classes
- MessageOrBuilder
- abstract def getField(field: FieldDescriptor): AnyRef
- Definition Classes
- MessageOrBuilder
- abstract def getInitializationErrorString(): String
- Definition Classes
- MessageOrBuilder
- abstract def getInput(): Relation
(Required) The output of the `input` relation will be persisted according to the options.
.spark.connect.Relation input = 1;
- returns
The input.
- abstract def getInputOrBuilder(): RelationOrBuilder
(Required) The output of the `input` relation will be persisted according to the options.
.spark.connect.Relation input = 1;
- abstract def getMode(): SaveMode
(Required) the save mode.
.spark.connect.WriteOperation.SaveMode mode = 5;
- returns
The mode.
- abstract def getModeValue(): Int
(Required) the save mode.
.spark.connect.WriteOperation.SaveMode mode = 5;
- returns
The enum numeric value on the wire for mode.
- abstract def getOneofFieldDescriptor(oneof: OneofDescriptor): FieldDescriptor
- Definition Classes
- MessageOrBuilder
- abstract def getOptionsCount(): Int
(Optional) A list of configuration options.
map<string, string> options = 9;
- abstract def getOptionsMap(): Map[String, String]
(Optional) A list of configuration options.
map<string, string> options = 9;
- abstract def getOptionsOrDefault(key: String, defaultValue: String): String
(Optional) A list of configuration options.
map<string, string> options = 9;
- abstract def getOptionsOrThrow(key: String): String
(Optional) A list of configuration options.
map<string, string> options = 9;
- abstract def getPartitioningColumns(index: Int): String
(Optional) List of columns for partitioning.
repeated string partitioning_columns = 7;
- index
The index of the element to return.
- returns
The partitioningColumns at the given index.
- abstract def getPartitioningColumnsBytes(index: Int): ByteString
(Optional) List of columns for partitioning.
repeated string partitioning_columns = 7;
- index
The index of the value to return.
- returns
The bytes of the partitioningColumns at the given index.
- abstract def getPartitioningColumnsCount(): Int
(Optional) List of columns for partitioning.
repeated string partitioning_columns = 7;
- returns
The count of partitioningColumns.
- abstract def getPartitioningColumnsList(): List[String]
(Optional) List of columns for partitioning.
repeated string partitioning_columns = 7;
- returns
A list containing the partitioningColumns.
- abstract def getPath(): String
string path = 3;
- returns
The path.
- abstract def getPathBytes(): ByteString
string path = 3;
- returns
The bytes for path.
- abstract def getRepeatedField(field: FieldDescriptor, index: Int): AnyRef
- Definition Classes
- MessageOrBuilder
- abstract def getRepeatedFieldCount(field: FieldDescriptor): Int
- Definition Classes
- MessageOrBuilder
- abstract def getSaveTypeCase(): SaveTypeCase
- abstract def getSortColumnNames(index: Int): String
(Optional) List of columns to sort the output by.
repeated string sort_column_names = 6;
- index
The index of the element to return.
- returns
The sortColumnNames at the given index.
- abstract def getSortColumnNamesBytes(index: Int): ByteString
(Optional) List of columns to sort the output by.
repeated string sort_column_names = 6;
- index
The index of the value to return.
- returns
The bytes of the sortColumnNames at the given index.
- abstract def getSortColumnNamesCount(): Int
(Optional) List of columns to sort the output by.
repeated string sort_column_names = 6;
- returns
The count of sortColumnNames.
- abstract def getSortColumnNamesList(): List[String]
(Optional) List of columns to sort the output by.
repeated string sort_column_names = 6;
- returns
A list containing the sortColumnNames.
- abstract def getSource(): String
(Optional) Format value according to the Spark documentation. Examples are: text, parquet, delta.
optional string source = 2;
- returns
The source.
- abstract def getSourceBytes(): ByteString
(Optional) Format value according to the Spark documentation. Examples are: text, parquet, delta.
optional string source = 2;
- returns
The bytes for source.
- abstract def getTable(): SaveTable
.spark.connect.WriteOperation.SaveTable table = 4;
- returns
The table.
- abstract def getTableOrBuilder(): SaveTableOrBuilder
.spark.connect.WriteOperation.SaveTable table = 4; - abstract def getUnknownFields(): UnknownFieldSet
- Definition Classes
- MessageOrBuilder
- abstract def hasBucketBy(): Boolean
(Optional) Bucketing specification. Bucketing must set the number of buckets and the columns to bucket by.
.spark.connect.WriteOperation.BucketBy bucket_by = 8;
- returns
Whether the bucketBy field is set.
- abstract def hasField(field: FieldDescriptor): Boolean
- Definition Classes
- MessageOrBuilder
- abstract def hasInput(): Boolean
(Required) The output of the `input` relation will be persisted according to the options.
.spark.connect.Relation input = 1;
- returns
Whether the input field is set.
- abstract def hasOneof(oneof: OneofDescriptor): Boolean
- Definition Classes
- MessageOrBuilder
- abstract def hasPath(): Boolean
string path = 3;
- returns
Whether the path field is set.
- abstract def hasSource(): Boolean
(Optional) Format value according to the Spark documentation. Examples are: text, parquet, delta.
optional string source = 2;
- returns
Whether the source field is set.
- abstract def hasTable(): Boolean
.spark.connect.WriteOperation.SaveTable table = 4;
- returns
Whether the table field is set.
- abstract def isInitialized(): Boolean
- Definition Classes
- MessageLiteOrBuilder
- abstract def getOptions(): Map[String, String]
Use #getOptionsMap() instead.
- Annotations
- @Deprecated
- Deprecated
Concrete Value Members
- final def !=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- final def ##: Int
- Definition Classes
- AnyRef → Any
- final def ==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- final def asInstanceOf[T0]: T0
- Definition Classes
- Any
- def clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.CloneNotSupportedException]) @native()
- final def eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- def equals(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef → Any
- def finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.Throwable])
- final def getClass(): Class[_ <: AnyRef]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
- def hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
- final def isInstanceOf[T0]: Boolean
- Definition Classes
- Any
- final def ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- final def notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
- final def notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
- final def synchronized[T0](arg0: => T0): T0
- Definition Classes
- AnyRef
- def toString(): String
- Definition Classes
- AnyRef → Any
- final def wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
- final def wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
- final def wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException]) @native()