object TypeSig
- Alphabetic
- By Inheritance
- TypeSig
- AnyRef
- Any
- Hide All
- Show All
- Public
- All
Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
val
ARRAY: TypeSig
ARRAY type support, but not very useful on its own because no child types under it are supported
- val BINARY: TypeSig
- val BOOLEAN: TypeSig
- val BYTE: TypeSig
- val CALENDAR: TypeSig
- val DATE: TypeSig
-
val
DAYTIME: TypeSig
DayTimeIntervalType of Spark 3.2.0+ support
-
val
DECIMAL_128: TypeSig
Full support for 128 bit DECIMAL.
Full support for 128 bit DECIMAL. In the future we expect to have other types with slightly less than full DECIMAL support. These are things like math operations where we cannot replicate the overflow behavior of Spark. These will be added when needed.
- val DECIMAL_64: TypeSig
- val DOUBLE: TypeSig
- val FLOAT: TypeSig
- val INT: TypeSig
- val LONG: TypeSig
-
val
MAP: TypeSig
MAP type support, but not very useful on its own because no child types under it are supported
- val NULL: TypeSig
- val SHORT: TypeSig
- val STRING: TypeSig
-
val
STRUCT: TypeSig
STRUCT type support, but only matches empty structs unless you add child types to it.
- val TIMESTAMP: TypeSig
-
val
UDT: TypeSig
User Defined Type (We don't support these in the plugin yet)
-
val
YEARMONTH: TypeSig
YearMonthIntervalType of Spark 3.2.0+ support
-
val
all: TypeSig
All types nested and not nested
-
val
ansiIntervals: TypeSig
ANSI year-month and day-time interval for Spark 3.2.0+
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
-
val
astTypes: TypeSig
All types that can appear in AST expressions
-
def
clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()
-
val
commonCudfTypes: TypeSig
A signature for types that are generally supported by the plugin/CUDF.
A signature for types that are generally supported by the plugin/CUDF. Please make sure to check what Spark actually supports instead of blindly using this in a signature.
-
def
commonCudfTypesLit(): TypeSig
Create a TypeSig that supports only literals of common primitive CUDF types.
-
val
commonCudfTypesWithNested: TypeSig
commonCudfTypes plus decimal, null and nested types.
-
val
comparable: TypeSig
All types that Spark supports for comparison operators (really everything but MAP according to https://spark.apache.org/docs/latest/api/sql/index.html#_12), e.g.
All types that Spark supports for comparison operators (really everything but MAP according to https://spark.apache.org/docs/latest/api/sql/index.html#_12), e.g. "<=>", "=", "==".
-
val
comparisonAstTypes: TypeSig
All AST types that work for comparisons
-
val
cpuAtomics: TypeSig
All values that correspond to Spark's AtomicType
-
val
cpuNumeric: TypeSig
All numeric types fp + integral + DECIMAL_128
- def decimal(maxPrecision: Int): TypeSig
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
equals(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
def
finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.Throwable] )
-
val
fp: TypeSig
All floating point types
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
- def getDataType(expr: Expression): Option[DataType]
-
val
gpuAtomics: TypeSig
All values that correspond to Spark's AtomicType but supported by GPU
-
val
gpuNumeric: TypeSig
All numeric types fp + integral + DECIMAL_128
-
val
gpuNumericAndInterval: TypeSig
numeric + CALENDAR but only for GPU
-
val
gpuOrderable: TypeSig
All types that CUDF supports sorting/ordering on.
-
def
hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
val
implicitCastsAstTypes: TypeSig
All types that can appear in an implicit cast AST expression
-
val
integral: TypeSig
All integer types
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
-
def
lit(dataTypes: TypeEnum.ValueSet): TypeSig
Create a TypeSig that only supports literals of certain given types.
-
def
lit(dataType: TypeEnum.Value): TypeSig
Create a TypeSig that only supports a literal of the given type.
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
val
none: TypeSig
No types supported at all
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
val
numericAndInterval: TypeSig
numeric + CALENDAR
-
val
orderable: TypeSig
All types that Spark supports sorting/ordering on (really everything but MAP)
-
def
psNote(dataType: TypeEnum.Value, note: String): TypeSig
Create a TypeSig that has partial support for the given type.
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
-
def
toString(): String
- Definition Classes
- AnyRef → Any
-
val
unionOfPandasUdfOut: TypeSig
Different types of Pandas UDF support different sets of output type.
Different types of Pandas UDF support different sets of output type. Please refer to https://github.com/apache/spark/blob/master/python/pyspark/sql/udf.py#L98 for more details.
It is impossible to specify the exact type signature for each Pandas UDF type in a single expression 'PythonUDF'.
So here comes the union of all the sets of supported type, to cover all the cases.
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()