Packages

case class GpuParquetScan(sparkSession: SparkSession, hadoopConf: Configuration, fileIndex: PartitioningAwareFileIndex, dataSchema: StructType, readDataSchema: StructType, readPartitionSchema: StructType, pushedFilters: Array[Filter], options: CaseInsensitiveStringMap, partitionFilters: Seq[Expression], dataFilters: Seq[Expression], rapidsConf: RapidsConf, queryUsesInputFile: Boolean = false) extends FileScan with GpuScan with Logging with Product with Serializable

Base GpuParquetScan used for common code across Spark versions. GPU version of Spark's 'ParquetScan'.

sparkSession

SparkSession.

hadoopConf

Hadoop configuration.

fileIndex

File index of the relation.

dataSchema

Schema of the data.

readDataSchema

Schema to read.

readPartitionSchema

Partition schema.

pushedFilters

Filters on non-partition columns.

options

Parquet option settings.

partitionFilters

Filters on partition columns.

dataFilters

File source metadata filters.

rapidsConf

Rapids configuration.

queryUsesInputFile

This parameter makes it easy for GpuTransitionOverrides to turn off input-file support when InputFileName, InputFileBlockStart, or InputFileBlockLength are used.

Linear Supertypes
Serializable, Serializable, Product, Equals, GpuScan, ScanWithMetrics, FileScan, Logging, SupportsMetadata, SupportsReportStatistics, Batch, Scan, AnyRef, Any
Ordering
  1. Alphabetic
  2. By Inheritance
Inherited
  1. GpuParquetScan
  2. Serializable
  3. Serializable
  4. Product
  5. Equals
  6. GpuScan
  7. ScanWithMetrics
  8. FileScan
  9. Logging
  10. SupportsMetadata
  11. SupportsReportStatistics
  12. Batch
  13. Scan
  14. AnyRef
  15. Any
  1. Hide All
  2. Show All
Visibility
  1. Public
  2. All

Instance Constructors

  1. new GpuParquetScan(sparkSession: SparkSession, hadoopConf: Configuration, fileIndex: PartitioningAwareFileIndex, dataSchema: StructType, readDataSchema: StructType, readPartitionSchema: StructType, pushedFilters: Array[Filter], options: CaseInsensitiveStringMap, partitionFilters: Seq[Expression], dataFilters: Seq[Expression], rapidsConf: RapidsConf, queryUsesInputFile: Boolean = false)

    sparkSession

    SparkSession.

    hadoopConf

    Hadoop configuration.

    fileIndex

    File index of the relation.

    dataSchema

    Schema of the data.

    readDataSchema

    Schema to read.

    readPartitionSchema

    Partition schema.

    pushedFilters

    Filters on non-partition columns.

    options

    Parquet option settings.

    partitionFilters

    Filters on partition columns.

    dataFilters

    File source metadata filters.

    rapidsConf

    Rapids configuration.

    queryUsesInputFile

    This parameter makes it easy for GpuTransitionOverrides to turn off input-file support when InputFileName, InputFileBlockStart, or InputFileBlockLength are used.

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##(): Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  5. def clone(): AnyRef
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
  6. def createReaderFactory(): PartitionReaderFactory
    Definition Classes
    GpuParquetScan → Batch
  7. val dataFilters: Seq[Expression]
    Definition Classes
    GpuParquetScan → FileScan
  8. val dataSchema: StructType
  9. def description(): String
    Definition Classes
    GpuParquetScan → FileScan → Scan
  10. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  11. def equals(obj: Any): Boolean
    Definition Classes
    GpuParquetScan → Equals → FileScan → AnyRef → Any
  12. def equivalentFilters(a: Array[Filter], b: Array[Filter]): Boolean
    Attributes
    protected
    Definition Classes
    FileScan
  13. def estimateStatistics(): Statistics
    Definition Classes
    FileScan → SupportsReportStatistics
  14. val fileIndex: PartitioningAwareFileIndex
    Definition Classes
    GpuParquetScan → FileScan
  15. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  16. final def getClass(): Class[_]
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  17. def getFileUnSplittableReason(path: Path): String
    Definition Classes
    FileScan
  18. def getMetaData(): Map[String, String]
    Definition Classes
    FileScan → SupportsMetadata
  19. val hadoopConf: Configuration
  20. def hashCode(): Int
    Definition Classes
    GpuParquetScan → FileScan → AnyRef → Any
  21. def initializeLogIfNecessary(isInterpreter: Boolean, silent: Boolean): Boolean
    Attributes
    protected
    Definition Classes
    Logging
  22. def initializeLogIfNecessary(isInterpreter: Boolean): Unit
    Attributes
    protected
    Definition Classes
    Logging
  23. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  24. def isSplitable(path: Path): Boolean
    Definition Classes
    GpuParquetScan → FileScan
  25. def isTraceEnabled(): Boolean
    Attributes
    protected
    Definition Classes
    Logging
  26. def log: Logger
    Attributes
    protected
    Definition Classes
    Logging
  27. def logDebug(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  28. def logDebug(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  29. def logError(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  30. def logError(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  31. def logInfo(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  32. def logInfo(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  33. def logName: String
    Attributes
    protected
    Definition Classes
    Logging
  34. def logTrace(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  35. def logTrace(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  36. def logWarning(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  37. def logWarning(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  38. val maxMetadataValueLength: Int
    Definition Classes
    FileScan
  39. val metrics: Map[String, GpuMetric]
    Definition Classes
    ScanWithMetrics
  40. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  41. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  42. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  43. val options: CaseInsensitiveStringMap
  44. val partitionFilters: Seq[Expression]
    Definition Classes
    GpuParquetScan → FileScan
  45. def partitions: Seq[FilePartition]
    Attributes
    protected
    Definition Classes
    FileScan
  46. def planInputPartitions(): Array[InputPartition]
    Definition Classes
    FileScan → Batch
  47. val pushedFilters: Array[Filter]
  48. val queryUsesInputFile: Boolean
  49. val rapidsConf: RapidsConf
  50. val readDataSchema: StructType
    Definition Classes
    GpuParquetScan → FileScan
  51. val readPartitionSchema: StructType
    Definition Classes
    GpuParquetScan → FileScan
  52. def readSchema(): StructType
    Definition Classes
    FileScan → Scan
  53. def seqToString(seq: Seq[Any]): String
    Attributes
    protected
    Definition Classes
    FileScan
  54. val sparkSession: SparkSession
    Definition Classes
    GpuParquetScan → FileScan
  55. def supportedCustomMetrics(): Array[CustomMetric]
    Definition Classes
    Scan
  56. final def synchronized[T0](arg0: ⇒ T0): T0
    Definition Classes
    AnyRef
  57. def toBatch(): Batch
    Definition Classes
    FileScan → Scan
  58. def toContinuousStream(arg0: String): ContinuousStream
    Definition Classes
    Scan
  59. def toMicroBatchStream(arg0: String): MicroBatchStream
    Definition Classes
    Scan
  60. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  61. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  62. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
  63. def withFilters(partitionFilters: Seq[Expression], dataFilters: Seq[Expression]): FileScan
    Definition Classes
    GpuParquetScan → FileScan
  64. def withInputFile(): GpuScan

    Create a version of this scan with input file name support

    Create a version of this scan with input file name support

    Definition Classes
    GpuParquetScan → GpuScan

Inherited from Serializable

Inherited from Serializable

Inherited from Product

Inherited from Equals

Inherited from GpuScan

Inherited from ScanWithMetrics

Inherited from FileScan

Inherited from Logging

Inherited from SupportsMetadata

Inherited from SupportsReportStatistics

Inherited from Batch

Inherited from Scan

Inherited from AnyRef

Inherited from Any

Ungrouped