public class CqlInputFormat extends AbstractColumnFamilyInputFormat<java.lang.Long,com.datastax.driver.core.Row>
Fields inherited from class AbstractColumnFamilyInputFormat: CASSANDRA_HADOOP_MAX_KEY_SIZE, CASSANDRA_HADOOP_MAX_KEY_SIZE_DEFAULT, MAPRED_TASK_ID

| Constructor and Description |
|---|
CqlInputFormat() |
| Modifier and Type | Method and Description |
|---|---|
org.apache.hadoop.mapreduce.RecordReader<java.lang.Long,com.datastax.driver.core.Row> |
createRecordReader(org.apache.hadoop.mapreduce.InputSplit arg0,
org.apache.hadoop.mapreduce.TaskAttemptContext arg1) |
org.apache.hadoop.mapred.RecordReader<java.lang.Long,com.datastax.driver.core.Row> |
getRecordReader(org.apache.hadoop.mapred.InputSplit split,
org.apache.hadoop.mapred.JobConf jobConf,
org.apache.hadoop.mapred.Reporter reporter) |
Methods inherited from class AbstractColumnFamilyInputFormat: createAuthenticatedClient, getSplits, getSplits, validateConfiguration

public org.apache.hadoop.mapred.RecordReader<java.lang.Long,com.datastax.driver.core.Row> getRecordReader(org.apache.hadoop.mapred.InputSplit split,
org.apache.hadoop.mapred.JobConf jobConf,
org.apache.hadoop.mapred.Reporter reporter)
throws java.io.IOException
Throws: java.io.IOException

public org.apache.hadoop.mapreduce.RecordReader<java.lang.Long,com.datastax.driver.core.Row> createRecordReader(org.apache.hadoop.mapreduce.InputSplit arg0,
org.apache.hadoop.mapreduce.TaskAttemptContext arg1)
throws java.io.IOException,
java.lang.InterruptedException
Specified by: createRecordReader in class org.apache.hadoop.mapreduce.InputFormat<java.lang.Long,com.datastax.driver.core.Row>

Throws: java.io.IOException, java.lang.InterruptedException

Copyright © 2015 The Apache Software Foundation