@CapabilityDescription(value="Consumes messages from Apache Kafka specifically built against the Kafka 1.0 Consumer API. The complementary NiFi processor for sending messages is PublishKafkaRecord_1_0. Please note that, at this time, the Processor assumes that all records that are retrieved from a given partition have the same schema. If any of the Kafka messages are pulled but cannot be parsed or written with the configured Record Reader or Record Writer, the contents of the message will be written to a separate FlowFile, and that FlowFile will be transferred to the 'parse.failure' relationship. Otherwise, each FlowFile is sent to the 'success' relationship and may contain many individual messages within the single FlowFile. A 'record.count' attribute is added to indicate how many messages are contained in the FlowFile. No two Kafka messages will be placed into the same FlowFile if they have different schemas, or if they have different values for a message header that is included by the <Headers to Add as Attributes> property.") @Tags(value={"Kafka","Get","Record","csv","avro","json","Ingest","Ingress","Topic","PubSub","Consume","1.0"}) @WritesAttribute(attribute="record.count",description="The number of records received") @WritesAttribute(attribute="mime.type",description="The MIME Type that is provided by the configured Record Writer") @WritesAttribute(attribute="kafka.partition",description="The partition of the topic the records are from") @WritesAttribute(attribute="kafka.timestamp",description="The timestamp of the message in the partition of the topic.") @WritesAttribute(attribute="kafka.topic",description="The topic records are from") @InputRequirement(value=INPUT_FORBIDDEN) @DynamicProperty(name="The name of a Kafka configuration property.", value="The value of a given Kafka configuration property.", description="These properties will be added on the Kafka configuration after loading any provided configuration properties. In the event a dynamic property represents a property that was already set, its value will be ignored and a WARN message logged. For the list of available Kafka properties please refer to: http://kafka.apache.org/documentation.html#configuration.", expressionLanguageScope=VARIABLE_REGISTRY) @SeeAlso(value={ConsumeKafka_1_0.class,PublishKafka_1_0.class,PublishKafkaRecord_1_0.class}) public class ConsumeKafkaRecord_1_0 extends AbstractProcessor
| Constructor and Description |
|---|
| ConsumeKafkaRecord_1_0() |
| Modifier and Type | Method and Description |
|---|---|
| void | close() |
| protected ConsumerPool | createConsumerPool(ProcessContext context, ComponentLog log) |
| protected Collection<ValidationResult> | customValidate(ValidationContext validationContext) |
| private ConsumerPool | getConsumerPool(ProcessContext context) |
| Set<Relationship> | getRelationships() |
| protected PropertyDescriptor | getSupportedDynamicPropertyDescriptor(String propertyDescriptorName) |
| protected List<PropertyDescriptor> | getSupportedPropertyDescriptors() |
| void | interruptActiveThreads() |
| void | onTrigger(ProcessContext context, ProcessSession session) |
Methods inherited from class AbstractSessionFactoryProcessor: getControllerServiceLookup, getIdentifier, getLogger, getNodeTypeProvider, init, initialize, isConfigurationRestored, isScheduled, toString, updateConfiguredRestoredTrue, updateScheduledFalse, updateScheduledTrue

Methods inherited from class AbstractConfigurableComponent: equals, getPropertyDescriptor, getPropertyDescriptors, hashCode, onPropertyModified, validate

Methods inherited from class Object: clone, finalize, getClass, notify, notifyAll, wait, wait, wait

Methods inherited from interfaces Processor and ConfigurableComponent: isStateful, getPropertyDescriptor, getPropertyDescriptors, onPropertyModified, validate

static final AllowableValue OFFSET_EARLIEST
static final AllowableValue OFFSET_LATEST
static final AllowableValue OFFSET_NONE
static final AllowableValue TOPIC_NAME
static final AllowableValue TOPIC_PATTERN
static final PropertyDescriptor TOPICS
static final PropertyDescriptor TOPIC_TYPE
static final PropertyDescriptor RECORD_READER
static final PropertyDescriptor RECORD_WRITER
static final PropertyDescriptor GROUP_ID
static final PropertyDescriptor AUTO_OFFSET_RESET
static final PropertyDescriptor MAX_POLL_RECORDS
static final PropertyDescriptor MAX_UNCOMMITTED_TIME
static final PropertyDescriptor HONOR_TRANSACTIONS
static final PropertyDescriptor MESSAGE_HEADER_ENCODING
static final PropertyDescriptor HEADER_NAME_REGEX
static final Relationship REL_SUCCESS
static final Relationship REL_PARSE_FAILURE
static final List<PropertyDescriptor> DESCRIPTORS
static final Set<Relationship> RELATIONSHIPS
private volatile ConsumerPool consumerPool
private final Set<ConsumerLease> activeLeases
public Set<Relationship> getRelationships()
Specified by: getRelationships in interface Processor
Overrides: getRelationships in class AbstractSessionFactoryProcessor

protected List<PropertyDescriptor> getSupportedPropertyDescriptors()
Overrides: getSupportedPropertyDescriptors in class AbstractConfigurableComponent

@OnStopped public void close()
protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(String propertyDescriptorName)
Overrides: getSupportedDynamicPropertyDescriptor in class AbstractConfigurableComponent

protected Collection<ValidationResult> customValidate(ValidationContext validationContext)
Overrides: customValidate in class AbstractConfigurableComponent

private ConsumerPool getConsumerPool(ProcessContext context)
protected ConsumerPool createConsumerPool(ProcessContext context, ComponentLog log)
@OnUnscheduled public void interruptActiveThreads()
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException
Specified by: onTrigger in class AbstractProcessor
Throws: ProcessException

Copyright © 2022 Apache NiFi Project. All rights reserved.