Posted to commits@camel.apache.org by ac...@apache.org on 2020/10/29 18:57:01 UTC

[camel] branch master updated (01b1e25 -> 96d3c42)

This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/camel.git.


    from 01b1e25  Fix Zendesk integration tests
     new 0aa75ee  CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - keySerializerClass
     new abdd514  CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - keySerializerClass Test
     new ebf1b0a  CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - serializerClass
     new 5acb8e0  CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - serializerClass test
     new 7bc1fc1  CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - serializerClass test
     new 56489f4  CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - Regen
     new dab36bd  CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - kafkaHeaderSerializer
     new 557f5b3  CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - kafkaHeaderSerializer tests
     new 79c2c87  CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - kafkaHeaderSerializer
     new 55b0677  CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - kafkaHeaderDeserializer
     new 9f124e7  CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - kafkaHeaderDeserializer tests
     new 44d75a8  CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - kafkaHeaderDeserializer tests
     new 96d3c42  CAMEL-15770 - Regen catalog

The 13 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../org/apache/camel/catalog/components/kafka.json | 16 ++---
 .../apache/camel/catalog/docs/kafka-component.adoc | 16 ++---
 .../component/kafka/KafkaComponentConfigurer.java  | 40 +++++------
 .../component/kafka/KafkaEndpointConfigurer.java   | 40 +++++------
 .../component/kafka/KafkaEndpointUriFactory.java   |  8 +--
 .../org/apache/camel/component/kafka/kafka.json    | 16 ++---
 .../camel-kafka/src/main/docs/kafka-component.adoc | 16 ++---
 .../camel/component/kafka/KafkaConfiguration.java  | 46 ++++++-------
 .../camel/component/kafka/KafkaConsumer.java       |  2 +-
 .../camel/component/kafka/KafkaProducer.java       | 10 +--
 .../camel/component/kafka/KafkaComponentTest.java  |  4 +-
 .../component/kafka/KafkaConsumerFullTest.java     |  4 +-
 .../component/kafka/KafkaProducerFullTest.java     |  8 +--
 .../dsl/KafkaComponentBuilderFactory.java          | 54 +++++++--------
 .../endpoint/dsl/KafkaEndpointBuilderFactory.java  | 79 +++++++++++-----------
 .../modules/ROOT/pages/kafka-component.adoc        | 16 ++---
 16 files changed, 187 insertions(+), 188 deletions(-)
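
For context on CAMEL-15770: the commits above rename the Kafka component's serializer/deserializer options (for example keySerializerClass and serializerClass) to the consistent keySerializer, valueSerializer, kafkaHeaderSerializer and kafkaHeaderDeserializer names that appear throughout the regenerated files. As a rough illustration only (the topic name, broker address and serializer classes below are placeholders, not values taken from these commits), a producer route using the renamed options as plain endpoint URI parameters would look roughly like this:

import org.apache.camel.builder.RouteBuilder;

public class KafkaRenamedOptionsRoute extends RouteBuilder {
    @Override
    public void configure() {
        // Hypothetical sketch: keySerializer and valueSerializer are the renamed
        // option names; the broker address and topic name are placeholders.
        from("direct:start")
            .to("kafka:my-topic?brokers=localhost:9092"
                + "&keySerializer=org.apache.kafka.common.serialization.StringSerializer"
                + "&valueSerializer=org.apache.kafka.common.serialization.StringSerializer");
    }
}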


[camel] 06/13: CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - Regen

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/camel.git

commit 56489f4dbea1e32695867d0b41ce7561471a09a1
Author: Andrea Cosentino <an...@gmail.com>
AuthorDate: Thu Oct 29 18:57:33 2020 +0100

    CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - Regen
---
 .../component/kafka/KafkaComponentConfigurer.java  | 527 +++++++++++++++++++++
 .../component/kafka/KafkaEndpointConfigurer.java   | 521 ++++++++++++++++++++
 .../component/kafka/KafkaEndpointUriFactory.java   | 163 +++++++
 .../services/org/apache/camel/component.properties |   7 +
 .../services/org/apache/camel/component/kafka      |   2 +
 .../org/apache/camel/configurer/kafka-component    |   2 +
 .../org/apache/camel/configurer/kafka-endpoint     |   2 +
 .../org/apache/camel/urifactory/kafka-endpoint     |   2 +
 .../org/apache/camel/component/kafka/kafka.json    | 223 +++++++++
 9 files changed, 1449 insertions(+)
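
The regenerated KafkaComponentConfigurer and KafkaEndpointConfigurer below are the reflection-free property configurers that Camel resolves through the configurer resource files also listed above; their configure/getOptionValue/getAllOptions methods match option names (case-insensitively when requested) and write through to the KafkaConfiguration. As a hedged sketch, not part of this commit (the serializer class used is only an example value), driving one of the renamed options through the component configurer directly would look roughly like this:

import org.apache.camel.CamelContext;
import org.apache.camel.component.kafka.KafkaComponent;
import org.apache.camel.component.kafka.KafkaComponentConfigurer;
import org.apache.camel.impl.DefaultCamelContext;

public class ConfigurerSketch {
    public static void main(String[] args) {
        CamelContext context = new DefaultCamelContext();
        KafkaComponent component = new KafkaComponent();
        KafkaComponentConfigurer configurer = new KafkaComponentConfigurer();

        // With ignoreCase=true the switch lowercases the name, so "keyserializer"
        // hits the same case label as "keySerializer" and the value is stored on
        // the lazily created KafkaConfiguration of the component.
        configurer.configure(context, component, "keyserializer",
                "org.apache.kafka.common.serialization.StringSerializer", true);

        // Reads the value back via the generated getter switch.
        Object value = configurer.getOptionValue(component, "keySerializer", false);
        System.out.println(value);
    }
}

In normal applications these configurers are invoked indirectly by Camel's property binding (endpoint URIs, application.properties, the component/endpoint DSL builders regenerated in this series) rather than called by hand.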

diff --git a/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaComponentConfigurer.java b/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaComponentConfigurer.java
new file mode 100644
index 0000000..6f9d5f6
--- /dev/null
+++ b/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaComponentConfigurer.java
@@ -0,0 +1,527 @@
+/* Generated by camel build tools - do NOT edit this file! */
+package org.apache.camel.component.kafka;
+
+import java.util.Map;
+
+import org.apache.camel.CamelContext;
+import org.apache.camel.spi.GeneratedPropertyConfigurer;
+import org.apache.camel.spi.PropertyConfigurerGetter;
+import org.apache.camel.util.CaseInsensitiveMap;
+import org.apache.camel.support.component.PropertyConfigurerSupport;
+
+/**
+ * Generated by camel build tools - do NOT edit this file!
+ */
+@SuppressWarnings("unchecked")
+public class KafkaComponentConfigurer extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
+
+    private static final Map<String, Object> ALL_OPTIONS;
+    static {
+        Map<String, Object> map = new CaseInsensitiveMap();
+        map.put("additionalProperties", java.util.Map.class);
+        map.put("brokers", java.lang.String.class);
+        map.put("clientId", java.lang.String.class);
+        map.put("configuration", org.apache.camel.component.kafka.KafkaConfiguration.class);
+        map.put("headerFilterStrategy", org.apache.camel.spi.HeaderFilterStrategy.class);
+        map.put("reconnectBackoffMaxMs", java.lang.Integer.class);
+        map.put("shutdownTimeout", int.class);
+        map.put("allowManualCommit", boolean.class);
+        map.put("autoCommitEnable", java.lang.Boolean.class);
+        map.put("autoCommitIntervalMs", java.lang.Integer.class);
+        map.put("autoCommitOnStop", java.lang.String.class);
+        map.put("autoOffsetReset", java.lang.String.class);
+        map.put("breakOnFirstError", boolean.class);
+        map.put("bridgeErrorHandler", boolean.class);
+        map.put("checkCrcs", java.lang.Boolean.class);
+        map.put("consumerRequestTimeoutMs", java.lang.Integer.class);
+        map.put("consumersCount", int.class);
+        map.put("consumerStreams", int.class);
+        map.put("fetchMaxBytes", java.lang.Integer.class);
+        map.put("fetchMinBytes", java.lang.Integer.class);
+        map.put("fetchWaitMaxMs", java.lang.Integer.class);
+        map.put("groupId", java.lang.String.class);
+        map.put("heartbeatIntervalMs", java.lang.Integer.class);
+        map.put("kafkaHeaderDeserializer", org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer.class);
+        map.put("keyDeserializer", java.lang.String.class);
+        map.put("maxPartitionFetchBytes", java.lang.Integer.class);
+        map.put("maxPollIntervalMs", java.lang.Long.class);
+        map.put("maxPollRecords", java.lang.Integer.class);
+        map.put("offsetRepository", org.apache.camel.spi.StateRepository.class);
+        map.put("partitionAssignor", java.lang.String.class);
+        map.put("pollTimeoutMs", java.lang.Long.class);
+        map.put("seekTo", java.lang.String.class);
+        map.put("sessionTimeoutMs", java.lang.Integer.class);
+        map.put("specificAvroReader", boolean.class);
+        map.put("topicIsPattern", boolean.class);
+        map.put("valueDeserializer", java.lang.String.class);
+        map.put("kafkaManualCommitFactory", org.apache.camel.component.kafka.KafkaManualCommitFactory.class);
+        map.put("bufferMemorySize", java.lang.Integer.class);
+        map.put("compressionCodec", java.lang.String.class);
+        map.put("connectionMaxIdleMs", java.lang.Integer.class);
+        map.put("enableIdempotence", boolean.class);
+        map.put("kafkaHeaderSerializer", org.apache.camel.component.kafka.serde.KafkaHeaderSerializer.class);
+        map.put("key", java.lang.String.class);
+        map.put("keySerializer", java.lang.String.class);
+        map.put("lazyStartProducer", boolean.class);
+        map.put("lingerMs", java.lang.Integer.class);
+        map.put("maxBlockMs", java.lang.Integer.class);
+        map.put("maxInFlightRequest", java.lang.Integer.class);
+        map.put("maxRequestSize", java.lang.Integer.class);
+        map.put("metadataMaxAgeMs", java.lang.Integer.class);
+        map.put("metricReporters", java.lang.String.class);
+        map.put("metricsSampleWindowMs", java.lang.Integer.class);
+        map.put("noOfMetricsSample", java.lang.Integer.class);
+        map.put("partitioner", java.lang.String.class);
+        map.put("partitionKey", java.lang.Integer.class);
+        map.put("producerBatchSize", java.lang.Integer.class);
+        map.put("queueBufferingMaxMessages", java.lang.Integer.class);
+        map.put("receiveBufferBytes", java.lang.Integer.class);
+        map.put("reconnectBackoffMs", java.lang.Integer.class);
+        map.put("recordMetadata", boolean.class);
+        map.put("requestRequiredAcks", java.lang.String.class);
+        map.put("requestTimeoutMs", java.lang.Integer.class);
+        map.put("retries", java.lang.Integer.class);
+        map.put("retryBackoffMs", java.lang.Integer.class);
+        map.put("sendBufferBytes", java.lang.Integer.class);
+        map.put("valueSerializer", java.lang.String.class);
+        map.put("workerPool", java.util.concurrent.ExecutorService.class);
+        map.put("workerPoolCoreSize", java.lang.Integer.class);
+        map.put("workerPoolMaxSize", java.lang.Integer.class);
+        map.put("basicPropertyBinding", boolean.class);
+        map.put("schemaRegistryURL", java.lang.String.class);
+        map.put("interceptorClasses", java.lang.String.class);
+        map.put("kerberosBeforeReloginMinTime", java.lang.Integer.class);
+        map.put("kerberosInitCmd", java.lang.String.class);
+        map.put("kerberosPrincipalToLocalRules", java.lang.String.class);
+        map.put("kerberosRenewJitter", java.lang.Double.class);
+        map.put("kerberosRenewWindowFactor", java.lang.Double.class);
+        map.put("saslJaasConfig", java.lang.String.class);
+        map.put("saslKerberosServiceName", java.lang.String.class);
+        map.put("saslMechanism", java.lang.String.class);
+        map.put("securityProtocol", java.lang.String.class);
+        map.put("sslCipherSuites", java.lang.String.class);
+        map.put("sslContextParameters", org.apache.camel.support.jsse.SSLContextParameters.class);
+        map.put("sslEnabledProtocols", java.lang.String.class);
+        map.put("sslEndpointAlgorithm", java.lang.String.class);
+        map.put("sslKeymanagerAlgorithm", java.lang.String.class);
+        map.put("sslKeyPassword", java.lang.String.class);
+        map.put("sslKeystoreLocation", java.lang.String.class);
+        map.put("sslKeystorePassword", java.lang.String.class);
+        map.put("sslKeystoreType", java.lang.String.class);
+        map.put("sslProtocol", java.lang.String.class);
+        map.put("sslProvider", java.lang.String.class);
+        map.put("sslTrustmanagerAlgorithm", java.lang.String.class);
+        map.put("sslTruststoreLocation", java.lang.String.class);
+        map.put("sslTruststorePassword", java.lang.String.class);
+        map.put("sslTruststoreType", java.lang.String.class);
+        map.put("useGlobalSslContextParameters", boolean.class);
+        ALL_OPTIONS = map;
+    }
+
+    private org.apache.camel.component.kafka.KafkaConfiguration getOrCreateConfiguration(KafkaComponent target) {
+        if (target.getConfiguration() == null) {
+            target.setConfiguration(new org.apache.camel.component.kafka.KafkaConfiguration());
+        }
+        return target.getConfiguration();
+    }
+
+    @Override
+    public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
+        KafkaComponent target = (KafkaComponent) obj;
+        switch (ignoreCase ? name.toLowerCase() : name) {
+        case "additionalproperties":
+        case "additionalProperties": getOrCreateConfiguration(target).setAdditionalProperties(property(camelContext, java.util.Map.class, value)); return true;
+        case "allowmanualcommit":
+        case "allowManualCommit": getOrCreateConfiguration(target).setAllowManualCommit(property(camelContext, boolean.class, value)); return true;
+        case "autocommitenable":
+        case "autoCommitEnable": getOrCreateConfiguration(target).setAutoCommitEnable(property(camelContext, java.lang.Boolean.class, value)); return true;
+        case "autocommitintervalms":
+        case "autoCommitIntervalMs": getOrCreateConfiguration(target).setAutoCommitIntervalMs(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "autocommitonstop":
+        case "autoCommitOnStop": getOrCreateConfiguration(target).setAutoCommitOnStop(property(camelContext, java.lang.String.class, value)); return true;
+        case "autooffsetreset":
+        case "autoOffsetReset": getOrCreateConfiguration(target).setAutoOffsetReset(property(camelContext, java.lang.String.class, value)); return true;
+        case "basicpropertybinding":
+        case "basicPropertyBinding": target.setBasicPropertyBinding(property(camelContext, boolean.class, value)); return true;
+        case "breakonfirsterror":
+        case "breakOnFirstError": getOrCreateConfiguration(target).setBreakOnFirstError(property(camelContext, boolean.class, value)); return true;
+        case "bridgeerrorhandler":
+        case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
+        case "brokers": getOrCreateConfiguration(target).setBrokers(property(camelContext, java.lang.String.class, value)); return true;
+        case "buffermemorysize":
+        case "bufferMemorySize": getOrCreateConfiguration(target).setBufferMemorySize(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "checkcrcs":
+        case "checkCrcs": getOrCreateConfiguration(target).setCheckCrcs(property(camelContext, java.lang.Boolean.class, value)); return true;
+        case "clientid":
+        case "clientId": getOrCreateConfiguration(target).setClientId(property(camelContext, java.lang.String.class, value)); return true;
+        case "compressioncodec":
+        case "compressionCodec": getOrCreateConfiguration(target).setCompressionCodec(property(camelContext, java.lang.String.class, value)); return true;
+        case "configuration": target.setConfiguration(property(camelContext, org.apache.camel.component.kafka.KafkaConfiguration.class, value)); return true;
+        case "connectionmaxidlems":
+        case "connectionMaxIdleMs": getOrCreateConfiguration(target).setConnectionMaxIdleMs(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "consumerrequesttimeoutms":
+        case "consumerRequestTimeoutMs": getOrCreateConfiguration(target).setConsumerRequestTimeoutMs(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "consumerstreams":
+        case "consumerStreams": getOrCreateConfiguration(target).setConsumerStreams(property(camelContext, int.class, value)); return true;
+        case "consumerscount":
+        case "consumersCount": getOrCreateConfiguration(target).setConsumersCount(property(camelContext, int.class, value)); return true;
+        case "enableidempotence":
+        case "enableIdempotence": getOrCreateConfiguration(target).setEnableIdempotence(property(camelContext, boolean.class, value)); return true;
+        case "fetchmaxbytes":
+        case "fetchMaxBytes": getOrCreateConfiguration(target).setFetchMaxBytes(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "fetchminbytes":
+        case "fetchMinBytes": getOrCreateConfiguration(target).setFetchMinBytes(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "fetchwaitmaxms":
+        case "fetchWaitMaxMs": getOrCreateConfiguration(target).setFetchWaitMaxMs(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "groupid":
+        case "groupId": getOrCreateConfiguration(target).setGroupId(property(camelContext, java.lang.String.class, value)); return true;
+        case "headerfilterstrategy":
+        case "headerFilterStrategy": getOrCreateConfiguration(target).setHeaderFilterStrategy(property(camelContext, org.apache.camel.spi.HeaderFilterStrategy.class, value)); return true;
+        case "heartbeatintervalms":
+        case "heartbeatIntervalMs": getOrCreateConfiguration(target).setHeartbeatIntervalMs(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "interceptorclasses":
+        case "interceptorClasses": getOrCreateConfiguration(target).setInterceptorClasses(property(camelContext, java.lang.String.class, value)); return true;
+        case "kafkaheaderdeserializer":
+        case "kafkaHeaderDeserializer": getOrCreateConfiguration(target).setKafkaHeaderDeserializer(property(camelContext, org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer.class, value)); return true;
+        case "kafkaheaderserializer":
+        case "kafkaHeaderSerializer": getOrCreateConfiguration(target).setKafkaHeaderSerializer(property(camelContext, org.apache.camel.component.kafka.serde.KafkaHeaderSerializer.class, value)); return true;
+        case "kafkamanualcommitfactory":
+        case "kafkaManualCommitFactory": target.setKafkaManualCommitFactory(property(camelContext, org.apache.camel.component.kafka.KafkaManualCommitFactory.class, value)); return true;
+        case "kerberosbeforereloginmintime":
+        case "kerberosBeforeReloginMinTime": getOrCreateConfiguration(target).setKerberosBeforeReloginMinTime(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "kerberosinitcmd":
+        case "kerberosInitCmd": getOrCreateConfiguration(target).setKerberosInitCmd(property(camelContext, java.lang.String.class, value)); return true;
+        case "kerberosprincipaltolocalrules":
+        case "kerberosPrincipalToLocalRules": getOrCreateConfiguration(target).setKerberosPrincipalToLocalRules(property(camelContext, java.lang.String.class, value)); return true;
+        case "kerberosrenewjitter":
+        case "kerberosRenewJitter": getOrCreateConfiguration(target).setKerberosRenewJitter(property(camelContext, java.lang.Double.class, value)); return true;
+        case "kerberosrenewwindowfactor":
+        case "kerberosRenewWindowFactor": getOrCreateConfiguration(target).setKerberosRenewWindowFactor(property(camelContext, java.lang.Double.class, value)); return true;
+        case "key": getOrCreateConfiguration(target).setKey(property(camelContext, java.lang.String.class, value)); return true;
+        case "keydeserializer":
+        case "keyDeserializer": getOrCreateConfiguration(target).setKeyDeserializer(property(camelContext, java.lang.String.class, value)); return true;
+        case "keyserializer":
+        case "keySerializer": getOrCreateConfiguration(target).setKeySerializer(property(camelContext, java.lang.String.class, value)); return true;
+        case "lazystartproducer":
+        case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
+        case "lingerms":
+        case "lingerMs": getOrCreateConfiguration(target).setLingerMs(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "maxblockms":
+        case "maxBlockMs": getOrCreateConfiguration(target).setMaxBlockMs(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "maxinflightrequest":
+        case "maxInFlightRequest": getOrCreateConfiguration(target).setMaxInFlightRequest(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "maxpartitionfetchbytes":
+        case "maxPartitionFetchBytes": getOrCreateConfiguration(target).setMaxPartitionFetchBytes(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "maxpollintervalms":
+        case "maxPollIntervalMs": getOrCreateConfiguration(target).setMaxPollIntervalMs(property(camelContext, java.lang.Long.class, value)); return true;
+        case "maxpollrecords":
+        case "maxPollRecords": getOrCreateConfiguration(target).setMaxPollRecords(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "maxrequestsize":
+        case "maxRequestSize": getOrCreateConfiguration(target).setMaxRequestSize(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "metadatamaxagems":
+        case "metadataMaxAgeMs": getOrCreateConfiguration(target).setMetadataMaxAgeMs(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "metricreporters":
+        case "metricReporters": getOrCreateConfiguration(target).setMetricReporters(property(camelContext, java.lang.String.class, value)); return true;
+        case "metricssamplewindowms":
+        case "metricsSampleWindowMs": getOrCreateConfiguration(target).setMetricsSampleWindowMs(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "noofmetricssample":
+        case "noOfMetricsSample": getOrCreateConfiguration(target).setNoOfMetricsSample(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "offsetrepository":
+        case "offsetRepository": getOrCreateConfiguration(target).setOffsetRepository(property(camelContext, org.apache.camel.spi.StateRepository.class, value)); return true;
+        case "partitionassignor":
+        case "partitionAssignor": getOrCreateConfiguration(target).setPartitionAssignor(property(camelContext, java.lang.String.class, value)); return true;
+        case "partitionkey":
+        case "partitionKey": getOrCreateConfiguration(target).setPartitionKey(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "partitioner": getOrCreateConfiguration(target).setPartitioner(property(camelContext, java.lang.String.class, value)); return true;
+        case "polltimeoutms":
+        case "pollTimeoutMs": getOrCreateConfiguration(target).setPollTimeoutMs(property(camelContext, java.lang.Long.class, value)); return true;
+        case "producerbatchsize":
+        case "producerBatchSize": getOrCreateConfiguration(target).setProducerBatchSize(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "queuebufferingmaxmessages":
+        case "queueBufferingMaxMessages": getOrCreateConfiguration(target).setQueueBufferingMaxMessages(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "receivebufferbytes":
+        case "receiveBufferBytes": getOrCreateConfiguration(target).setReceiveBufferBytes(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "reconnectbackoffmaxms":
+        case "reconnectBackoffMaxMs": getOrCreateConfiguration(target).setReconnectBackoffMaxMs(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "reconnectbackoffms":
+        case "reconnectBackoffMs": getOrCreateConfiguration(target).setReconnectBackoffMs(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "recordmetadata":
+        case "recordMetadata": getOrCreateConfiguration(target).setRecordMetadata(property(camelContext, boolean.class, value)); return true;
+        case "requestrequiredacks":
+        case "requestRequiredAcks": getOrCreateConfiguration(target).setRequestRequiredAcks(property(camelContext, java.lang.String.class, value)); return true;
+        case "requesttimeoutms":
+        case "requestTimeoutMs": getOrCreateConfiguration(target).setRequestTimeoutMs(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "retries": getOrCreateConfiguration(target).setRetries(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "retrybackoffms":
+        case "retryBackoffMs": getOrCreateConfiguration(target).setRetryBackoffMs(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "sasljaasconfig":
+        case "saslJaasConfig": getOrCreateConfiguration(target).setSaslJaasConfig(property(camelContext, java.lang.String.class, value)); return true;
+        case "saslkerberosservicename":
+        case "saslKerberosServiceName": getOrCreateConfiguration(target).setSaslKerberosServiceName(property(camelContext, java.lang.String.class, value)); return true;
+        case "saslmechanism":
+        case "saslMechanism": getOrCreateConfiguration(target).setSaslMechanism(property(camelContext, java.lang.String.class, value)); return true;
+        case "schemaregistryurl":
+        case "schemaRegistryURL": getOrCreateConfiguration(target).setSchemaRegistryURL(property(camelContext, java.lang.String.class, value)); return true;
+        case "securityprotocol":
+        case "securityProtocol": getOrCreateConfiguration(target).setSecurityProtocol(property(camelContext, java.lang.String.class, value)); return true;
+        case "seekto":
+        case "seekTo": getOrCreateConfiguration(target).setSeekTo(property(camelContext, java.lang.String.class, value)); return true;
+        case "sendbufferbytes":
+        case "sendBufferBytes": getOrCreateConfiguration(target).setSendBufferBytes(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "sessiontimeoutms":
+        case "sessionTimeoutMs": getOrCreateConfiguration(target).setSessionTimeoutMs(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "shutdowntimeout":
+        case "shutdownTimeout": getOrCreateConfiguration(target).setShutdownTimeout(property(camelContext, int.class, value)); return true;
+        case "specificavroreader":
+        case "specificAvroReader": getOrCreateConfiguration(target).setSpecificAvroReader(property(camelContext, boolean.class, value)); return true;
+        case "sslciphersuites":
+        case "sslCipherSuites": getOrCreateConfiguration(target).setSslCipherSuites(property(camelContext, java.lang.String.class, value)); return true;
+        case "sslcontextparameters":
+        case "sslContextParameters": getOrCreateConfiguration(target).setSslContextParameters(property(camelContext, org.apache.camel.support.jsse.SSLContextParameters.class, value)); return true;
+        case "sslenabledprotocols":
+        case "sslEnabledProtocols": getOrCreateConfiguration(target).setSslEnabledProtocols(property(camelContext, java.lang.String.class, value)); return true;
+        case "sslendpointalgorithm":
+        case "sslEndpointAlgorithm": getOrCreateConfiguration(target).setSslEndpointAlgorithm(property(camelContext, java.lang.String.class, value)); return true;
+        case "sslkeypassword":
+        case "sslKeyPassword": getOrCreateConfiguration(target).setSslKeyPassword(property(camelContext, java.lang.String.class, value)); return true;
+        case "sslkeymanageralgorithm":
+        case "sslKeymanagerAlgorithm": getOrCreateConfiguration(target).setSslKeymanagerAlgorithm(property(camelContext, java.lang.String.class, value)); return true;
+        case "sslkeystorelocation":
+        case "sslKeystoreLocation": getOrCreateConfiguration(target).setSslKeystoreLocation(property(camelContext, java.lang.String.class, value)); return true;
+        case "sslkeystorepassword":
+        case "sslKeystorePassword": getOrCreateConfiguration(target).setSslKeystorePassword(property(camelContext, java.lang.String.class, value)); return true;
+        case "sslkeystoretype":
+        case "sslKeystoreType": getOrCreateConfiguration(target).setSslKeystoreType(property(camelContext, java.lang.String.class, value)); return true;
+        case "sslprotocol":
+        case "sslProtocol": getOrCreateConfiguration(target).setSslProtocol(property(camelContext, java.lang.String.class, value)); return true;
+        case "sslprovider":
+        case "sslProvider": getOrCreateConfiguration(target).setSslProvider(property(camelContext, java.lang.String.class, value)); return true;
+        case "ssltrustmanageralgorithm":
+        case "sslTrustmanagerAlgorithm": getOrCreateConfiguration(target).setSslTrustmanagerAlgorithm(property(camelContext, java.lang.String.class, value)); return true;
+        case "ssltruststorelocation":
+        case "sslTruststoreLocation": getOrCreateConfiguration(target).setSslTruststoreLocation(property(camelContext, java.lang.String.class, value)); return true;
+        case "ssltruststorepassword":
+        case "sslTruststorePassword": getOrCreateConfiguration(target).setSslTruststorePassword(property(camelContext, java.lang.String.class, value)); return true;
+        case "ssltruststoretype":
+        case "sslTruststoreType": getOrCreateConfiguration(target).setSslTruststoreType(property(camelContext, java.lang.String.class, value)); return true;
+        case "topicispattern":
+        case "topicIsPattern": getOrCreateConfiguration(target).setTopicIsPattern(property(camelContext, boolean.class, value)); return true;
+        case "useglobalsslcontextparameters":
+        case "useGlobalSslContextParameters": target.setUseGlobalSslContextParameters(property(camelContext, boolean.class, value)); return true;
+        case "valuedeserializer":
+        case "valueDeserializer": getOrCreateConfiguration(target).setValueDeserializer(property(camelContext, java.lang.String.class, value)); return true;
+        case "valueserializer":
+        case "valueSerializer": getOrCreateConfiguration(target).setValueSerializer(property(camelContext, java.lang.String.class, value)); return true;
+        case "workerpool":
+        case "workerPool": getOrCreateConfiguration(target).setWorkerPool(property(camelContext, java.util.concurrent.ExecutorService.class, value)); return true;
+        case "workerpoolcoresize":
+        case "workerPoolCoreSize": getOrCreateConfiguration(target).setWorkerPoolCoreSize(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "workerpoolmaxsize":
+        case "workerPoolMaxSize": getOrCreateConfiguration(target).setWorkerPoolMaxSize(property(camelContext, java.lang.Integer.class, value)); return true;
+        default: return false;
+        }
+    }
+
+    @Override
+    public Map<String, Object> getAllOptions(Object target) {
+        return ALL_OPTIONS;
+    }
+
+    @Override
+    public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
+        KafkaComponent target = (KafkaComponent) obj;
+        switch (ignoreCase ? name.toLowerCase() : name) {
+        case "additionalproperties":
+        case "additionalProperties": return getOrCreateConfiguration(target).getAdditionalProperties();
+        case "allowmanualcommit":
+        case "allowManualCommit": return getOrCreateConfiguration(target).isAllowManualCommit();
+        case "autocommitenable":
+        case "autoCommitEnable": return getOrCreateConfiguration(target).getAutoCommitEnable();
+        case "autocommitintervalms":
+        case "autoCommitIntervalMs": return getOrCreateConfiguration(target).getAutoCommitIntervalMs();
+        case "autocommitonstop":
+        case "autoCommitOnStop": return getOrCreateConfiguration(target).getAutoCommitOnStop();
+        case "autooffsetreset":
+        case "autoOffsetReset": return getOrCreateConfiguration(target).getAutoOffsetReset();
+        case "basicpropertybinding":
+        case "basicPropertyBinding": return target.isBasicPropertyBinding();
+        case "breakonfirsterror":
+        case "breakOnFirstError": return getOrCreateConfiguration(target).isBreakOnFirstError();
+        case "bridgeerrorhandler":
+        case "bridgeErrorHandler": return target.isBridgeErrorHandler();
+        case "brokers": return getOrCreateConfiguration(target).getBrokers();
+        case "buffermemorysize":
+        case "bufferMemorySize": return getOrCreateConfiguration(target).getBufferMemorySize();
+        case "checkcrcs":
+        case "checkCrcs": return getOrCreateConfiguration(target).getCheckCrcs();
+        case "clientid":
+        case "clientId": return getOrCreateConfiguration(target).getClientId();
+        case "compressioncodec":
+        case "compressionCodec": return getOrCreateConfiguration(target).getCompressionCodec();
+        case "configuration": return target.getConfiguration();
+        case "connectionmaxidlems":
+        case "connectionMaxIdleMs": return getOrCreateConfiguration(target).getConnectionMaxIdleMs();
+        case "consumerrequesttimeoutms":
+        case "consumerRequestTimeoutMs": return getOrCreateConfiguration(target).getConsumerRequestTimeoutMs();
+        case "consumerstreams":
+        case "consumerStreams": return getOrCreateConfiguration(target).getConsumerStreams();
+        case "consumerscount":
+        case "consumersCount": return getOrCreateConfiguration(target).getConsumersCount();
+        case "enableidempotence":
+        case "enableIdempotence": return getOrCreateConfiguration(target).isEnableIdempotence();
+        case "fetchmaxbytes":
+        case "fetchMaxBytes": return getOrCreateConfiguration(target).getFetchMaxBytes();
+        case "fetchminbytes":
+        case "fetchMinBytes": return getOrCreateConfiguration(target).getFetchMinBytes();
+        case "fetchwaitmaxms":
+        case "fetchWaitMaxMs": return getOrCreateConfiguration(target).getFetchWaitMaxMs();
+        case "groupid":
+        case "groupId": return getOrCreateConfiguration(target).getGroupId();
+        case "headerfilterstrategy":
+        case "headerFilterStrategy": return getOrCreateConfiguration(target).getHeaderFilterStrategy();
+        case "heartbeatintervalms":
+        case "heartbeatIntervalMs": return getOrCreateConfiguration(target).getHeartbeatIntervalMs();
+        case "interceptorclasses":
+        case "interceptorClasses": return getOrCreateConfiguration(target).getInterceptorClasses();
+        case "kafkaheaderdeserializer":
+        case "kafkaHeaderDeserializer": return getOrCreateConfiguration(target).getKafkaHeaderDeserializer();
+        case "kafkaheaderserializer":
+        case "kafkaHeaderSerializer": return getOrCreateConfiguration(target).getKafkaHeaderSerializer();
+        case "kafkamanualcommitfactory":
+        case "kafkaManualCommitFactory": return target.getKafkaManualCommitFactory();
+        case "kerberosbeforereloginmintime":
+        case "kerberosBeforeReloginMinTime": return getOrCreateConfiguration(target).getKerberosBeforeReloginMinTime();
+        case "kerberosinitcmd":
+        case "kerberosInitCmd": return getOrCreateConfiguration(target).getKerberosInitCmd();
+        case "kerberosprincipaltolocalrules":
+        case "kerberosPrincipalToLocalRules": return getOrCreateConfiguration(target).getKerberosPrincipalToLocalRules();
+        case "kerberosrenewjitter":
+        case "kerberosRenewJitter": return getOrCreateConfiguration(target).getKerberosRenewJitter();
+        case "kerberosrenewwindowfactor":
+        case "kerberosRenewWindowFactor": return getOrCreateConfiguration(target).getKerberosRenewWindowFactor();
+        case "key": return getOrCreateConfiguration(target).getKey();
+        case "keydeserializer":
+        case "keyDeserializer": return getOrCreateConfiguration(target).getKeyDeserializer();
+        case "keyserializer":
+        case "keySerializer": return getOrCreateConfiguration(target).getKeySerializer();
+        case "lazystartproducer":
+        case "lazyStartProducer": return target.isLazyStartProducer();
+        case "lingerms":
+        case "lingerMs": return getOrCreateConfiguration(target).getLingerMs();
+        case "maxblockms":
+        case "maxBlockMs": return getOrCreateConfiguration(target).getMaxBlockMs();
+        case "maxinflightrequest":
+        case "maxInFlightRequest": return getOrCreateConfiguration(target).getMaxInFlightRequest();
+        case "maxpartitionfetchbytes":
+        case "maxPartitionFetchBytes": return getOrCreateConfiguration(target).getMaxPartitionFetchBytes();
+        case "maxpollintervalms":
+        case "maxPollIntervalMs": return getOrCreateConfiguration(target).getMaxPollIntervalMs();
+        case "maxpollrecords":
+        case "maxPollRecords": return getOrCreateConfiguration(target).getMaxPollRecords();
+        case "maxrequestsize":
+        case "maxRequestSize": return getOrCreateConfiguration(target).getMaxRequestSize();
+        case "metadatamaxagems":
+        case "metadataMaxAgeMs": return getOrCreateConfiguration(target).getMetadataMaxAgeMs();
+        case "metricreporters":
+        case "metricReporters": return getOrCreateConfiguration(target).getMetricReporters();
+        case "metricssamplewindowms":
+        case "metricsSampleWindowMs": return getOrCreateConfiguration(target).getMetricsSampleWindowMs();
+        case "noofmetricssample":
+        case "noOfMetricsSample": return getOrCreateConfiguration(target).getNoOfMetricsSample();
+        case "offsetrepository":
+        case "offsetRepository": return getOrCreateConfiguration(target).getOffsetRepository();
+        case "partitionassignor":
+        case "partitionAssignor": return getOrCreateConfiguration(target).getPartitionAssignor();
+        case "partitionkey":
+        case "partitionKey": return getOrCreateConfiguration(target).getPartitionKey();
+        case "partitioner": return getOrCreateConfiguration(target).getPartitioner();
+        case "polltimeoutms":
+        case "pollTimeoutMs": return getOrCreateConfiguration(target).getPollTimeoutMs();
+        case "producerbatchsize":
+        case "producerBatchSize": return getOrCreateConfiguration(target).getProducerBatchSize();
+        case "queuebufferingmaxmessages":
+        case "queueBufferingMaxMessages": return getOrCreateConfiguration(target).getQueueBufferingMaxMessages();
+        case "receivebufferbytes":
+        case "receiveBufferBytes": return getOrCreateConfiguration(target).getReceiveBufferBytes();
+        case "reconnectbackoffmaxms":
+        case "reconnectBackoffMaxMs": return getOrCreateConfiguration(target).getReconnectBackoffMaxMs();
+        case "reconnectbackoffms":
+        case "reconnectBackoffMs": return getOrCreateConfiguration(target).getReconnectBackoffMs();
+        case "recordmetadata":
+        case "recordMetadata": return getOrCreateConfiguration(target).isRecordMetadata();
+        case "requestrequiredacks":
+        case "requestRequiredAcks": return getOrCreateConfiguration(target).getRequestRequiredAcks();
+        case "requesttimeoutms":
+        case "requestTimeoutMs": return getOrCreateConfiguration(target).getRequestTimeoutMs();
+        case "retries": return getOrCreateConfiguration(target).getRetries();
+        case "retrybackoffms":
+        case "retryBackoffMs": return getOrCreateConfiguration(target).getRetryBackoffMs();
+        case "sasljaasconfig":
+        case "saslJaasConfig": return getOrCreateConfiguration(target).getSaslJaasConfig();
+        case "saslkerberosservicename":
+        case "saslKerberosServiceName": return getOrCreateConfiguration(target).getSaslKerberosServiceName();
+        case "saslmechanism":
+        case "saslMechanism": return getOrCreateConfiguration(target).getSaslMechanism();
+        case "schemaregistryurl":
+        case "schemaRegistryURL": return getOrCreateConfiguration(target).getSchemaRegistryURL();
+        case "securityprotocol":
+        case "securityProtocol": return getOrCreateConfiguration(target).getSecurityProtocol();
+        case "seekto":
+        case "seekTo": return getOrCreateConfiguration(target).getSeekTo();
+        case "sendbufferbytes":
+        case "sendBufferBytes": return getOrCreateConfiguration(target).getSendBufferBytes();
+        case "sessiontimeoutms":
+        case "sessionTimeoutMs": return getOrCreateConfiguration(target).getSessionTimeoutMs();
+        case "shutdowntimeout":
+        case "shutdownTimeout": return getOrCreateConfiguration(target).getShutdownTimeout();
+        case "specificavroreader":
+        case "specificAvroReader": return getOrCreateConfiguration(target).isSpecificAvroReader();
+        case "sslciphersuites":
+        case "sslCipherSuites": return getOrCreateConfiguration(target).getSslCipherSuites();
+        case "sslcontextparameters":
+        case "sslContextParameters": return getOrCreateConfiguration(target).getSslContextParameters();
+        case "sslenabledprotocols":
+        case "sslEnabledProtocols": return getOrCreateConfiguration(target).getSslEnabledProtocols();
+        case "sslendpointalgorithm":
+        case "sslEndpointAlgorithm": return getOrCreateConfiguration(target).getSslEndpointAlgorithm();
+        case "sslkeypassword":
+        case "sslKeyPassword": return getOrCreateConfiguration(target).getSslKeyPassword();
+        case "sslkeymanageralgorithm":
+        case "sslKeymanagerAlgorithm": return getOrCreateConfiguration(target).getSslKeymanagerAlgorithm();
+        case "sslkeystorelocation":
+        case "sslKeystoreLocation": return getOrCreateConfiguration(target).getSslKeystoreLocation();
+        case "sslkeystorepassword":
+        case "sslKeystorePassword": return getOrCreateConfiguration(target).getSslKeystorePassword();
+        case "sslkeystoretype":
+        case "sslKeystoreType": return getOrCreateConfiguration(target).getSslKeystoreType();
+        case "sslprotocol":
+        case "sslProtocol": return getOrCreateConfiguration(target).getSslProtocol();
+        case "sslprovider":
+        case "sslProvider": return getOrCreateConfiguration(target).getSslProvider();
+        case "ssltrustmanageralgorithm":
+        case "sslTrustmanagerAlgorithm": return getOrCreateConfiguration(target).getSslTrustmanagerAlgorithm();
+        case "ssltruststorelocation":
+        case "sslTruststoreLocation": return getOrCreateConfiguration(target).getSslTruststoreLocation();
+        case "ssltruststorepassword":
+        case "sslTruststorePassword": return getOrCreateConfiguration(target).getSslTruststorePassword();
+        case "ssltruststoretype":
+        case "sslTruststoreType": return getOrCreateConfiguration(target).getSslTruststoreType();
+        case "topicispattern":
+        case "topicIsPattern": return getOrCreateConfiguration(target).isTopicIsPattern();
+        case "useglobalsslcontextparameters":
+        case "useGlobalSslContextParameters": return target.isUseGlobalSslContextParameters();
+        case "valuedeserializer":
+        case "valueDeserializer": return getOrCreateConfiguration(target).getValueDeserializer();
+        case "valueserializer":
+        case "valueSerializer": return getOrCreateConfiguration(target).getValueSerializer();
+        case "workerpool":
+        case "workerPool": return getOrCreateConfiguration(target).getWorkerPool();
+        case "workerpoolcoresize":
+        case "workerPoolCoreSize": return getOrCreateConfiguration(target).getWorkerPoolCoreSize();
+        case "workerpoolmaxsize":
+        case "workerPoolMaxSize": return getOrCreateConfiguration(target).getWorkerPoolMaxSize();
+        default: return null;
+        }
+    }
+}
+
diff --git a/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointConfigurer.java b/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointConfigurer.java
new file mode 100644
index 0000000..e2dfa37
--- /dev/null
+++ b/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointConfigurer.java
@@ -0,0 +1,521 @@
+/* Generated by camel build tools - do NOT edit this file! */
+package org.apache.camel.component.kafka;
+
+import java.util.Map;
+
+import org.apache.camel.CamelContext;
+import org.apache.camel.spi.GeneratedPropertyConfigurer;
+import org.apache.camel.spi.PropertyConfigurerGetter;
+import org.apache.camel.util.CaseInsensitiveMap;
+import org.apache.camel.support.component.PropertyConfigurerSupport;
+
+/**
+ * Generated by camel build tools - do NOT edit this file!
+ */
+@SuppressWarnings("unchecked")
+public class KafkaEndpointConfigurer extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
+
+    private static final Map<String, Object> ALL_OPTIONS;
+    static {
+        Map<String, Object> map = new CaseInsensitiveMap();
+        map.put("topic", java.lang.String.class);
+        map.put("additionalProperties", java.util.Map.class);
+        map.put("brokers", java.lang.String.class);
+        map.put("clientId", java.lang.String.class);
+        map.put("headerFilterStrategy", org.apache.camel.spi.HeaderFilterStrategy.class);
+        map.put("reconnectBackoffMaxMs", java.lang.Integer.class);
+        map.put("shutdownTimeout", int.class);
+        map.put("allowManualCommit", boolean.class);
+        map.put("autoCommitEnable", java.lang.Boolean.class);
+        map.put("autoCommitIntervalMs", java.lang.Integer.class);
+        map.put("autoCommitOnStop", java.lang.String.class);
+        map.put("autoOffsetReset", java.lang.String.class);
+        map.put("breakOnFirstError", boolean.class);
+        map.put("bridgeErrorHandler", boolean.class);
+        map.put("checkCrcs", java.lang.Boolean.class);
+        map.put("consumerRequestTimeoutMs", java.lang.Integer.class);
+        map.put("consumersCount", int.class);
+        map.put("consumerStreams", int.class);
+        map.put("fetchMaxBytes", java.lang.Integer.class);
+        map.put("fetchMinBytes", java.lang.Integer.class);
+        map.put("fetchWaitMaxMs", java.lang.Integer.class);
+        map.put("groupId", java.lang.String.class);
+        map.put("heartbeatIntervalMs", java.lang.Integer.class);
+        map.put("kafkaHeaderDeserializer", org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer.class);
+        map.put("keyDeserializer", java.lang.String.class);
+        map.put("maxPartitionFetchBytes", java.lang.Integer.class);
+        map.put("maxPollIntervalMs", java.lang.Long.class);
+        map.put("maxPollRecords", java.lang.Integer.class);
+        map.put("offsetRepository", org.apache.camel.spi.StateRepository.class);
+        map.put("partitionAssignor", java.lang.String.class);
+        map.put("pollTimeoutMs", java.lang.Long.class);
+        map.put("seekTo", java.lang.String.class);
+        map.put("sessionTimeoutMs", java.lang.Integer.class);
+        map.put("specificAvroReader", boolean.class);
+        map.put("topicIsPattern", boolean.class);
+        map.put("valueDeserializer", java.lang.String.class);
+        map.put("exceptionHandler", org.apache.camel.spi.ExceptionHandler.class);
+        map.put("exchangePattern", org.apache.camel.ExchangePattern.class);
+        map.put("bufferMemorySize", java.lang.Integer.class);
+        map.put("compressionCodec", java.lang.String.class);
+        map.put("connectionMaxIdleMs", java.lang.Integer.class);
+        map.put("enableIdempotence", boolean.class);
+        map.put("kafkaHeaderSerializer", org.apache.camel.component.kafka.serde.KafkaHeaderSerializer.class);
+        map.put("key", java.lang.String.class);
+        map.put("keySerializer", java.lang.String.class);
+        map.put("lazyStartProducer", boolean.class);
+        map.put("lingerMs", java.lang.Integer.class);
+        map.put("maxBlockMs", java.lang.Integer.class);
+        map.put("maxInFlightRequest", java.lang.Integer.class);
+        map.put("maxRequestSize", java.lang.Integer.class);
+        map.put("metadataMaxAgeMs", java.lang.Integer.class);
+        map.put("metricReporters", java.lang.String.class);
+        map.put("metricsSampleWindowMs", java.lang.Integer.class);
+        map.put("noOfMetricsSample", java.lang.Integer.class);
+        map.put("partitioner", java.lang.String.class);
+        map.put("partitionKey", java.lang.Integer.class);
+        map.put("producerBatchSize", java.lang.Integer.class);
+        map.put("queueBufferingMaxMessages", java.lang.Integer.class);
+        map.put("receiveBufferBytes", java.lang.Integer.class);
+        map.put("reconnectBackoffMs", java.lang.Integer.class);
+        map.put("recordMetadata", boolean.class);
+        map.put("requestRequiredAcks", java.lang.String.class);
+        map.put("requestTimeoutMs", java.lang.Integer.class);
+        map.put("retries", java.lang.Integer.class);
+        map.put("retryBackoffMs", java.lang.Integer.class);
+        map.put("sendBufferBytes", java.lang.Integer.class);
+        map.put("valueSerializer", java.lang.String.class);
+        map.put("workerPool", java.util.concurrent.ExecutorService.class);
+        map.put("workerPoolCoreSize", java.lang.Integer.class);
+        map.put("workerPoolMaxSize", java.lang.Integer.class);
+        map.put("basicPropertyBinding", boolean.class);
+        map.put("synchronous", boolean.class);
+        map.put("schemaRegistryURL", java.lang.String.class);
+        map.put("interceptorClasses", java.lang.String.class);
+        map.put("kerberosBeforeReloginMinTime", java.lang.Integer.class);
+        map.put("kerberosInitCmd", java.lang.String.class);
+        map.put("kerberosPrincipalToLocalRules", java.lang.String.class);
+        map.put("kerberosRenewJitter", java.lang.Double.class);
+        map.put("kerberosRenewWindowFactor", java.lang.Double.class);
+        map.put("saslJaasConfig", java.lang.String.class);
+        map.put("saslKerberosServiceName", java.lang.String.class);
+        map.put("saslMechanism", java.lang.String.class);
+        map.put("securityProtocol", java.lang.String.class);
+        map.put("sslCipherSuites", java.lang.String.class);
+        map.put("sslContextParameters", org.apache.camel.support.jsse.SSLContextParameters.class);
+        map.put("sslEnabledProtocols", java.lang.String.class);
+        map.put("sslEndpointAlgorithm", java.lang.String.class);
+        map.put("sslKeymanagerAlgorithm", java.lang.String.class);
+        map.put("sslKeyPassword", java.lang.String.class);
+        map.put("sslKeystoreLocation", java.lang.String.class);
+        map.put("sslKeystorePassword", java.lang.String.class);
+        map.put("sslKeystoreType", java.lang.String.class);
+        map.put("sslProtocol", java.lang.String.class);
+        map.put("sslProvider", java.lang.String.class);
+        map.put("sslTrustmanagerAlgorithm", java.lang.String.class);
+        map.put("sslTruststoreLocation", java.lang.String.class);
+        map.put("sslTruststorePassword", java.lang.String.class);
+        map.put("sslTruststoreType", java.lang.String.class);
+        ALL_OPTIONS = map;
+    }
+
+    @Override
+    public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
+        KafkaEndpoint target = (KafkaEndpoint) obj;
+        switch (ignoreCase ? name.toLowerCase() : name) {
+        case "additionalproperties":
+        case "additionalProperties": target.getConfiguration().setAdditionalProperties(property(camelContext, java.util.Map.class, value)); return true;
+        case "allowmanualcommit":
+        case "allowManualCommit": target.getConfiguration().setAllowManualCommit(property(camelContext, boolean.class, value)); return true;
+        case "autocommitenable":
+        case "autoCommitEnable": target.getConfiguration().setAutoCommitEnable(property(camelContext, java.lang.Boolean.class, value)); return true;
+        case "autocommitintervalms":
+        case "autoCommitIntervalMs": target.getConfiguration().setAutoCommitIntervalMs(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "autocommitonstop":
+        case "autoCommitOnStop": target.getConfiguration().setAutoCommitOnStop(property(camelContext, java.lang.String.class, value)); return true;
+        case "autooffsetreset":
+        case "autoOffsetReset": target.getConfiguration().setAutoOffsetReset(property(camelContext, java.lang.String.class, value)); return true;
+        case "basicpropertybinding":
+        case "basicPropertyBinding": target.setBasicPropertyBinding(property(camelContext, boolean.class, value)); return true;
+        case "breakonfirsterror":
+        case "breakOnFirstError": target.getConfiguration().setBreakOnFirstError(property(camelContext, boolean.class, value)); return true;
+        case "bridgeerrorhandler":
+        case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
+        case "brokers": target.getConfiguration().setBrokers(property(camelContext, java.lang.String.class, value)); return true;
+        case "buffermemorysize":
+        case "bufferMemorySize": target.getConfiguration().setBufferMemorySize(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "checkcrcs":
+        case "checkCrcs": target.getConfiguration().setCheckCrcs(property(camelContext, java.lang.Boolean.class, value)); return true;
+        case "clientid":
+        case "clientId": target.getConfiguration().setClientId(property(camelContext, java.lang.String.class, value)); return true;
+        case "compressioncodec":
+        case "compressionCodec": target.getConfiguration().setCompressionCodec(property(camelContext, java.lang.String.class, value)); return true;
+        case "connectionmaxidlems":
+        case "connectionMaxIdleMs": target.getConfiguration().setConnectionMaxIdleMs(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "consumerrequesttimeoutms":
+        case "consumerRequestTimeoutMs": target.getConfiguration().setConsumerRequestTimeoutMs(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "consumerstreams":
+        case "consumerStreams": target.getConfiguration().setConsumerStreams(property(camelContext, int.class, value)); return true;
+        case "consumerscount":
+        case "consumersCount": target.getConfiguration().setConsumersCount(property(camelContext, int.class, value)); return true;
+        case "enableidempotence":
+        case "enableIdempotence": target.getConfiguration().setEnableIdempotence(property(camelContext, boolean.class, value)); return true;
+        case "exceptionhandler":
+        case "exceptionHandler": target.setExceptionHandler(property(camelContext, org.apache.camel.spi.ExceptionHandler.class, value)); return true;
+        case "exchangepattern":
+        case "exchangePattern": target.setExchangePattern(property(camelContext, org.apache.camel.ExchangePattern.class, value)); return true;
+        case "fetchmaxbytes":
+        case "fetchMaxBytes": target.getConfiguration().setFetchMaxBytes(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "fetchminbytes":
+        case "fetchMinBytes": target.getConfiguration().setFetchMinBytes(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "fetchwaitmaxms":
+        case "fetchWaitMaxMs": target.getConfiguration().setFetchWaitMaxMs(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "groupid":
+        case "groupId": target.getConfiguration().setGroupId(property(camelContext, java.lang.String.class, value)); return true;
+        case "headerfilterstrategy":
+        case "headerFilterStrategy": target.getConfiguration().setHeaderFilterStrategy(property(camelContext, org.apache.camel.spi.HeaderFilterStrategy.class, value)); return true;
+        case "heartbeatintervalms":
+        case "heartbeatIntervalMs": target.getConfiguration().setHeartbeatIntervalMs(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "interceptorclasses":
+        case "interceptorClasses": target.getConfiguration().setInterceptorClasses(property(camelContext, java.lang.String.class, value)); return true;
+        case "kafkaheaderdeserializer":
+        case "kafkaHeaderDeserializer": target.getConfiguration().setKafkaHeaderDeserializer(property(camelContext, org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer.class, value)); return true;
+        case "kafkaheaderserializer":
+        case "kafkaHeaderSerializer": target.getConfiguration().setKafkaHeaderSerializer(property(camelContext, org.apache.camel.component.kafka.serde.KafkaHeaderSerializer.class, value)); return true;
+        case "kerberosbeforereloginmintime":
+        case "kerberosBeforeReloginMinTime": target.getConfiguration().setKerberosBeforeReloginMinTime(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "kerberosinitcmd":
+        case "kerberosInitCmd": target.getConfiguration().setKerberosInitCmd(property(camelContext, java.lang.String.class, value)); return true;
+        case "kerberosprincipaltolocalrules":
+        case "kerberosPrincipalToLocalRules": target.getConfiguration().setKerberosPrincipalToLocalRules(property(camelContext, java.lang.String.class, value)); return true;
+        case "kerberosrenewjitter":
+        case "kerberosRenewJitter": target.getConfiguration().setKerberosRenewJitter(property(camelContext, java.lang.Double.class, value)); return true;
+        case "kerberosrenewwindowfactor":
+        case "kerberosRenewWindowFactor": target.getConfiguration().setKerberosRenewWindowFactor(property(camelContext, java.lang.Double.class, value)); return true;
+        case "key": target.getConfiguration().setKey(property(camelContext, java.lang.String.class, value)); return true;
+        case "keydeserializer":
+        case "keyDeserializer": target.getConfiguration().setKeyDeserializer(property(camelContext, java.lang.String.class, value)); return true;
+        case "keyserializer":
+        case "keySerializer": target.getConfiguration().setKeySerializer(property(camelContext, java.lang.String.class, value)); return true;
+        case "lazystartproducer":
+        case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
+        case "lingerms":
+        case "lingerMs": target.getConfiguration().setLingerMs(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "maxblockms":
+        case "maxBlockMs": target.getConfiguration().setMaxBlockMs(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "maxinflightrequest":
+        case "maxInFlightRequest": target.getConfiguration().setMaxInFlightRequest(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "maxpartitionfetchbytes":
+        case "maxPartitionFetchBytes": target.getConfiguration().setMaxPartitionFetchBytes(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "maxpollintervalms":
+        case "maxPollIntervalMs": target.getConfiguration().setMaxPollIntervalMs(property(camelContext, java.lang.Long.class, value)); return true;
+        case "maxpollrecords":
+        case "maxPollRecords": target.getConfiguration().setMaxPollRecords(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "maxrequestsize":
+        case "maxRequestSize": target.getConfiguration().setMaxRequestSize(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "metadatamaxagems":
+        case "metadataMaxAgeMs": target.getConfiguration().setMetadataMaxAgeMs(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "metricreporters":
+        case "metricReporters": target.getConfiguration().setMetricReporters(property(camelContext, java.lang.String.class, value)); return true;
+        case "metricssamplewindowms":
+        case "metricsSampleWindowMs": target.getConfiguration().setMetricsSampleWindowMs(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "noofmetricssample":
+        case "noOfMetricsSample": target.getConfiguration().setNoOfMetricsSample(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "offsetrepository":
+        case "offsetRepository": target.getConfiguration().setOffsetRepository(property(camelContext, org.apache.camel.spi.StateRepository.class, value)); return true;
+        case "partitionassignor":
+        case "partitionAssignor": target.getConfiguration().setPartitionAssignor(property(camelContext, java.lang.String.class, value)); return true;
+        case "partitionkey":
+        case "partitionKey": target.getConfiguration().setPartitionKey(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "partitioner": target.getConfiguration().setPartitioner(property(camelContext, java.lang.String.class, value)); return true;
+        case "polltimeoutms":
+        case "pollTimeoutMs": target.getConfiguration().setPollTimeoutMs(property(camelContext, java.lang.Long.class, value)); return true;
+        case "producerbatchsize":
+        case "producerBatchSize": target.getConfiguration().setProducerBatchSize(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "queuebufferingmaxmessages":
+        case "queueBufferingMaxMessages": target.getConfiguration().setQueueBufferingMaxMessages(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "receivebufferbytes":
+        case "receiveBufferBytes": target.getConfiguration().setReceiveBufferBytes(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "reconnectbackoffmaxms":
+        case "reconnectBackoffMaxMs": target.getConfiguration().setReconnectBackoffMaxMs(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "reconnectbackoffms":
+        case "reconnectBackoffMs": target.getConfiguration().setReconnectBackoffMs(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "recordmetadata":
+        case "recordMetadata": target.getConfiguration().setRecordMetadata(property(camelContext, boolean.class, value)); return true;
+        case "requestrequiredacks":
+        case "requestRequiredAcks": target.getConfiguration().setRequestRequiredAcks(property(camelContext, java.lang.String.class, value)); return true;
+        case "requesttimeoutms":
+        case "requestTimeoutMs": target.getConfiguration().setRequestTimeoutMs(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "retries": target.getConfiguration().setRetries(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "retrybackoffms":
+        case "retryBackoffMs": target.getConfiguration().setRetryBackoffMs(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "sasljaasconfig":
+        case "saslJaasConfig": target.getConfiguration().setSaslJaasConfig(property(camelContext, java.lang.String.class, value)); return true;
+        case "saslkerberosservicename":
+        case "saslKerberosServiceName": target.getConfiguration().setSaslKerberosServiceName(property(camelContext, java.lang.String.class, value)); return true;
+        case "saslmechanism":
+        case "saslMechanism": target.getConfiguration().setSaslMechanism(property(camelContext, java.lang.String.class, value)); return true;
+        case "schemaregistryurl":
+        case "schemaRegistryURL": target.getConfiguration().setSchemaRegistryURL(property(camelContext, java.lang.String.class, value)); return true;
+        case "securityprotocol":
+        case "securityProtocol": target.getConfiguration().setSecurityProtocol(property(camelContext, java.lang.String.class, value)); return true;
+        case "seekto":
+        case "seekTo": target.getConfiguration().setSeekTo(property(camelContext, java.lang.String.class, value)); return true;
+        case "sendbufferbytes":
+        case "sendBufferBytes": target.getConfiguration().setSendBufferBytes(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "sessiontimeoutms":
+        case "sessionTimeoutMs": target.getConfiguration().setSessionTimeoutMs(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "shutdowntimeout":
+        case "shutdownTimeout": target.getConfiguration().setShutdownTimeout(property(camelContext, int.class, value)); return true;
+        case "specificavroreader":
+        case "specificAvroReader": target.getConfiguration().setSpecificAvroReader(property(camelContext, boolean.class, value)); return true;
+        case "sslciphersuites":
+        case "sslCipherSuites": target.getConfiguration().setSslCipherSuites(property(camelContext, java.lang.String.class, value)); return true;
+        case "sslcontextparameters":
+        case "sslContextParameters": target.getConfiguration().setSslContextParameters(property(camelContext, org.apache.camel.support.jsse.SSLContextParameters.class, value)); return true;
+        case "sslenabledprotocols":
+        case "sslEnabledProtocols": target.getConfiguration().setSslEnabledProtocols(property(camelContext, java.lang.String.class, value)); return true;
+        case "sslendpointalgorithm":
+        case "sslEndpointAlgorithm": target.getConfiguration().setSslEndpointAlgorithm(property(camelContext, java.lang.String.class, value)); return true;
+        case "sslkeypassword":
+        case "sslKeyPassword": target.getConfiguration().setSslKeyPassword(property(camelContext, java.lang.String.class, value)); return true;
+        case "sslkeymanageralgorithm":
+        case "sslKeymanagerAlgorithm": target.getConfiguration().setSslKeymanagerAlgorithm(property(camelContext, java.lang.String.class, value)); return true;
+        case "sslkeystorelocation":
+        case "sslKeystoreLocation": target.getConfiguration().setSslKeystoreLocation(property(camelContext, java.lang.String.class, value)); return true;
+        case "sslkeystorepassword":
+        case "sslKeystorePassword": target.getConfiguration().setSslKeystorePassword(property(camelContext, java.lang.String.class, value)); return true;
+        case "sslkeystoretype":
+        case "sslKeystoreType": target.getConfiguration().setSslKeystoreType(property(camelContext, java.lang.String.class, value)); return true;
+        case "sslprotocol":
+        case "sslProtocol": target.getConfiguration().setSslProtocol(property(camelContext, java.lang.String.class, value)); return true;
+        case "sslprovider":
+        case "sslProvider": target.getConfiguration().setSslProvider(property(camelContext, java.lang.String.class, value)); return true;
+        case "ssltrustmanageralgorithm":
+        case "sslTrustmanagerAlgorithm": target.getConfiguration().setSslTrustmanagerAlgorithm(property(camelContext, java.lang.String.class, value)); return true;
+        case "ssltruststorelocation":
+        case "sslTruststoreLocation": target.getConfiguration().setSslTruststoreLocation(property(camelContext, java.lang.String.class, value)); return true;
+        case "ssltruststorepassword":
+        case "sslTruststorePassword": target.getConfiguration().setSslTruststorePassword(property(camelContext, java.lang.String.class, value)); return true;
+        case "ssltruststoretype":
+        case "sslTruststoreType": target.getConfiguration().setSslTruststoreType(property(camelContext, java.lang.String.class, value)); return true;
+        case "synchronous": target.setSynchronous(property(camelContext, boolean.class, value)); return true;
+        case "topicispattern":
+        case "topicIsPattern": target.getConfiguration().setTopicIsPattern(property(camelContext, boolean.class, value)); return true;
+        case "valuedeserializer":
+        case "valueDeserializer": target.getConfiguration().setValueDeserializer(property(camelContext, java.lang.String.class, value)); return true;
+        case "valueserializer":
+        case "valueSerializer": target.getConfiguration().setValueSerializer(property(camelContext, java.lang.String.class, value)); return true;
+        case "workerpool":
+        case "workerPool": target.getConfiguration().setWorkerPool(property(camelContext, java.util.concurrent.ExecutorService.class, value)); return true;
+        case "workerpoolcoresize":
+        case "workerPoolCoreSize": target.getConfiguration().setWorkerPoolCoreSize(property(camelContext, java.lang.Integer.class, value)); return true;
+        case "workerpoolmaxsize":
+        case "workerPoolMaxSize": target.getConfiguration().setWorkerPoolMaxSize(property(camelContext, java.lang.Integer.class, value)); return true;
+        default: return false;
+        }
+    }
+
+    @Override
+    public Map<String, Object> getAllOptions(Object target) {
+        return ALL_OPTIONS;
+    }
+
+    @Override
+    public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
+        KafkaEndpoint target = (KafkaEndpoint) obj;
+        switch (ignoreCase ? name.toLowerCase() : name) {
+        case "additionalproperties":
+        case "additionalProperties": return target.getConfiguration().getAdditionalProperties();
+        case "allowmanualcommit":
+        case "allowManualCommit": return target.getConfiguration().isAllowManualCommit();
+        case "autocommitenable":
+        case "autoCommitEnable": return target.getConfiguration().getAutoCommitEnable();
+        case "autocommitintervalms":
+        case "autoCommitIntervalMs": return target.getConfiguration().getAutoCommitIntervalMs();
+        case "autocommitonstop":
+        case "autoCommitOnStop": return target.getConfiguration().getAutoCommitOnStop();
+        case "autooffsetreset":
+        case "autoOffsetReset": return target.getConfiguration().getAutoOffsetReset();
+        case "basicpropertybinding":
+        case "basicPropertyBinding": return target.isBasicPropertyBinding();
+        case "breakonfirsterror":
+        case "breakOnFirstError": return target.getConfiguration().isBreakOnFirstError();
+        case "bridgeerrorhandler":
+        case "bridgeErrorHandler": return target.isBridgeErrorHandler();
+        case "brokers": return target.getConfiguration().getBrokers();
+        case "buffermemorysize":
+        case "bufferMemorySize": return target.getConfiguration().getBufferMemorySize();
+        case "checkcrcs":
+        case "checkCrcs": return target.getConfiguration().getCheckCrcs();
+        case "clientid":
+        case "clientId": return target.getConfiguration().getClientId();
+        case "compressioncodec":
+        case "compressionCodec": return target.getConfiguration().getCompressionCodec();
+        case "connectionmaxidlems":
+        case "connectionMaxIdleMs": return target.getConfiguration().getConnectionMaxIdleMs();
+        case "consumerrequesttimeoutms":
+        case "consumerRequestTimeoutMs": return target.getConfiguration().getConsumerRequestTimeoutMs();
+        case "consumerstreams":
+        case "consumerStreams": return target.getConfiguration().getConsumerStreams();
+        case "consumerscount":
+        case "consumersCount": return target.getConfiguration().getConsumersCount();
+        case "enableidempotence":
+        case "enableIdempotence": return target.getConfiguration().isEnableIdempotence();
+        case "exceptionhandler":
+        case "exceptionHandler": return target.getExceptionHandler();
+        case "exchangepattern":
+        case "exchangePattern": return target.getExchangePattern();
+        case "fetchmaxbytes":
+        case "fetchMaxBytes": return target.getConfiguration().getFetchMaxBytes();
+        case "fetchminbytes":
+        case "fetchMinBytes": return target.getConfiguration().getFetchMinBytes();
+        case "fetchwaitmaxms":
+        case "fetchWaitMaxMs": return target.getConfiguration().getFetchWaitMaxMs();
+        case "groupid":
+        case "groupId": return target.getConfiguration().getGroupId();
+        case "headerfilterstrategy":
+        case "headerFilterStrategy": return target.getConfiguration().getHeaderFilterStrategy();
+        case "heartbeatintervalms":
+        case "heartbeatIntervalMs": return target.getConfiguration().getHeartbeatIntervalMs();
+        case "interceptorclasses":
+        case "interceptorClasses": return target.getConfiguration().getInterceptorClasses();
+        case "kafkaheaderdeserializer":
+        case "kafkaHeaderDeserializer": return target.getConfiguration().getKafkaHeaderDeserializer();
+        case "kafkaheaderserializer":
+        case "kafkaHeaderSerializer": return target.getConfiguration().getKafkaHeaderSerializer();
+        case "kerberosbeforereloginmintime":
+        case "kerberosBeforeReloginMinTime": return target.getConfiguration().getKerberosBeforeReloginMinTime();
+        case "kerberosinitcmd":
+        case "kerberosInitCmd": return target.getConfiguration().getKerberosInitCmd();
+        case "kerberosprincipaltolocalrules":
+        case "kerberosPrincipalToLocalRules": return target.getConfiguration().getKerberosPrincipalToLocalRules();
+        case "kerberosrenewjitter":
+        case "kerberosRenewJitter": return target.getConfiguration().getKerberosRenewJitter();
+        case "kerberosrenewwindowfactor":
+        case "kerberosRenewWindowFactor": return target.getConfiguration().getKerberosRenewWindowFactor();
+        case "key": return target.getConfiguration().getKey();
+        case "keydeserializer":
+        case "keyDeserializer": return target.getConfiguration().getKeyDeserializer();
+        case "keyserializer":
+        case "keySerializer": return target.getConfiguration().getKeySerializer();
+        case "lazystartproducer":
+        case "lazyStartProducer": return target.isLazyStartProducer();
+        case "lingerms":
+        case "lingerMs": return target.getConfiguration().getLingerMs();
+        case "maxblockms":
+        case "maxBlockMs": return target.getConfiguration().getMaxBlockMs();
+        case "maxinflightrequest":
+        case "maxInFlightRequest": return target.getConfiguration().getMaxInFlightRequest();
+        case "maxpartitionfetchbytes":
+        case "maxPartitionFetchBytes": return target.getConfiguration().getMaxPartitionFetchBytes();
+        case "maxpollintervalms":
+        case "maxPollIntervalMs": return target.getConfiguration().getMaxPollIntervalMs();
+        case "maxpollrecords":
+        case "maxPollRecords": return target.getConfiguration().getMaxPollRecords();
+        case "maxrequestsize":
+        case "maxRequestSize": return target.getConfiguration().getMaxRequestSize();
+        case "metadatamaxagems":
+        case "metadataMaxAgeMs": return target.getConfiguration().getMetadataMaxAgeMs();
+        case "metricreporters":
+        case "metricReporters": return target.getConfiguration().getMetricReporters();
+        case "metricssamplewindowms":
+        case "metricsSampleWindowMs": return target.getConfiguration().getMetricsSampleWindowMs();
+        case "noofmetricssample":
+        case "noOfMetricsSample": return target.getConfiguration().getNoOfMetricsSample();
+        case "offsetrepository":
+        case "offsetRepository": return target.getConfiguration().getOffsetRepository();
+        case "partitionassignor":
+        case "partitionAssignor": return target.getConfiguration().getPartitionAssignor();
+        case "partitionkey":
+        case "partitionKey": return target.getConfiguration().getPartitionKey();
+        case "partitioner": return target.getConfiguration().getPartitioner();
+        case "polltimeoutms":
+        case "pollTimeoutMs": return target.getConfiguration().getPollTimeoutMs();
+        case "producerbatchsize":
+        case "producerBatchSize": return target.getConfiguration().getProducerBatchSize();
+        case "queuebufferingmaxmessages":
+        case "queueBufferingMaxMessages": return target.getConfiguration().getQueueBufferingMaxMessages();
+        case "receivebufferbytes":
+        case "receiveBufferBytes": return target.getConfiguration().getReceiveBufferBytes();
+        case "reconnectbackoffmaxms":
+        case "reconnectBackoffMaxMs": return target.getConfiguration().getReconnectBackoffMaxMs();
+        case "reconnectbackoffms":
+        case "reconnectBackoffMs": return target.getConfiguration().getReconnectBackoffMs();
+        case "recordmetadata":
+        case "recordMetadata": return target.getConfiguration().isRecordMetadata();
+        case "requestrequiredacks":
+        case "requestRequiredAcks": return target.getConfiguration().getRequestRequiredAcks();
+        case "requesttimeoutms":
+        case "requestTimeoutMs": return target.getConfiguration().getRequestTimeoutMs();
+        case "retries": return target.getConfiguration().getRetries();
+        case "retrybackoffms":
+        case "retryBackoffMs": return target.getConfiguration().getRetryBackoffMs();
+        case "sasljaasconfig":
+        case "saslJaasConfig": return target.getConfiguration().getSaslJaasConfig();
+        case "saslkerberosservicename":
+        case "saslKerberosServiceName": return target.getConfiguration().getSaslKerberosServiceName();
+        case "saslmechanism":
+        case "saslMechanism": return target.getConfiguration().getSaslMechanism();
+        case "schemaregistryurl":
+        case "schemaRegistryURL": return target.getConfiguration().getSchemaRegistryURL();
+        case "securityprotocol":
+        case "securityProtocol": return target.getConfiguration().getSecurityProtocol();
+        case "seekto":
+        case "seekTo": return target.getConfiguration().getSeekTo();
+        case "sendbufferbytes":
+        case "sendBufferBytes": return target.getConfiguration().getSendBufferBytes();
+        case "sessiontimeoutms":
+        case "sessionTimeoutMs": return target.getConfiguration().getSessionTimeoutMs();
+        case "shutdowntimeout":
+        case "shutdownTimeout": return target.getConfiguration().getShutdownTimeout();
+        case "specificavroreader":
+        case "specificAvroReader": return target.getConfiguration().isSpecificAvroReader();
+        case "sslciphersuites":
+        case "sslCipherSuites": return target.getConfiguration().getSslCipherSuites();
+        case "sslcontextparameters":
+        case "sslContextParameters": return target.getConfiguration().getSslContextParameters();
+        case "sslenabledprotocols":
+        case "sslEnabledProtocols": return target.getConfiguration().getSslEnabledProtocols();
+        case "sslendpointalgorithm":
+        case "sslEndpointAlgorithm": return target.getConfiguration().getSslEndpointAlgorithm();
+        case "sslkeypassword":
+        case "sslKeyPassword": return target.getConfiguration().getSslKeyPassword();
+        case "sslkeymanageralgorithm":
+        case "sslKeymanagerAlgorithm": return target.getConfiguration().getSslKeymanagerAlgorithm();
+        case "sslkeystorelocation":
+        case "sslKeystoreLocation": return target.getConfiguration().getSslKeystoreLocation();
+        case "sslkeystorepassword":
+        case "sslKeystorePassword": return target.getConfiguration().getSslKeystorePassword();
+        case "sslkeystoretype":
+        case "sslKeystoreType": return target.getConfiguration().getSslKeystoreType();
+        case "sslprotocol":
+        case "sslProtocol": return target.getConfiguration().getSslProtocol();
+        case "sslprovider":
+        case "sslProvider": return target.getConfiguration().getSslProvider();
+        case "ssltrustmanageralgorithm":
+        case "sslTrustmanagerAlgorithm": return target.getConfiguration().getSslTrustmanagerAlgorithm();
+        case "ssltruststorelocation":
+        case "sslTruststoreLocation": return target.getConfiguration().getSslTruststoreLocation();
+        case "ssltruststorepassword":
+        case "sslTruststorePassword": return target.getConfiguration().getSslTruststorePassword();
+        case "ssltruststoretype":
+        case "sslTruststoreType": return target.getConfiguration().getSslTruststoreType();
+        case "synchronous": return target.isSynchronous();
+        case "topicispattern":
+        case "topicIsPattern": return target.getConfiguration().isTopicIsPattern();
+        case "valuedeserializer":
+        case "valueDeserializer": return target.getConfiguration().getValueDeserializer();
+        case "valueserializer":
+        case "valueSerializer": return target.getConfiguration().getValueSerializer();
+        case "workerpool":
+        case "workerPool": return target.getConfiguration().getWorkerPool();
+        case "workerpoolcoresize":
+        case "workerPoolCoreSize": return target.getConfiguration().getWorkerPoolCoreSize();
+        case "workerpoolmaxsize":
+        case "workerPoolMaxSize": return target.getConfiguration().getWorkerPoolMaxSize();
+        default: return null;
+        }
+    }
+}
+
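The switch above is the generated property binding for KafkaEndpoint, and its camelCase branches carry the consistently renamed serializer/deserializer options from CAMEL-15770. As a minimal sketch only (not part of this diff), a route that sets the renamed options on the endpoint URI could look like the following; the broker address, topic name and the #myHeaderSerializer registry bean are hypothetical:

    import org.apache.camel.builder.RouteBuilder;

    // Sketch: producing to Kafka with the renamed options. All values are illustrative.
    public class KafkaSerializerRoute extends RouteBuilder {
        @Override
        public void configure() {
            from("direct:start")
                .to("kafka:test-topic?brokers=localhost:9092"
                    + "&keySerializer=org.apache.kafka.common.serialization.StringSerializer"
                    + "&valueSerializer=org.apache.kafka.common.serialization.StringSerializer"
                    + "&kafkaHeaderSerializer=#myHeaderSerializer");
        }
    }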
diff --git a/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointUriFactory.java b/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointUriFactory.java
new file mode 100644
index 0000000..e0d88d3
--- /dev/null
+++ b/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointUriFactory.java
@@ -0,0 +1,163 @@
+/* Generated by camel build tools - do NOT edit this file! */
+package org.apache.camel.component.kafka;
+
+import java.net.URISyntaxException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.camel.spi.EndpointUriFactory;
+
+/**
+ * Generated by camel build tools - do NOT edit this file!
+ */
+public class KafkaEndpointUriFactory extends org.apache.camel.support.component.EndpointUriFactorySupport implements EndpointUriFactory {
+
+    private static final String BASE = ":topic";
+
+    private static final Set<String> PROPERTY_NAMES;
+    private static final Set<String> SECRET_PROPERTY_NAMES;
+    static {
+        Set<String> props = new HashSet<>(98);
+        props.add("synchronous");
+        props.add("queueBufferingMaxMessages");
+        props.add("allowManualCommit");
+        props.add("consumersCount");
+        props.add("receiveBufferBytes");
+        props.add("reconnectBackoffMaxMs");
+        props.add("valueDeserializer");
+        props.add("metricReporters");
+        props.add("sslTruststoreType");
+        props.add("sendBufferBytes");
+        props.add("heartbeatIntervalMs");
+        props.add("consumerStreams");
+        props.add("kafkaHeaderSerializer");
+        props.add("interceptorClasses");
+        props.add("sslKeystoreType");
+        props.add("breakOnFirstError");
+        props.add("requestRequiredAcks");
+        props.add("enableIdempotence");
+        props.add("fetchWaitMaxMs");
+        props.add("retries");
+        props.add("maxPollRecords");
+        props.add("additionalProperties");
+        props.add("keyDeserializer");
+        props.add("producerBatchSize");
+        props.add("retryBackoffMs");
+        props.add("brokers");
+        props.add("metricsSampleWindowMs");
+        props.add("sslContextParameters");
+        props.add("sslKeyPassword");
+        props.add("keySerializer");
+        props.add("noOfMetricsSample");
+        props.add("maxPartitionFetchBytes");
+        props.add("partitionKey");
+        props.add("headerFilterStrategy");
+        props.add("sslTruststorePassword");
+        props.add("sessionTimeoutMs");
+        props.add("key");
+        props.add("topicIsPattern");
+        props.add("sslTruststoreLocation");
+        props.add("clientId");
+        props.add("maxRequestSize");
+        props.add("recordMetadata");
+        props.add("sslTrustmanagerAlgorithm");
+        props.add("compressionCodec");
+        props.add("autoCommitOnStop");
+        props.add("workerPoolCoreSize");
+        props.add("autoCommitEnable");
+        props.add("consumerRequestTimeoutMs");
+        props.add("maxPollIntervalMs");
+        props.add("kerberosInitCmd");
+        props.add("workerPoolMaxSize");
+        props.add("reconnectBackoffMs");
+        props.add("groupId");
+        props.add("offsetRepository");
+        props.add("kerberosRenewJitter");
+        props.add("sslProvider");
+        props.add("saslKerberosServiceName");
+        props.add("bridgeErrorHandler");
+        props.add("shutdownTimeout");
+        props.add("saslMechanism");
+        props.add("workerPool");
+        props.add("lazyStartProducer");
+        props.add("sslKeystorePassword");
+        props.add("sslEndpointAlgorithm");
+        props.add("topic");
+        props.add("sslProtocol");
+        props.add("sslKeymanagerAlgorithm");
+        props.add("pollTimeoutMs");
+        props.add("exceptionHandler");
+        props.add("maxBlockMs");
+        props.add("kerberosBeforeReloginMinTime");
+        props.add("bufferMemorySize");
+        props.add("basicPropertyBinding");
+        props.add("metadataMaxAgeMs");
+        props.add("sslCipherSuites");
+        props.add("specificAvroReader");
+        props.add("saslJaasConfig");
+        props.add("fetchMinBytes");
+        props.add("connectionMaxIdleMs");
+        props.add("lingerMs");
+        props.add("kerberosRenewWindowFactor");
+        props.add("securityProtocol");
+        props.add("autoCommitIntervalMs");
+        props.add("partitioner");
+        props.add("kerberosPrincipalToLocalRules");
+        props.add("sslEnabledProtocols");
+        props.add("sslKeystoreLocation");
+        props.add("schemaRegistryURL");
+        props.add("maxInFlightRequest");
+        props.add("exchangePattern");
+        props.add("valueSerializer");
+        props.add("autoOffsetReset");
+        props.add("seekTo");
+        props.add("requestTimeoutMs");
+        props.add("kafkaHeaderDeserializer");
+        props.add("fetchMaxBytes");
+        props.add("checkCrcs");
+        props.add("partitionAssignor");
+        PROPERTY_NAMES = Collections.unmodifiableSet(props);
+        Set<String> secretProps = new HashSet<>(4);
+        secretProps.add("sslKeystorePassword");
+        secretProps.add("sslTruststorePassword");
+        secretProps.add("saslJaasConfig");
+        secretProps.add("sslKeyPassword");
+        SECRET_PROPERTY_NAMES = Collections.unmodifiableSet(secretProps);
+    }
+
+    @Override
+    public boolean isEnabled(String scheme) {
+        return "kafka".equals(scheme);
+    }
+
+    @Override
+    public String buildUri(String scheme, Map<String, Object> properties, boolean encode) throws URISyntaxException {
+        String syntax = scheme + BASE;
+        String uri = syntax;
+
+        Map<String, Object> copy = new HashMap<>(properties);
+
+        uri = buildPathParameter(syntax, uri, "topic", null, true, copy);
+        uri = buildQueryParameters(uri, copy, encode);
+        return uri;
+    }
+
+    @Override
+    public Set<String> propertyNames() {
+        return PROPERTY_NAMES;
+    }
+
+    @Override
+    public Set<String> secretPropertyNames() {
+        return SECRET_PROPERTY_NAMES;
+    }
+
+    @Override
+    public boolean isLenientProperties() {
+        return false;
+    }
+}
+
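The URI factory assembles kafka endpoint URIs from the same renamed property set. A rough usage sketch, assuming the factory is given a CamelContext as its EndpointUriFactorySupport base class expects; the topic, broker and serializer values are made up:

    import java.util.LinkedHashMap;
    import java.util.Map;

    import org.apache.camel.component.kafka.KafkaEndpointUriFactory;
    import org.apache.camel.impl.DefaultCamelContext;

    // Sketch: assembling a kafka URI through the generated factory.
    public class KafkaUriFactoryExample {
        public static void main(String[] args) throws Exception {
            KafkaEndpointUriFactory factory = new KafkaEndpointUriFactory();
            factory.setCamelContext(new DefaultCamelContext());

            Map<String, Object> props = new LinkedHashMap<>();
            props.put("topic", "test-topic");
            props.put("brokers", "localhost:9092");
            props.put("valueSerializer", "org.apache.kafka.common.serialization.StringSerializer");

            // Expected shape: kafka:test-topic?brokers=...&valueSerializer=...
            System.out.println(factory.buildUri("kafka", props, false));
        }
    }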
diff --git a/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/component.properties b/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/component.properties
new file mode 100644
index 0000000..c2670a0
--- /dev/null
+++ b/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/component.properties
@@ -0,0 +1,7 @@
+# Generated by camel build tools - do NOT edit this file!
+components=kafka
+groupId=org.apache.camel
+artifactId=camel-kafka
+version=3.7.0-SNAPSHOT
+projectName=Camel :: Kafka
+projectDescription=Camel Kafka support
diff --git a/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/component/kafka b/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/component/kafka
new file mode 100644
index 0000000..e34127d
--- /dev/null
+++ b/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/component/kafka
@@ -0,0 +1,2 @@
+# Generated by camel build tools - do NOT edit this file!
+class=org.apache.camel.component.kafka.KafkaComponent
diff --git a/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/configurer/kafka-component b/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/configurer/kafka-component
new file mode 100644
index 0000000..6b9be66
--- /dev/null
+++ b/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/configurer/kafka-component
@@ -0,0 +1,2 @@
+# Generated by camel build tools - do NOT edit this file!
+class=org.apache.camel.component.kafka.KafkaComponentConfigurer
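This SPI entry is how Camel locates the generated KafkaComponentConfigurer so options such as valueSerializer can be bound without reflection. A hedged sketch of invoking that configurer directly, assuming the standard PropertyConfigurer.configure(...) contract; the context setup and serializer class are illustrative:

    import org.apache.camel.CamelContext;
    import org.apache.camel.component.kafka.KafkaComponent;
    import org.apache.camel.component.kafka.KafkaComponentConfigurer;
    import org.apache.camel.impl.DefaultCamelContext;

    // Sketch: binding a renamed option on the component through its generated configurer.
    public class KafkaConfigurerExample {
        public static void main(String[] args) {
            CamelContext context = new DefaultCamelContext();
            KafkaComponent kafka = context.getComponent("kafka", KafkaComponent.class);

            KafkaComponentConfigurer configurer = new KafkaComponentConfigurer();
            boolean bound = configurer.configure(context, kafka, "valueSerializer",
                    "org.apache.kafka.common.serialization.ByteArraySerializer", false);
            System.out.println("valueSerializer bound: " + bound);
        }
    }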
diff --git a/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/configurer/kafka-endpoint b/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/configurer/kafka-endpoint
new file mode 100644
index 0000000..aa0bd1a
--- /dev/null
+++ b/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/configurer/kafka-endpoint
@@ -0,0 +1,2 @@
+# Generated by camel build tools - do NOT edit this file!
+class=org.apache.camel.component.kafka.KafkaEndpointConfigurer
diff --git a/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/urifactory/kafka-endpoint b/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/urifactory/kafka-endpoint
new file mode 100644
index 0000000..dce8718
--- /dev/null
+++ b/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/urifactory/kafka-endpoint
@@ -0,0 +1,2 @@
+# Generated by camel build tools - do NOT edit this file!
+class=org.apache.camel.component.kafka.KafkaEndpointUriFactory
diff --git a/components/camel-kafka/src/generated/resources/org/apache/camel/component/kafka/kafka.json b/components/camel-kafka/src/generated/resources/org/apache/camel/component/kafka/kafka.json
new file mode 100644
index 0000000..82d0b38
--- /dev/null
+++ b/components/camel-kafka/src/generated/resources/org/apache/camel/component/kafka/kafka.json
@@ -0,0 +1,223 @@
+{
+  "component": {
+    "kind": "component",
+    "name": "kafka",
+    "title": "Kafka",
+    "description": "Sent and receive messages to\/from an Apache Kafka broker.",
+    "deprecated": false,
+    "firstVersion": "2.13.0",
+    "label": "messaging",
+    "javaType": "org.apache.camel.component.kafka.KafkaComponent",
+    "supportLevel": "Stable",
+    "groupId": "org.apache.camel",
+    "artifactId": "camel-kafka",
+    "version": "3.7.0-SNAPSHOT",
+    "scheme": "kafka",
+    "extendsScheme": "",
+    "syntax": "kafka:topic",
+    "async": false,
+    "api": false,
+    "consumerOnly": false,
+    "producerOnly": false,
+    "lenientProperties": false
+  },
+  "componentProperties": {
+    "additionalProperties": { "kind": "property", "displayName": "Additional Properties", "group": "common", "label": "common", "required": false, "type": "object", "javaType": "java.util.Map<java.lang.String, java.lang.Object>", "prefix": "additionalProperties.", "multiValue": true, "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Sets additional properties for either [...]
+    "brokers": { "kind": "property", "displayName": "Brokers", "group": "common", "label": "common", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "URL of the Kafka brokers to use. The format is host1:port1,host2:port2, and the list can be a subset of brokers or a VIP pointing to a subset of brokers [...]
+    "clientId": { "kind": "property", "displayName": "Client Id", "group": "common", "label": "common", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The client id is a user-specified string sent in each request to help trace calls. It should logically identify the application making the request." },
+    "configuration": { "kind": "property", "displayName": "Configuration", "group": "common", "label": "", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.KafkaConfiguration", "deprecated": false, "secret": false, "description": "Allows to pre-configure the Kafka component with common options that the endpoints will reuse." },
+    "headerFilterStrategy": { "kind": "property", "displayName": "Header Filter Strategy", "group": "common", "label": "common", "required": false, "type": "object", "javaType": "org.apache.camel.spi.HeaderFilterStrategy", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom HeaderFilterStrategy to filter header to and from Camel message." },
+    "reconnectBackoffMaxMs": { "kind": "property", "displayName": "Reconnect Backoff Max Ms", "group": "common", "label": "common", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "1000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum amount of time in milliseconds to wait when reconnecting to a broker that has repea [...]
+    "shutdownTimeout": { "kind": "property", "displayName": "Shutdown Timeout", "group": "common", "label": "common", "required": false, "type": "integer", "javaType": "int", "deprecated": false, "secret": false, "defaultValue": 30000, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Timeout in milli seconds to wait gracefully for the consumer or producer to shutdown and terminate its worker threads." },
+    "allowManualCommit": { "kind": "property", "displayName": "Allow Manual Commit", "group": "consumer", "label": "consumer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Whether to allow doing manual commits via KafkaManualCommit. If this option is enabled then an instance of Kafk [...]
+    "autoCommitEnable": { "kind": "property", "displayName": "Auto Commit Enable", "group": "consumer", "label": "consumer", "required": false, "type": "boolean", "javaType": "java.lang.Boolean", "deprecated": false, "secret": false, "defaultValue": "true", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "If true, periodically commit to ZooKeeper the offset of messages already fetched by the consumer. This [...]
+    "autoCommitIntervalMs": { "kind": "property", "displayName": "Auto Commit Interval Ms", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "5000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The frequency in ms that the consumer offsets are committed to zookeeper." },
+    "autoCommitOnStop": { "kind": "property", "displayName": "Auto Commit On Stop", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "enum": [ "sync", "async", "none" ], "deprecated": false, "secret": false, "defaultValue": "sync", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Whether to perform an explicit auto commit when the consumer stops [...]
+    "autoOffsetReset": { "kind": "property", "displayName": "Auto Offset Reset", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "enum": [ "latest", "earliest", "none" ], "deprecated": false, "secret": false, "defaultValue": "latest", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "What to do when there is no initial offset in ZooKeeper or if  [...]
+    "breakOnFirstError": { "kind": "property", "displayName": "Break On First Error", "group": "consumer", "label": "consumer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "This options controls what happens when a consumer is processing an exchange and it fails. If the option is fa [...]
+    "bridgeErrorHandler": { "kind": "property", "displayName": "Bridge Error Handler", "group": "consumer", "label": "consumer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "description": "Allows for bridging the consumer to the Camel routing Error Handler, which mean any exceptions occurred while the consumer is trying to pickup incoming messages, or the likes, will now be processed as a message and handled by [...]
+    "checkCrcs": { "kind": "property", "displayName": "Check Crcs", "group": "consumer", "label": "consumer", "required": false, "type": "boolean", "javaType": "java.lang.Boolean", "deprecated": false, "secret": false, "defaultValue": "true", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Automatically check the CRC32 of the records consumed. This ensures no on-the-wire or on-disk corruption to the messa [...]
+    "consumerRequestTimeoutMs": { "kind": "property", "displayName": "Consumer Request Timeout Ms", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "40000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The configuration controls the maximum amount of time the client will wait for the r [...]
+    "consumersCount": { "kind": "property", "displayName": "Consumers Count", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "int", "deprecated": false, "secret": false, "defaultValue": 1, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The number of consumers that connect to kafka server" },
+    "consumerStreams": { "kind": "property", "displayName": "Consumer Streams", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "int", "deprecated": false, "secret": false, "defaultValue": 10, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Number of concurrent consumers on the consumer" },
+    "fetchMaxBytes": { "kind": "property", "displayName": "Fetch Max Bytes", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "52428800", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum amount of data the server should return for a fetch request This is not an absolute maximum, [...]
+    "fetchMinBytes": { "kind": "property", "displayName": "Fetch Min Bytes", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "1", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The minimum amount of data the server should return for a fetch request. If insufficient data is available the  [...]
+    "fetchWaitMaxMs": { "kind": "property", "displayName": "Fetch Wait Max Ms", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "500", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum amount of time the server will block before answering the fetch request if there isn't suffici [...]
+    "groupId": { "kind": "property", "displayName": "Group Id", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "A string that uniquely identifies the group of consumer processes to which this consumer belongs. By setting the same group id multiple processes i [...]
+    "heartbeatIntervalMs": { "kind": "property", "displayName": "Heartbeat Interval Ms", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "3000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The expected time between heartbeats to the consumer coordinator when using Kafka's group manage [...]
+    "kafkaHeaderDeserializer": { "kind": "property", "displayName": "Kafka Header Deserializer", "group": "consumer", "label": "consumer", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom KafkaHeaderDeserializer to deserialize kafka headers [...]
+    "keyDeserializer": { "kind": "property", "displayName": "Key Deserializer", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringDeserializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Deserializer class for key that implements the Deserial [...]
+    "maxPartitionFetchBytes": { "kind": "property", "displayName": "Max Partition Fetch Bytes", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "1048576", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum amount of data per-partition the server will return. The maximum total mem [...]
+    "maxPollIntervalMs": { "kind": "property", "displayName": "Max Poll Interval Ms", "group": "consumer", "label": "consumer", "required": false, "type": "duration", "javaType": "java.lang.Long", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum delay between invocations of poll() when using consumer group management. This places an upper bound on the amoun [...]
+    "maxPollRecords": { "kind": "property", "displayName": "Max Poll Records", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "500", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum number of records returned in a single call to poll()" },
+    "offsetRepository": { "kind": "property", "displayName": "Offset Repository", "group": "consumer", "label": "consumer", "required": false, "type": "object", "javaType": "org.apache.camel.spi.StateRepository<java.lang.String, java.lang.String>", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The offset repository to use in order to locally store the offset of each [...]
+    "partitionAssignor": { "kind": "property", "displayName": "Partition Assignor", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.clients.consumer.RangeAssignor", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The class name of the partition assignment strategy that the [...]
+    "pollTimeoutMs": { "kind": "property", "displayName": "Poll Timeout Ms", "group": "consumer", "label": "consumer", "required": false, "type": "duration", "javaType": "java.lang.Long", "deprecated": false, "secret": false, "defaultValue": "5000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The timeout used when polling the KafkaConsumer." },
+    "seekTo": { "kind": "property", "displayName": "Seek To", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "enum": [ "beginning", "end" ], "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Set if KafkaConsumer will read from beginning or end on startup: beginning : read from beginning end : read from end  [...]
+    "sessionTimeoutMs": { "kind": "property", "displayName": "Session Timeout Ms", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "10000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The timeout used to detect failures when using Kafka's group management facilities." },
+    "specificAvroReader": { "kind": "property", "displayName": "Specific Avro Reader", "group": "consumer", "label": "confluent,consumer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "This enables the use of a specific Avro reader for use with the Confluent Platform schema registry  [...]
+    "topicIsPattern": { "kind": "property", "displayName": "Topic Is Pattern", "group": "consumer", "label": "consumer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Whether the topic is a pattern (regular expression). This can be used to subscribe to dynamic number of topics matchi [...]
+    "valueDeserializer": { "kind": "property", "displayName": "Value Deserializer", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringDeserializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Deserializer class for value that implements the De [...]
+    "kafkaManualCommitFactory": { "kind": "property", "displayName": "Kafka Manual Commit Factory", "group": "consumer (advanced)", "label": "consumer,advanced", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.KafkaManualCommitFactory", "deprecated": false, "secret": false, "description": "Factory to use for creating KafkaManualCommit instances. This allows to plugin a custom factory to create custom KafkaManualCommit instances in case special logic is  [...]
+    "bufferMemorySize": { "kind": "property", "displayName": "Buffer Memory Size", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "33554432", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The total bytes of memory the producer can use to buffer records waiting to be sent to the server. [...]
+    "compressionCodec": { "kind": "property", "displayName": "Compression Codec", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "enum": [ "none", "gzip", "snappy", "lz4" ], "deprecated": false, "secret": false, "defaultValue": "none", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "This parameter allows you to specify the compression codec f [...]
+    "connectionMaxIdleMs": { "kind": "property", "displayName": "Connection Max Idle Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "540000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Close idle connections after the number of milliseconds specified by this config." },
+    "enableIdempotence": { "kind": "property", "displayName": "Enable Idempotence", "group": "producer", "label": "producer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "If set to 'true' the producer will ensure that exactly one copy of each message is written in the stream. If 'fa [...]
+    "kafkaHeaderSerializer": { "kind": "property", "displayName": "Kafka Header Serializer", "group": "producer", "label": "producer", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.serde.KafkaHeaderSerializer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom KafkaHeaderSerializer to serialize kafka headers values" },
+    "key": { "kind": "property", "displayName": "Key", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The record key (or null if no key is specified). If this option has been configured then it take precedence over header KafkaConstants#KEY" },
+    "keySerializer": { "kind": "property", "displayName": "Key Serializer", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringSerializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The serializer class for keys (defaults to the same as for me [...]
+    "lazyStartProducer": { "kind": "property", "displayName": "Lazy Start Producer", "group": "producer", "label": "producer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "description": "Whether the producer should be started lazy (on the first message). By starting lazy you can use this to allow CamelContext and routes to startup in situations where a producer may otherwise fail during starting and cause the r [...]
+    "lingerMs": { "kind": "property", "displayName": "Linger Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "0", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The producer groups together any records that arrive in between request transmissions into a single batched request. Norma [...]
+    "maxBlockMs": { "kind": "property", "displayName": "Max Block Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "60000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The configuration controls how long sending to kafka will block. These methods can be blocked for multiple reason [...]
+    "maxInFlightRequest": { "kind": "property", "displayName": "Max In Flight Request", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "5", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum number of unacknowledged requests the client will send on a single connection before blo [...]
+    "maxRequestSize": { "kind": "property", "displayName": "Max Request Size", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "1048576", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum size of a request. This is also effectively a cap on the maximum record size. Note that the [...]
+    "metadataMaxAgeMs": { "kind": "property", "displayName": "Metadata Max Age Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "300000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The period of time in milliseconds after which we force a refresh of metadata even if we haven't se [...]
+    "metricReporters": { "kind": "property", "displayName": "Metric Reporters", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "A list of classes to use as metrics reporters. Implementing the MetricReporter interface allows plugging in classes that will be no [...]
+    "metricsSampleWindowMs": { "kind": "property", "displayName": "Metrics Sample Window Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "30000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The number of samples maintained to compute metrics." },
+    "noOfMetricsSample": { "kind": "property", "displayName": "No Of Metrics Sample", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "2", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The number of samples maintained to compute metrics." },
+    "partitioner": { "kind": "property", "displayName": "Partitioner", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.clients.producer.internals.DefaultPartitioner", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The partitioner class for partitioning messages amongst su [...]
+    "partitionKey": { "kind": "property", "displayName": "Partition Key", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The partition to which the record will be sent (or null if no partition was specified). If this option has been configured then it take [...]
+    "producerBatchSize": { "kind": "property", "displayName": "Producer Batch Size", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "16384", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The producer will attempt to batch records together into fewer requests whenever multiple records a [...]
+    "queueBufferingMaxMessages": { "kind": "property", "displayName": "Queue Buffering Max Messages", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "10000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum number of unsent messages that can be queued up the producer when usin [...]
+    "receiveBufferBytes": { "kind": "property", "displayName": "Receive Buffer Bytes", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "65536", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The size of the TCP receive buffer (SO_RCVBUF) to use when reading data." },
+    "reconnectBackoffMs": { "kind": "property", "displayName": "Reconnect Backoff Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "50", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The amount of time to wait before attempting to reconnect to a given host. This avoids repeatedly co [...]
+    "recordMetadata": { "kind": "property", "displayName": "Record Metadata", "group": "producer", "label": "producer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": true, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Whether the producer should store the RecordMetadata results from sending to Kafka. The results are stored in a List co [...]
+    "requestRequiredAcks": { "kind": "property", "displayName": "Request Required Acks", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "enum": [ "-1", "0", "1", "all" ], "deprecated": false, "secret": false, "defaultValue": "1", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The number of acknowledgments the producer requires the leader to  [...]
+    "requestTimeoutMs": { "kind": "property", "displayName": "Request Timeout Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "30000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The amount of time the broker will wait trying to meet the request.required.acks requirement before s [...]
+    "retries": { "kind": "property", "displayName": "Retries", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "0", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Setting a value greater than zero will cause the client to resend any record whose send fails with a potentially transient er [...]
+    "retryBackoffMs": { "kind": "property", "displayName": "Retry Backoff Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "100", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Before each retry, the producer refreshes the metadata of relevant topics to see if a new leader has been e [...]
+    "sendBufferBytes": { "kind": "property", "displayName": "Send Buffer Bytes", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "131072", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Socket write buffer size" },
+    "valueSerializer": { "kind": "property", "displayName": "Value Serializer", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringSerializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The serializer class for messages." },
+    "workerPool": { "kind": "property", "displayName": "Worker Pool", "group": "producer", "label": "producer", "required": false, "type": "object", "javaType": "java.util.concurrent.ExecutorService", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom worker pool for continue routing Exchange after kafka server has acknowledge the message that was sent to [...]
+    "workerPoolCoreSize": { "kind": "property", "displayName": "Worker Pool Core Size", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "10", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Number of core threads for the worker pool for continue routing Exchange after kafka server has ack [...]
+    "workerPoolMaxSize": { "kind": "property", "displayName": "Worker Pool Max Size", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "20", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Maximum number of threads for the worker pool for continue routing Exchange after kafka server has ac [...]
+    "basicPropertyBinding": { "kind": "property", "displayName": "Basic Property Binding", "group": "advanced", "label": "advanced", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": true, "secret": false, "defaultValue": false, "description": "Whether the component should use basic property binding (Camel 2.x) or the newer property binding with additional capabilities" },
+    "schemaRegistryURL": { "kind": "property", "displayName": "Schema Registry URL", "group": "confluent", "label": "confluent", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "URL of the Confluent Platform schema registry servers to use. The format is host1:port1,host2:port2. This is known as schema [...]
+    "interceptorClasses": { "kind": "property", "displayName": "Interceptor Classes", "group": "monitoring", "label": "common,monitoring", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Sets interceptors for producer or consumers. Producer interceptors have to be classes implementing org.apache.kafk [...]
+    "kerberosBeforeReloginMinTime": { "kind": "property", "displayName": "Kerberos Before Relogin Min Time", "group": "security", "label": "common,security", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "60000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Login thread sleep time between refresh attempts." },
+    "kerberosInitCmd": { "kind": "property", "displayName": "Kerberos Init Cmd", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "\/usr\/bin\/kinit", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Kerberos kinit command path. Default is \/usr\/bin\/kinit" },
+    "kerberosPrincipalToLocalRules": { "kind": "property", "displayName": "Kerberos Principal To Local Rules", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "DEFAULT", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "A list of rules for mapping from principal names to short names (t [...]
+    "kerberosRenewJitter": { "kind": "property", "displayName": "Kerberos Renew Jitter", "group": "security", "label": "common,security", "required": false, "type": "number", "javaType": "java.lang.Double", "deprecated": false, "secret": false, "defaultValue": "0.05", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Percentage of random jitter added to the renewal time." },
+    "kerberosRenewWindowFactor": { "kind": "property", "displayName": "Kerberos Renew Window Factor", "group": "security", "label": "common,security", "required": false, "type": "number", "javaType": "java.lang.Double", "deprecated": false, "secret": false, "defaultValue": "0.8", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Login thread will sleep until the specified window factor of time from last ref [...]
+    "saslJaasConfig": { "kind": "property", "displayName": "Sasl Jaas Config", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": true, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Expose the kafka sasl.jaas.config parameter Example: org.apache.kafka.common.security.plain.PlainLoginModule required username [...]
+    "saslKerberosServiceName": { "kind": "property", "displayName": "Sasl Kerberos Service Name", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The Kerberos principal name that Kafka runs as. This can be defined either in Kafka's JAAS config or in Ka [...]
+    "saslMechanism": { "kind": "property", "displayName": "Sasl Mechanism", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "GSSAPI", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The Simple Authentication and Security Layer (SASL) Mechanism used. For the valid values see http:\/\/ [...]
+    "securityProtocol": { "kind": "property", "displayName": "Security Protocol", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "PLAINTEXT", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Protocol used to communicate with brokers. SASL_PLAINTEXT, PLAINTEXT and SSL are supported" },
+    "sslCipherSuites": { "kind": "property", "displayName": "Ssl Cipher Suites", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "A list of cipher suites. This is a named combination of authentication, encryption, MAC and key exchange algorithm used to  [...]
+    "sslContextParameters": { "kind": "property", "displayName": "Ssl Context Parameters", "group": "security", "label": "common,security", "required": false, "type": "object", "javaType": "org.apache.camel.support.jsse.SSLContextParameters", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "SSL configuration using a Camel SSLContextParameters object. If configured it's [...]
+    "sslEnabledProtocols": { "kind": "property", "displayName": "Ssl Enabled Protocols", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The list of protocols enabled for SSL connections. TLSv1.2, TLSv1.1 and TLSv1 are enabled by default." },
+    "sslEndpointAlgorithm": { "kind": "property", "displayName": "Ssl Endpoint Algorithm", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "https", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The endpoint identification algorithm to validate server hostname using server certificate." },
+    "sslKeymanagerAlgorithm": { "kind": "property", "displayName": "Ssl Keymanager Algorithm", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "SunX509", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The algorithm used by key manager factory for SSL connections. Default value is th [...]
+    "sslKeyPassword": { "kind": "property", "displayName": "Ssl Key Password", "group": "security", "label": "producer,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": true, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The password of the private key in the key store file. This is optional for client." },
+    "sslKeystoreLocation": { "kind": "property", "displayName": "Ssl Keystore Location", "group": "security", "label": "producer,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The location of the key store file. This is optional for client and can be used for two-way authentication for cl [...]
+    "sslKeystorePassword": { "kind": "property", "displayName": "Ssl Keystore Password", "group": "security", "label": "producer,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": true, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The store password for the key store file.This is optional for client and only needed if ssl.keystore.location is  [...]
+    "sslKeystoreType": { "kind": "property", "displayName": "Ssl Keystore Type", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "JKS", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The file format of the key store file. This is optional for client. Default value is JKS" },
+    "sslProtocol": { "kind": "property", "displayName": "Ssl Protocol", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The SSL protocol used to generate the SSLContext. Default setting is TLS, which is fine for most cases. Allowed values in recent JVM [...]
+    "sslProvider": { "kind": "property", "displayName": "Ssl Provider", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The name of the security provider used for SSL connections. Default value is the default security provider of the JVM." },
+    "sslTrustmanagerAlgorithm": { "kind": "property", "displayName": "Ssl Trustmanager Algorithm", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "PKIX", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The algorithm used by trust manager factory for SSL connections. Default value is [...]
+    "sslTruststoreLocation": { "kind": "property", "displayName": "Ssl Truststore Location", "group": "security", "label": "producer,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The location of the trust store file." },
+    "sslTruststorePassword": { "kind": "property", "displayName": "Ssl Truststore Password", "group": "security", "label": "producer,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": true, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The password for the trust store file." },
+    "sslTruststoreType": { "kind": "property", "displayName": "Ssl Truststore Type", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "JKS", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The file format of the trust store file. Default value is JKS." },
+    "useGlobalSslContextParameters": { "kind": "property", "displayName": "Use Global Ssl Context Parameters", "group": "security", "label": "security", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "description": "Enable usage of global SSL context parameters." }
+  },
+  "properties": {
+    "topic": { "kind": "path", "displayName": "Topic", "group": "common", "label": "common", "required": true, "type": "string", "javaType": "java.lang.String", "deprecated": false, "deprecationNote": "", "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Name of the topic to use. On the consumer you can use comma to separate multiple topics. A producer can only send a message to a single to [...]
+    "additionalProperties": { "kind": "parameter", "displayName": "Additional Properties", "group": "common", "label": "common", "required": false, "type": "object", "javaType": "java.util.Map<java.lang.String, java.lang.Object>", "prefix": "additionalProperties.", "multiValue": true, "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Sets additional properties for eithe [...]
+    "brokers": { "kind": "parameter", "displayName": "Brokers", "group": "common", "label": "common", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "URL of the Kafka brokers to use. The format is host1:port1,host2:port2, and the list can be a subset of brokers or a VIP pointing to a subset of broker [...]
+    "clientId": { "kind": "parameter", "displayName": "Client Id", "group": "common", "label": "common", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The client id is a user-specified string sent in each request to help trace calls. It should logically identify the application making the request." },
+    "headerFilterStrategy": { "kind": "parameter", "displayName": "Header Filter Strategy", "group": "common", "label": "common", "required": false, "type": "object", "javaType": "org.apache.camel.spi.HeaderFilterStrategy", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom HeaderFilterStrategy to filter header to and from Camel message." },
+    "reconnectBackoffMaxMs": { "kind": "parameter", "displayName": "Reconnect Backoff Max Ms", "group": "common", "label": "common", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "1000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum amount of time in milliseconds to wait when reconnecting to a broker that has repe [...]
+    "shutdownTimeout": { "kind": "parameter", "displayName": "Shutdown Timeout", "group": "common", "label": "common", "required": false, "type": "integer", "javaType": "int", "deprecated": false, "secret": false, "defaultValue": 30000, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Timeout in milli seconds to wait gracefully for the consumer or producer to shutdown and terminate its worker threads." },
+    "allowManualCommit": { "kind": "parameter", "displayName": "Allow Manual Commit", "group": "consumer", "label": "consumer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Whether to allow doing manual commits via KafkaManualCommit. If this option is enabled then an instance of Kaf [...]
+    "autoCommitEnable": { "kind": "parameter", "displayName": "Auto Commit Enable", "group": "consumer", "label": "consumer", "required": false, "type": "boolean", "javaType": "java.lang.Boolean", "deprecated": false, "secret": false, "defaultValue": "true", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "If true, periodically commit to ZooKeeper the offset of messages already fetched by the consumer. Thi [...]
+    "autoCommitIntervalMs": { "kind": "parameter", "displayName": "Auto Commit Interval Ms", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "5000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The frequency in ms that the consumer offsets are committed to zookeeper." },
+    "autoCommitOnStop": { "kind": "parameter", "displayName": "Auto Commit On Stop", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "enum": [ "sync", "async", "none" ], "deprecated": false, "secret": false, "defaultValue": "sync", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Whether to perform an explicit auto commit when the consumer stop [...]
+    "autoOffsetReset": { "kind": "parameter", "displayName": "Auto Offset Reset", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "enum": [ "latest", "earliest", "none" ], "deprecated": false, "secret": false, "defaultValue": "latest", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "What to do when there is no initial offset in ZooKeeper or if [...]
+    "breakOnFirstError": { "kind": "parameter", "displayName": "Break On First Error", "group": "consumer", "label": "consumer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "This options controls what happens when a consumer is processing an exchange and it fails. If the option is f [...]
+    "bridgeErrorHandler": { "kind": "parameter", "displayName": "Bridge Error Handler", "group": "consumer", "label": "consumer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "description": "Allows for bridging the consumer to the Camel routing Error Handler, which mean any exceptions occurred while the consumer is trying to pickup incoming messages, or the likes, will now be processed as a message and handled b [...]
+    "checkCrcs": { "kind": "parameter", "displayName": "Check Crcs", "group": "consumer", "label": "consumer", "required": false, "type": "boolean", "javaType": "java.lang.Boolean", "deprecated": false, "secret": false, "defaultValue": "true", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Automatically check the CRC32 of the records consumed. This ensures no on-the-wire or on-disk corruption to the mess [...]
+    "consumerRequestTimeoutMs": { "kind": "parameter", "displayName": "Consumer Request Timeout Ms", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "40000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The configuration controls the maximum amount of time the client will wait for the  [...]
+    "consumersCount": { "kind": "parameter", "displayName": "Consumers Count", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "int", "deprecated": false, "secret": false, "defaultValue": 1, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The number of consumers that connect to kafka server" },
+    "consumerStreams": { "kind": "parameter", "displayName": "Consumer Streams", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "int", "deprecated": false, "secret": false, "defaultValue": 10, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Number of concurrent consumers on the consumer" },
+    "fetchMaxBytes": { "kind": "parameter", "displayName": "Fetch Max Bytes", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "52428800", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum amount of data the server should return for a fetch request This is not an absolute maximum [...]
+    "fetchMinBytes": { "kind": "parameter", "displayName": "Fetch Min Bytes", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "1", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The minimum amount of data the server should return for a fetch request. If insufficient data is available the [...]
+    "fetchWaitMaxMs": { "kind": "parameter", "displayName": "Fetch Wait Max Ms", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "500", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum amount of time the server will block before answering the fetch request if there isn't suffic [...]
+    "groupId": { "kind": "parameter", "displayName": "Group Id", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "A string that uniquely identifies the group of consumer processes to which this consumer belongs. By setting the same group id multiple processes  [...]
+    "heartbeatIntervalMs": { "kind": "parameter", "displayName": "Heartbeat Interval Ms", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "3000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The expected time between heartbeats to the consumer coordinator when using Kafka's group manag [...]
+    "kafkaHeaderDeserializer": { "kind": "parameter", "displayName": "Kafka Header Deserializer", "group": "consumer", "label": "consumer", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom KafkaHeaderDeserializer to deserialize kafka header [...]
+    "keyDeserializer": { "kind": "parameter", "displayName": "Key Deserializer", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringDeserializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Deserializer class for key that implements the Deseria [...]
+    "maxPartitionFetchBytes": { "kind": "parameter", "displayName": "Max Partition Fetch Bytes", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "1048576", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum amount of data per-partition the server will return. The maximum total me [...]
+    "maxPollIntervalMs": { "kind": "parameter", "displayName": "Max Poll Interval Ms", "group": "consumer", "label": "consumer", "required": false, "type": "duration", "javaType": "java.lang.Long", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum delay between invocations of poll() when using consumer group management. This places an upper bound on the amou [...]
+    "maxPollRecords": { "kind": "parameter", "displayName": "Max Poll Records", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "500", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum number of records returned in a single call to poll()" },
+    "offsetRepository": { "kind": "parameter", "displayName": "Offset Repository", "group": "consumer", "label": "consumer", "required": false, "type": "object", "javaType": "org.apache.camel.spi.StateRepository<java.lang.String, java.lang.String>", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The offset repository to use in order to locally store the offset of eac [...]
+    "partitionAssignor": { "kind": "parameter", "displayName": "Partition Assignor", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.clients.consumer.RangeAssignor", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The class name of the partition assignment strategy that th [...]
+    "pollTimeoutMs": { "kind": "parameter", "displayName": "Poll Timeout Ms", "group": "consumer", "label": "consumer", "required": false, "type": "duration", "javaType": "java.lang.Long", "deprecated": false, "secret": false, "defaultValue": "5000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The timeout used when polling the KafkaConsumer." },
+    "seekTo": { "kind": "parameter", "displayName": "Seek To", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "enum": [ "beginning", "end" ], "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Set if KafkaConsumer will read from beginning or end on startup: beginning : read from beginning end : read from end [...]
+    "sessionTimeoutMs": { "kind": "parameter", "displayName": "Session Timeout Ms", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "10000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The timeout used to detect failures when using Kafka's group management facilities." },
+    "specificAvroReader": { "kind": "parameter", "displayName": "Specific Avro Reader", "group": "consumer", "label": "confluent,consumer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "This enables the use of a specific Avro reader for use with the Confluent Platform schema registry [...]
+    "topicIsPattern": { "kind": "parameter", "displayName": "Topic Is Pattern", "group": "consumer", "label": "consumer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Whether the topic is a pattern (regular expression). This can be used to subscribe to dynamic number of topics match [...]
+    "valueDeserializer": { "kind": "parameter", "displayName": "Value Deserializer", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringDeserializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Deserializer class for value that implements the D [...]
+    "exceptionHandler": { "kind": "parameter", "displayName": "Exception Handler", "group": "consumer (advanced)", "label": "consumer,advanced", "required": false, "type": "object", "javaType": "org.apache.camel.spi.ExceptionHandler", "optionalPrefix": "consumer.", "deprecated": false, "secret": false, "description": "To let the consumer use a custom ExceptionHandler. Notice if the option bridgeErrorHandler is enabled then this option is not in use. By default the consumer will deal with [...]
+    "exchangePattern": { "kind": "parameter", "displayName": "Exchange Pattern", "group": "consumer (advanced)", "label": "consumer,advanced", "required": false, "type": "object", "javaType": "org.apache.camel.ExchangePattern", "enum": [ "InOnly", "InOut", "InOptionalOut" ], "deprecated": false, "secret": false, "description": "Sets the exchange pattern when the consumer creates an exchange." },
+    "bufferMemorySize": { "kind": "parameter", "displayName": "Buffer Memory Size", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "33554432", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The total bytes of memory the producer can use to buffer records waiting to be sent to the server [...]
+    "compressionCodec": { "kind": "parameter", "displayName": "Compression Codec", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "enum": [ "none", "gzip", "snappy", "lz4" ], "deprecated": false, "secret": false, "defaultValue": "none", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "This parameter allows you to specify the compression codec  [...]
+    "connectionMaxIdleMs": { "kind": "parameter", "displayName": "Connection Max Idle Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "540000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Close idle connections after the number of milliseconds specified by this config." },
+    "enableIdempotence": { "kind": "parameter", "displayName": "Enable Idempotence", "group": "producer", "label": "producer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "If set to 'true' the producer will ensure that exactly one copy of each message is written in the stream. If 'f [...]
+    "kafkaHeaderSerializer": { "kind": "parameter", "displayName": "Kafka Header Serializer", "group": "producer", "label": "producer", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.serde.KafkaHeaderSerializer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom KafkaHeaderSerializer to serialize kafka headers values" },
+    "key": { "kind": "parameter", "displayName": "Key", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The record key (or null if no key is specified). If this option has been configured then it take precedence over header KafkaConstants#KEY" },
+    "keySerializer": { "kind": "parameter", "displayName": "Key Serializer", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringSerializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The serializer class for keys (defaults to the same as for m [...]
+    "lazyStartProducer": { "kind": "parameter", "displayName": "Lazy Start Producer", "group": "producer", "label": "producer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "description": "Whether the producer should be started lazy (on the first message). By starting lazy you can use this to allow CamelContext and routes to startup in situations where a producer may otherwise fail during starting and cause the  [...]
+    "lingerMs": { "kind": "parameter", "displayName": "Linger Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "0", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The producer groups together any records that arrive in between request transmissions into a single batched request. Norm [...]
+    "maxBlockMs": { "kind": "parameter", "displayName": "Max Block Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "60000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The configuration controls how long sending to kafka will block. These methods can be blocked for multiple reaso [...]
+    "maxInFlightRequest": { "kind": "parameter", "displayName": "Max In Flight Request", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "5", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum number of unacknowledged requests the client will send on a single connection before bl [...]
+    "maxRequestSize": { "kind": "parameter", "displayName": "Max Request Size", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "1048576", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum size of a request. This is also effectively a cap on the maximum record size. Note that th [...]
+    "metadataMaxAgeMs": { "kind": "parameter", "displayName": "Metadata Max Age Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "300000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The period of time in milliseconds after which we force a refresh of metadata even if we haven't s [...]
+    "metricReporters": { "kind": "parameter", "displayName": "Metric Reporters", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "A list of classes to use as metrics reporters. Implementing the MetricReporter interface allows plugging in classes that will be n [...]
+    "metricsSampleWindowMs": { "kind": "parameter", "displayName": "Metrics Sample Window Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "30000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The number of samples maintained to compute metrics." },
+    "noOfMetricsSample": { "kind": "parameter", "displayName": "No Of Metrics Sample", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "2", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The number of samples maintained to compute metrics." },
+    "partitioner": { "kind": "parameter", "displayName": "Partitioner", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.clients.producer.internals.DefaultPartitioner", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The partitioner class for partitioning messages amongst s [...]
+    "partitionKey": { "kind": "parameter", "displayName": "Partition Key", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The partition to which the record will be sent (or null if no partition was specified). If this option has been configured then it tak [...]
+    "producerBatchSize": { "kind": "parameter", "displayName": "Producer Batch Size", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "16384", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The producer will attempt to batch records together into fewer requests whenever multiple records  [...]
+    "queueBufferingMaxMessages": { "kind": "parameter", "displayName": "Queue Buffering Max Messages", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "10000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum number of unsent messages that can be queued up the producer when usi [...]
+    "receiveBufferBytes": { "kind": "parameter", "displayName": "Receive Buffer Bytes", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "65536", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The size of the TCP receive buffer (SO_RCVBUF) to use when reading data." },
+    "reconnectBackoffMs": { "kind": "parameter", "displayName": "Reconnect Backoff Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "50", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The amount of time to wait before attempting to reconnect to a given host. This avoids repeatedly c [...]
+    "recordMetadata": { "kind": "parameter", "displayName": "Record Metadata", "group": "producer", "label": "producer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": true, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Whether the producer should store the RecordMetadata results from sending to Kafka. The results are stored in a List c [...]
+    "requestRequiredAcks": { "kind": "parameter", "displayName": "Request Required Acks", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "enum": [ "-1", "0", "1", "all" ], "deprecated": false, "secret": false, "defaultValue": "1", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The number of acknowledgments the producer requires the leader to [...]
+    "requestTimeoutMs": { "kind": "parameter", "displayName": "Request Timeout Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "30000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The amount of time the broker will wait trying to meet the request.required.acks requirement before  [...]
+    "retries": { "kind": "parameter", "displayName": "Retries", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "0", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Setting a value greater than zero will cause the client to resend any record whose send fails with a potentially transient e [...]
+    "retryBackoffMs": { "kind": "parameter", "displayName": "Retry Backoff Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "100", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Before each retry, the producer refreshes the metadata of relevant topics to see if a new leader has been  [...]
+    "sendBufferBytes": { "kind": "parameter", "displayName": "Send Buffer Bytes", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "131072", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Socket write buffer size" },
+    "valueSerializer": { "kind": "parameter", "displayName": "Value Serializer", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringSerializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The serializer class for messages." },
+    "workerPool": { "kind": "parameter", "displayName": "Worker Pool", "group": "producer", "label": "producer", "required": false, "type": "object", "javaType": "java.util.concurrent.ExecutorService", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom worker pool for continue routing Exchange after kafka server has acknowledge the message that was sent t [...]
+    "workerPoolCoreSize": { "kind": "parameter", "displayName": "Worker Pool Core Size", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "10", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Number of core threads for the worker pool for continue routing Exchange after kafka server has ac [...]
+    "workerPoolMaxSize": { "kind": "parameter", "displayName": "Worker Pool Max Size", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "20", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Maximum number of threads for the worker pool for continue routing Exchange after kafka server has a [...]
+    "basicPropertyBinding": { "kind": "parameter", "displayName": "Basic Property Binding", "group": "advanced", "label": "advanced", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "description": "Whether the endpoint should use basic property binding (Camel 2.x) or the newer property binding with additional capabilities" },
+    "synchronous": { "kind": "parameter", "displayName": "Synchronous", "group": "advanced", "label": "advanced", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "description": "Sets whether synchronous processing should be strictly used, or Camel is allowed to use asynchronous processing (if supported)." },
+    "schemaRegistryURL": { "kind": "parameter", "displayName": "Schema Registry URL", "group": "confluent", "label": "confluent", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "URL of the Confluent Platform schema registry servers to use. The format is host1:port1,host2:port2. This is known as schem [...]
+    "interceptorClasses": { "kind": "parameter", "displayName": "Interceptor Classes", "group": "monitoring", "label": "common,monitoring", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Sets interceptors for producer or consumers. Producer interceptors have to be classes implementing org.apache.kaf [...]
+    "kerberosBeforeReloginMinTime": { "kind": "parameter", "displayName": "Kerberos Before Relogin Min Time", "group": "security", "label": "common,security", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "60000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Login thread sleep time between refresh attempts." },
+    "kerberosInitCmd": { "kind": "parameter", "displayName": "Kerberos Init Cmd", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "\/usr\/bin\/kinit", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Kerberos kinit command path. Default is \/usr\/bin\/kinit" },
+    "kerberosPrincipalToLocalRules": { "kind": "parameter", "displayName": "Kerberos Principal To Local Rules", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "DEFAULT", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "A list of rules for mapping from principal names to short names ( [...]
+    "kerberosRenewJitter": { "kind": "parameter", "displayName": "Kerberos Renew Jitter", "group": "security", "label": "common,security", "required": false, "type": "number", "javaType": "java.lang.Double", "deprecated": false, "secret": false, "defaultValue": "0.05", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Percentage of random jitter added to the renewal time." },
+    "kerberosRenewWindowFactor": { "kind": "parameter", "displayName": "Kerberos Renew Window Factor", "group": "security", "label": "common,security", "required": false, "type": "number", "javaType": "java.lang.Double", "deprecated": false, "secret": false, "defaultValue": "0.8", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Login thread will sleep until the specified window factor of time from last re [...]
+    "saslJaasConfig": { "kind": "parameter", "displayName": "Sasl Jaas Config", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": true, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Expose the kafka sasl.jaas.config parameter Example: org.apache.kafka.common.security.plain.PlainLoginModule required usernam [...]
+    "saslKerberosServiceName": { "kind": "parameter", "displayName": "Sasl Kerberos Service Name", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The Kerberos principal name that Kafka runs as. This can be defined either in Kafka's JAAS config or in K [...]
+    "saslMechanism": { "kind": "parameter", "displayName": "Sasl Mechanism", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "GSSAPI", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The Simple Authentication and Security Layer (SASL) Mechanism used. For the valid values see http:\/\ [...]
+    "securityProtocol": { "kind": "parameter", "displayName": "Security Protocol", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "PLAINTEXT", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Protocol used to communicate with brokers. SASL_PLAINTEXT, PLAINTEXT and SSL are supported" },
+    "sslCipherSuites": { "kind": "parameter", "displayName": "Ssl Cipher Suites", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "A list of cipher suites. This is a named combination of authentication, encryption, MAC and key exchange algorithm used to [...]
+    "sslContextParameters": { "kind": "parameter", "displayName": "Ssl Context Parameters", "group": "security", "label": "common,security", "required": false, "type": "object", "javaType": "org.apache.camel.support.jsse.SSLContextParameters", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "SSL configuration using a Camel SSLContextParameters object. If configured it' [...]
+    "sslEnabledProtocols": { "kind": "parameter", "displayName": "Ssl Enabled Protocols", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The list of protocols enabled for SSL connections. TLSv1.2, TLSv1.1 and TLSv1 are enabled by default." },
+    "sslEndpointAlgorithm": { "kind": "parameter", "displayName": "Ssl Endpoint Algorithm", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "https", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The endpoint identification algorithm to validate server hostname using server certific [...]
+    "sslKeymanagerAlgorithm": { "kind": "parameter", "displayName": "Ssl Keymanager Algorithm", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "SunX509", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The algorithm used by key manager factory for SSL connections. Default value is t [...]
+    "sslKeyPassword": { "kind": "parameter", "displayName": "Ssl Key Password", "group": "security", "label": "producer,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": true, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The password of the private key in the key store file. This is optional for client." },
+    "sslKeystoreLocation": { "kind": "parameter", "displayName": "Ssl Keystore Location", "group": "security", "label": "producer,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The location of the key store file. This is optional for client and can be used for two-way authentication for c [...]
+    "sslKeystorePassword": { "kind": "parameter", "displayName": "Ssl Keystore Password", "group": "security", "label": "producer,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": true, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The store password for the key store file.This is optional for client and only needed if ssl.keystore.location is [...]
+    "sslKeystoreType": { "kind": "parameter", "displayName": "Ssl Keystore Type", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "JKS", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The file format of the key store file. This is optional for client. Default value is JKS" },
+    "sslProtocol": { "kind": "parameter", "displayName": "Ssl Protocol", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The SSL protocol used to generate the SSLContext. Default setting is TLS, which is fine for most cases. Allowed values in recent JV [...]
+    "sslProvider": { "kind": "parameter", "displayName": "Ssl Provider", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The name of the security provider used for SSL connections. Default value is the default security provider of the JVM." },
+    "sslTrustmanagerAlgorithm": { "kind": "parameter", "displayName": "Ssl Trustmanager Algorithm", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "PKIX", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The algorithm used by trust manager factory for SSL connections. Default value i [...]
+    "sslTruststoreLocation": { "kind": "parameter", "displayName": "Ssl Truststore Location", "group": "security", "label": "producer,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The location of the trust store file." },
+    "sslTruststorePassword": { "kind": "parameter", "displayName": "Ssl Truststore Password", "group": "security", "label": "producer,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": true, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The password for the trust store file." },
+    "sslTruststoreType": { "kind": "parameter", "displayName": "Ssl Truststore Type", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "JKS", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The file format of the trust store file. Default value is JKS." }
+  }
+}
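
As an illustration only (not part of the regenerated catalog above), a producer route could set a few of the listed options as endpoint URI parameters using the names from this commit series; the topic, broker address and serializer class below are placeholders.

    import org.apache.camel.builder.RouteBuilder;

    public class KafkaProducerOptionsRoute extends RouteBuilder {
        @Override
        public void configure() {
            // placeholder topic and broker; option names follow the regenerated catalog above
            from("direct:start")
                .to("kafka:my-topic"
                        + "?brokers=localhost:9092"
                        + "&valueSerializer=org.apache.kafka.common.serialization.StringSerializer"
                        + "&requestRequiredAcks=all"
                        + "&workerPoolCoreSize=10&workerPoolMaxSize=20");
        }
    }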


[camel] 01/13: CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - keySerializerClass

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/camel.git

commit 0aa75eeb3f9fd72e69a47d7dba845327a5dc64e1
Author: Andrea Cosentino <an...@gmail.com>
AuthorDate: Thu Oct 29 18:35:58 2020 +0100

    CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - keySerializerClass
---
 .../org/apache/camel/component/kafka/KafkaConfiguration.java | 12 ++++++------
 .../java/org/apache/camel/component/kafka/KafkaProducer.java |  4 ++--
 .../org/apache/camel/component/kafka/KafkaComponentTest.java |  2 +-
 3 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConfiguration.java b/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConfiguration.java
index b5f580f..1f25c14 100644
--- a/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConfiguration.java
+++ b/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConfiguration.java
@@ -160,7 +160,7 @@ public class KafkaConfiguration implements Cloneable, HeaderFilterStrategyAware
     @UriParam(label = "producer", defaultValue = KafkaConstants.KAFKA_DEFAULT_SERIALIZER)
     private String serializerClass = KafkaConstants.KAFKA_DEFAULT_SERIALIZER;
     @UriParam(label = "producer", defaultValue = KafkaConstants.KAFKA_DEFAULT_SERIALIZER)
-    private String keySerializerClass = KafkaConstants.KAFKA_DEFAULT_SERIALIZER;
+    private String keySerializer = KafkaConstants.KAFKA_DEFAULT_SERIALIZER;
 
     @UriParam(label = "producer")
     private String key;
@@ -340,7 +340,7 @@ public class KafkaConfiguration implements Cloneable, HeaderFilterStrategyAware
 
     public Properties createProducerProperties() {
         Properties props = new Properties();
-        addPropertyIfNotNull(props, ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, getKeySerializerClass());
+        addPropertyIfNotNull(props, ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, getKeySerializer());
         addPropertyIfNotNull(props, ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, getSerializerClass());
         addPropertyIfNotNull(props, ProducerConfig.ACKS_CONFIG, getRequestRequiredAcks());
         addPropertyIfNotNull(props, ProducerConfig.BUFFER_MEMORY_CONFIG, getBufferMemorySize());
@@ -886,15 +886,15 @@ public class KafkaConfiguration implements Cloneable, HeaderFilterStrategyAware
         this.serializerClass = serializerClass;
     }
 
-    public String getKeySerializerClass() {
-        return keySerializerClass;
+    public String getKeySerializer() {
+        return keySerializer;
     }
 
     /**
      * The serializer class for keys (defaults to the same as for messages if nothing is given).
      */
-    public void setKeySerializerClass(String keySerializerClass) {
-        this.keySerializerClass = keySerializerClass;
+    public void setKeySerializer(String keySerializer) {
+        this.keySerializer = keySerializer;
     }
 
     public String getKerberosInitCmd() {
diff --git a/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaProducer.java b/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaProducer.java
index 7fbb73e..ef7b241 100644
--- a/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaProducer.java
+++ b/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaProducer.java
@@ -224,7 +224,7 @@ public class KafkaProducer extends DefaultAsyncProducer {
 
                             final Object messageKey = innerKey != null
                                     ? tryConvertToSerializedType(innerExchange, innerKey,
-                                            endpoint.getConfiguration().getKeySerializerClass())
+                                            endpoint.getConfiguration().getKeySerializer())
                                     : null;
                             hasMessageKey = messageKey != null;
                         }
@@ -265,7 +265,7 @@ public class KafkaProducer extends DefaultAsyncProducer {
         Object key = endpoint.getConfiguration().getKey() != null
                 ? endpoint.getConfiguration().getKey() : exchange.getIn().getHeader(KafkaConstants.KEY);
         final Object messageKey = key != null
-                ? tryConvertToSerializedType(exchange, key, endpoint.getConfiguration().getKeySerializerClass()) : null;
+                ? tryConvertToSerializedType(exchange, key, endpoint.getConfiguration().getKeySerializer()) : null;
         final boolean hasMessageKey = messageKey != null;
 
         // must convert each entry of the iterator into the value according to
diff --git a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaComponentTest.java b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaComponentTest.java
index e293e20..b47e3bf 100644
--- a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaComponentTest.java
+++ b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaComponentTest.java
@@ -130,7 +130,7 @@ public class KafkaComponentTest extends CamelTestSupport {
         assertEquals(Integer.valueOf(3), endpoint.getConfiguration().getNoOfMetricsSample());
         assertEquals(Integer.valueOf(12344), endpoint.getConfiguration().getMetricsSampleWindowMs());
         assertEquals(KafkaConstants.KAFKA_DEFAULT_SERIALIZER, endpoint.getConfiguration().getSerializerClass());
-        assertEquals(KafkaConstants.KAFKA_DEFAULT_SERIALIZER, endpoint.getConfiguration().getKeySerializerClass());
+        assertEquals(KafkaConstants.KAFKA_DEFAULT_SERIALIZER, endpoint.getConfiguration().getKeySerializer());
         assertEquals("testing", endpoint.getConfiguration().getSslKeyPassword());
         assertEquals("/abc", endpoint.getConfiguration().getSslKeystoreLocation());
         assertEquals("testing", endpoint.getConfiguration().getSslKeystorePassword());


[camel] 03/13: CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - serializerClass

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/camel.git

commit ebf1b0a21ca2720da402569a836c0f96d3762c9d
Author: Andrea Cosentino <an...@gmail.com>
AuthorDate: Thu Oct 29 18:50:59 2020 +0100

    CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - serializerClass
---
 .../org/apache/camel/component/kafka/KafkaConfiguration.java | 12 ++++++------
 .../java/org/apache/camel/component/kafka/KafkaProducer.java |  4 ++--
 2 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConfiguration.java b/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConfiguration.java
index 1f25c14..922420b 100644
--- a/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConfiguration.java
+++ b/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConfiguration.java
@@ -158,7 +158,7 @@ public class KafkaConfiguration implements Cloneable, HeaderFilterStrategyAware
     @UriParam(label = "producer", defaultValue = "10000")
     private Integer queueBufferingMaxMessages = 10000;
     @UriParam(label = "producer", defaultValue = KafkaConstants.KAFKA_DEFAULT_SERIALIZER)
-    private String serializerClass = KafkaConstants.KAFKA_DEFAULT_SERIALIZER;
+    private String valueSerializer = KafkaConstants.KAFKA_DEFAULT_SERIALIZER;
     @UriParam(label = "producer", defaultValue = KafkaConstants.KAFKA_DEFAULT_SERIALIZER)
     private String keySerializer = KafkaConstants.KAFKA_DEFAULT_SERIALIZER;
 
@@ -341,7 +341,7 @@ public class KafkaConfiguration implements Cloneable, HeaderFilterStrategyAware
     public Properties createProducerProperties() {
         Properties props = new Properties();
         addPropertyIfNotNull(props, ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, getKeySerializer());
-        addPropertyIfNotNull(props, ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, getSerializerClass());
+        addPropertyIfNotNull(props, ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, getValueSerializer());
         addPropertyIfNotNull(props, ProducerConfig.ACKS_CONFIG, getRequestRequiredAcks());
         addPropertyIfNotNull(props, ProducerConfig.BUFFER_MEMORY_CONFIG, getBufferMemorySize());
         addPropertyIfNotNull(props, ProducerConfig.COMPRESSION_TYPE_CONFIG, getCompressionCodec());
@@ -875,15 +875,15 @@ public class KafkaConfiguration implements Cloneable, HeaderFilterStrategyAware
         this.queueBufferingMaxMessages = queueBufferingMaxMessages;
     }
 
-    public String getSerializerClass() {
-        return serializerClass;
+    public String getValueSerializer() {
+        return valueSerializer;
     }
 
     /**
      * The serializer class for messages.
      */
-    public void setSerializerClass(String serializerClass) {
-        this.serializerClass = serializerClass;
+    public void setValueSerializer(String valueSerializer) {
+        this.valueSerializer = valueSerializer;
     }
 
     public String getKeySerializer() {
diff --git a/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaProducer.java b/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaProducer.java
index ef7b241..0466514 100644
--- a/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaProducer.java
+++ b/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaProducer.java
@@ -231,7 +231,7 @@ public class KafkaProducer extends DefaultAsyncProducer {
 
                         ex = innerExchange == null ? exchange : innerExchange;
                         value = tryConvertToSerializedType(ex, innerMmessage.getBody(),
-                                endpoint.getConfiguration().getSerializerClass());
+                                endpoint.getConfiguration().getValueSerializer());
 
                     }
 
@@ -270,7 +270,7 @@ public class KafkaProducer extends DefaultAsyncProducer {
 
         // must convert each entry of the iterator into the value according to
         // the serializer
-        Object value = tryConvertToSerializedType(exchange, msg, endpoint.getConfiguration().getSerializerClass());
+        Object value = tryConvertToSerializedType(exchange, msg, endpoint.getConfiguration().getValueSerializer());
 
         ProducerRecord record;
         if (hasPartitionKey && hasMessageKey) {
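
For reference (not from the commit), the renamed Camel options map onto the plain Kafka producer properties, roughly as createProducerProperties() does in the hunk above; a minimal stand-alone sketch with placeholder serializer classes:

    import java.util.Properties;

    import org.apache.kafka.clients.producer.ProducerConfig;

    public class ProducerPropertiesSketch {
        public static void main(String[] args) {
            Properties props = new Properties();
            // Camel option "keySerializer"   -> Kafka "key.serializer"
            props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
                    "org.apache.kafka.common.serialization.StringSerializer");
            // Camel option "valueSerializer" -> Kafka "value.serializer"
            props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
                    "org.apache.kafka.common.serialization.StringSerializer");
            props.forEach((k, v) -> System.out.println(k + " = " + v));
        }
    }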


[camel] 12/13: CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - kafkaHeaderDeserializer tests

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/camel.git

commit 44d75a8c8349b9d052a524efd20d0ce568dc2c4f
Author: Andrea Cosentino <an...@gmail.com>
AuthorDate: Thu Oct 29 19:33:41 2020 +0100

    CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - kafkaHeaderDeserializer tests
---
 .../java/org/apache/camel/component/kafka/KafkaConsumerFullTest.java    | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerFullTest.java b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerFullTest.java
index 8e1cad7..79f5192 100644
--- a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerFullTest.java
+++ b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerFullTest.java
@@ -194,7 +194,7 @@ public class KafkaConsumerFullTest extends BaseEmbeddedKafkaTest {
     @Test
     public void headerDeserializerCouldBeOverridden() {
         KafkaEndpoint kafkaEndpoint
-                = context.getEndpoint("kafka:random_topic?kafkaHeaderDeserializer=#myHeaderDeserializer", KafkaEndpoint.class);
+                = context.getEndpoint("kafka:random_topic?headerDeserializer=#myHeaderDeserializer", KafkaEndpoint.class);
         assertIsInstanceOf(MyKafkaHeaderDeserializer.class, kafkaEndpoint.getConfiguration().getHeaderDeserializer());
     }
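
A consumer-side sketch (not part of the commit): a header deserializer bean can be bound in the registry and referenced through the renamed headerDeserializer option. The serde package name for the default implementation is assumed; topic and broker are placeholders.

    import org.apache.camel.CamelContext;
    import org.apache.camel.component.kafka.KafkaEndpoint;
    import org.apache.camel.component.kafka.serde.DefaultKafkaHeaderDeserializer; // package assumed
    import org.apache.camel.impl.DefaultCamelContext;

    public class HeaderDeserializerBindingSketch {
        public static void main(String[] args) throws Exception {
            CamelContext context = new DefaultCamelContext();
            context.start();

            // bind a deserializer bean and point the URI at it with #myHeaderDeserializer
            context.getRegistry().bind("myHeaderDeserializer", new DefaultKafkaHeaderDeserializer());
            KafkaEndpoint endpoint = context.getEndpoint(
                    "kafka:random_topic?brokers=localhost:9092&headerDeserializer=#myHeaderDeserializer",
                    KafkaEndpoint.class);

            // the resolved bean is available via the renamed accessor
            System.out.println(endpoint.getConfiguration().getHeaderDeserializer());
            context.stop();
        }
    }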
 


[camel] 08/13: CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - kafkaHeaderSerializer tests

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/camel.git

commit 557f5b3b064f193f39e85d6ad8ffe638110fe14b
Author: Andrea Cosentino <an...@gmail.com>
AuthorDate: Thu Oct 29 19:23:49 2020 +0100

    CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - kafkaHeaderSerializer tests
---
 .../java/org/apache/camel/component/kafka/KafkaProducerFullTest.java  | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaProducerFullTest.java b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaProducerFullTest.java
index 2f5b664..720bbad 100644
--- a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaProducerFullTest.java
+++ b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaProducerFullTest.java
@@ -419,8 +419,8 @@ public class KafkaProducerFullTest extends BaseEmbeddedKafkaTest {
     @Test
     public void headerSerializerCouldBeOverridden() {
         KafkaEndpoint kafkaEndpoint = context
-                .getEndpoint("kafka:TOPIC_PROPAGATED_HEADERS?kafkaHeaderSerializer=#myHeaderSerializer", KafkaEndpoint.class);
-        assertIsInstanceOf(MyKafkaHeadersSerializer.class, kafkaEndpoint.getConfiguration().getKafkaHeaderSerializer());
+                .getEndpoint("kafka:TOPIC_PROPAGATED_HEADERS?headerSerializer=#myHeaderSerializer", KafkaEndpoint.class);
+        assertIsInstanceOf(MyKafkaHeadersSerializer.class, kafkaEndpoint.getConfiguration().getHeaderSerializer());
     }
 
     private byte[] getHeaderValue(String headerKey, Headers headers) {
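
A small producer-side sketch (not from the commit): the renamed headerSerializer property can also be set programmatically on a shared KafkaConfiguration instead of per endpoint URI. The default serde class and its package are assumed; the broker address is a placeholder.

    import org.apache.camel.CamelContext;
    import org.apache.camel.component.kafka.KafkaComponent;
    import org.apache.camel.component.kafka.KafkaConfiguration;
    import org.apache.camel.component.kafka.serde.DefaultKafkaHeaderSerializer; // package assumed
    import org.apache.camel.impl.DefaultCamelContext;

    public class HeaderSerializerConfigSketch {
        public static void main(String[] args) throws Exception {
            KafkaConfiguration configuration = new KafkaConfiguration();
            configuration.setBrokers("localhost:9092");
            configuration.setHeaderSerializer(new DefaultKafkaHeaderSerializer());

            // share the configuration across all endpoints created from this component
            KafkaComponent kafka = new KafkaComponent();
            kafka.setConfiguration(configuration);

            CamelContext context = new DefaultCamelContext();
            context.addComponent("kafka", kafka);
            context.start();
            System.out.println(kafka.getConfiguration().getHeaderSerializer());
            context.stop();
        }
    }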


[camel] 10/13: CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - kafkaHeaderDeserializer

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/camel.git

commit 55b06778aca3d903496a66d686c6a43cc81d160c
Author: Andrea Cosentino <an...@gmail.com>
AuthorDate: Thu Oct 29 19:30:51 2020 +0100

    CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - kafkaHeaderDeserializer
---
 .../org/apache/camel/component/kafka/KafkaConfiguration.java | 12 ++++++------
 .../java/org/apache/camel/component/kafka/KafkaConsumer.java |  2 +-
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConfiguration.java b/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConfiguration.java
index ae251a4..0bb1fec 100644
--- a/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConfiguration.java
+++ b/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConfiguration.java
@@ -75,7 +75,7 @@ public class KafkaConfiguration implements Cloneable, HeaderFilterStrategyAware
     @UriParam(label = "consumer", defaultValue = "1")
     private int consumersCount = 1;
     @UriParam(label = "consumer", description = "To use a custom KafkaHeaderDeserializer to deserialize kafka headers values")
-    private KafkaHeaderDeserializer kafkaHeaderDeserializer = new DefaultKafkaHeaderDeserializer();
+    private KafkaHeaderDeserializer headerDeserializer = new DefaultKafkaHeaderDeserializer();
 
     // interceptor.classes
     @UriParam(label = "common,monitoring")
@@ -1674,17 +1674,17 @@ public class KafkaConfiguration implements Cloneable, HeaderFilterStrategyAware
         this.headerFilterStrategy = headerFilterStrategy;
     }
 
-    public KafkaHeaderDeserializer getKafkaHeaderDeserializer() {
-        return kafkaHeaderDeserializer;
+    public KafkaHeaderDeserializer getHeaderDeserializer() {
+        return headerDeserializer;
     }
 
     /**
      * Sets custom KafkaHeaderDeserializer for deserialization kafka headers values to camel headers values.
      *
-     * @param kafkaHeaderDeserializer custom kafka header deserializer to be used
+     * @param headerDeserializer custom kafka header deserializer to be used
      */
-    public void setKafkaHeaderDeserializer(final KafkaHeaderDeserializer kafkaHeaderDeserializer) {
-        this.kafkaHeaderDeserializer = kafkaHeaderDeserializer;
+    public void setHeaderDeserializer(final KafkaHeaderDeserializer headerDeserializer) {
+        this.headerDeserializer = headerDeserializer;
     }
 
     public KafkaHeaderSerializer getHeaderSerializer() {
diff --git a/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConsumer.java b/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConsumer.java
index 0cba5e0..f406125 100644
--- a/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConsumer.java
+++ b/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConsumer.java
@@ -515,7 +515,7 @@ public class KafkaConsumer extends DefaultConsumer {
     private void propagateHeaders(
             ConsumerRecord<Object, Object> record, Exchange exchange, KafkaConfiguration kafkaConfiguration) {
         HeaderFilterStrategy headerFilterStrategy = kafkaConfiguration.getHeaderFilterStrategy();
-        KafkaHeaderDeserializer headerDeserializer = kafkaConfiguration.getKafkaHeaderDeserializer();
+        KafkaHeaderDeserializer headerDeserializer = kafkaConfiguration.getHeaderDeserializer();
         StreamSupport.stream(record.headers().spliterator(), false)
                 .filter(header -> shouldBeFiltered(header, exchange, headerFilterStrategy))
                 .forEach(header -> exchange.getIn().setHeader(header.key(),
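
Outside the commit, the propagation logic touched above can be pictured in isolation with plain kafka-clients types; the filter and the decode step below are simplified stand-ins for the HeaderFilterStrategy and the KafkaHeaderDeserializer used in propagateHeaders(). Header names and values are placeholders.

    import java.nio.charset.StandardCharsets;
    import java.util.stream.StreamSupport;

    import org.apache.kafka.common.header.Headers;
    import org.apache.kafka.common.header.internals.RecordHeaders;

    public class HeaderPropagationSketch {
        public static void main(String[] args) {
            Headers headers = new RecordHeaders();
            headers.add("traceId", "abc-123".getBytes(StandardCharsets.UTF_8));
            headers.add("kafka.internal", "skip-me".getBytes(StandardCharsets.UTF_8));

            StreamSupport.stream(headers.spliterator(), false)
                    // stand-in for the HeaderFilterStrategy check
                    .filter(h -> !h.key().startsWith("kafka."))
                    // stand-in for the header deserializer: decode the raw bytes to a String
                    .forEach(h -> System.out.println(h.key() + " -> "
                            + new String(h.value(), StandardCharsets.UTF_8)));
        }
    }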


[camel] 02/13: CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - keySerializerClass Test

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/camel.git

commit abdd514fd61d14b320e304fa7a3c35df9346cdf2
Author: Andrea Cosentino <an...@gmail.com>
AuthorDate: Thu Oct 29 18:44:40 2020 +0100

    CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - keySerializerClass Test
---
 .../org/apache/camel/catalog/docs/kafka-component.adoc         |  4 ++--
 .../apache/camel/component/kafka/KafkaComponentConfigurer.java | 10 +++++-----
 .../apache/camel/component/kafka/KafkaEndpointConfigurer.java  | 10 +++++-----
 .../apache/camel/component/kafka/KafkaEndpointUriFactory.java  |  2 +-
 .../resources/org/apache/camel/component/kafka/kafka.json      |  4 ++--
 components/camel-kafka/src/main/docs/kafka-component.adoc      |  4 ++--
 .../apache/camel/component/kafka/KafkaProducerFullTest.java    |  2 +-
 .../builder/component/dsl/KafkaComponentBuilderFactory.java    |  8 ++++----
 .../builder/endpoint/dsl/KafkaEndpointBuilderFactory.java      |  5 ++---
 docs/components/modules/ROOT/pages/kafka-component.adoc        |  4 ++--
 10 files changed, 26 insertions(+), 27 deletions(-)

diff --git a/catalog/camel-catalog/src/generated/resources/org/apache/camel/catalog/docs/kafka-component.adoc b/catalog/camel-catalog/src/generated/resources/org/apache/camel/catalog/docs/kafka-component.adoc
index d04c75b..57badeb 100644
--- a/catalog/camel-catalog/src/generated/resources/org/apache/camel/catalog/docs/kafka-component.adoc
+++ b/catalog/camel-catalog/src/generated/resources/org/apache/camel/catalog/docs/kafka-component.adoc
@@ -91,7 +91,7 @@ The Kafka component supports 97 options, which are listed below.
 | *enableIdempotence* (producer) | If set to 'true' the producer will ensure that exactly one copy of each message is written in the stream. If 'false', producer retries may write duplicates of the retried message in the stream. If set to true this option will require max.in.flight.requests.per.connection to be set to 1 and retries cannot be zero and additionally acks must be set to 'all'. | false | boolean
 | *kafkaHeaderSerializer* (producer) | To use a custom KafkaHeaderSerializer to serialize kafka headers values |  | KafkaHeaderSerializer
 | *key* (producer) | The record key (or null if no key is specified). If this option has been configured then it take precedence over header KafkaConstants#KEY |  | String
-| *keySerializerClass* (producer) | The serializer class for keys (defaults to the same as for messages if nothing is given). | org.apache.kafka.common.serialization.StringSerializer | String
+| *keySerializer* (producer) | The serializer class for keys (defaults to the same as for messages if nothing is given). | org.apache.kafka.common.serialization.StringSerializer | String
 | *lazyStartProducer* (producer) | Whether the producer should be started lazy (on the first message). By starting lazy you can use this to allow CamelContext and routes to startup in situations where a producer may otherwise fail during starting and cause the route to fail being started. By deferring this startup to be lazy then the startup failure can be handled during routing messages via Camel's routing error handlers. Beware that when the first message is processed then creating and [...]
 | *lingerMs* (producer) | The producer groups together any records that arrive in between request transmissions into a single batched request. Normally this occurs only under load when records arrive faster than they can be sent out. However in some circumstances the client may want to reduce the number of requests even under moderate load. This setting accomplishes this by adding a small amount of artificial delay that is, rather than immediately sending out a record the producer will w [...]
 | *maxBlockMs* (producer) | The configuration controls how long sending to kafka will block. These methods can be blocked for multiple reasons. For e.g: buffer full, metadata unavailable.This configuration imposes maximum limit on the total time spent in fetching metadata, serialization of key and value, partitioning and allocation of buffer memory when doing a send(). In case of partitionsFor(), this configuration imposes a maximum time threshold on waiting for metadata | 60000 | Integer
@@ -218,7 +218,7 @@ with the following path and query parameters:
 | *enableIdempotence* (producer) | If set to 'true' the producer will ensure that exactly one copy of each message is written in the stream. If 'false', producer retries may write duplicates of the retried message in the stream. If set to true this option will require max.in.flight.requests.per.connection to be set to 1 and retries cannot be zero and additionally acks must be set to 'all'. | false | boolean
 | *kafkaHeaderSerializer* (producer) | To use a custom KafkaHeaderSerializer to serialize kafka headers values |  | KafkaHeaderSerializer
 | *key* (producer) | The record key (or null if no key is specified). If this option has been configured then it take precedence over header KafkaConstants#KEY |  | String
-| *keySerializerClass* (producer) | The serializer class for keys (defaults to the same as for messages if nothing is given). | org.apache.kafka.common.serialization.StringSerializer | String
+| *keySerializer* (producer) | The serializer class for keys (defaults to the same as for messages if nothing is given). | org.apache.kafka.common.serialization.StringSerializer | String
 | *lazyStartProducer* (producer) | Whether the producer should be started lazy (on the first message). By starting lazy you can use this to allow CamelContext and routes to startup in situations where a producer may otherwise fail during starting and cause the route to fail being started. By deferring this startup to be lazy then the startup failure can be handled during routing messages via Camel's routing error handlers. Beware that when the first message is processed then creating and [...]
 | *lingerMs* (producer) | The producer groups together any records that arrive in between request transmissions into a single batched request. Normally this occurs only under load when records arrive faster than they can be sent out. However in some circumstances the client may want to reduce the number of requests even under moderate load. This setting accomplishes this by adding a small amount of artificial delay that is, rather than immediately sending out a record the producer will w [...]
 | *maxBlockMs* (producer) | The configuration controls how long sending to kafka will block. These methods can be blocked for multiple reasons. For e.g: buffer full, metadata unavailable.This configuration imposes maximum limit on the total time spent in fetching metadata, serialization of key and value, partitioning and allocation of buffer memory when doing a send(). In case of partitionsFor(), this configuration imposes a maximum time threshold on waiting for metadata | 60000 | Integer
diff --git a/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaComponentConfigurer.java b/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaComponentConfigurer.java
index e90a755..ac443ba 100644
--- a/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaComponentConfigurer.java
+++ b/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaComponentConfigurer.java
@@ -61,7 +61,7 @@ public class KafkaComponentConfigurer extends PropertyConfigurerSupport implemen
         map.put("enableIdempotence", boolean.class);
         map.put("kafkaHeaderSerializer", org.apache.camel.component.kafka.serde.KafkaHeaderSerializer.class);
         map.put("key", java.lang.String.class);
-        map.put("keySerializerClass", java.lang.String.class);
+        map.put("keySerializer", java.lang.String.class);
         map.put("lazyStartProducer", boolean.class);
         map.put("lingerMs", java.lang.Integer.class);
         map.put("maxBlockMs", java.lang.Integer.class);
@@ -200,8 +200,8 @@ public class KafkaComponentConfigurer extends PropertyConfigurerSupport implemen
         case "key": getOrCreateConfiguration(target).setKey(property(camelContext, java.lang.String.class, value)); return true;
         case "keydeserializer":
         case "keyDeserializer": getOrCreateConfiguration(target).setKeyDeserializer(property(camelContext, java.lang.String.class, value)); return true;
-        case "keyserializerclass":
-        case "keySerializerClass": getOrCreateConfiguration(target).setKeySerializerClass(property(camelContext, java.lang.String.class, value)); return true;
+        case "keyserializer":
+        case "keySerializer": getOrCreateConfiguration(target).setKeySerializer(property(camelContext, java.lang.String.class, value)); return true;
         case "lazystartproducer":
         case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
         case "lingerms":
@@ -402,8 +402,8 @@ public class KafkaComponentConfigurer extends PropertyConfigurerSupport implemen
         case "key": return getOrCreateConfiguration(target).getKey();
         case "keydeserializer":
         case "keyDeserializer": return getOrCreateConfiguration(target).getKeyDeserializer();
-        case "keyserializerclass":
-        case "keySerializerClass": return getOrCreateConfiguration(target).getKeySerializerClass();
+        case "keyserializer":
+        case "keySerializer": return getOrCreateConfiguration(target).getKeySerializer();
         case "lazystartproducer":
         case "lazyStartProducer": return target.isLazyStartProducer();
         case "lingerms":
diff --git a/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointConfigurer.java b/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointConfigurer.java
index 89fd0b2..bbd428c 100644
--- a/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointConfigurer.java
+++ b/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointConfigurer.java
@@ -62,7 +62,7 @@ public class KafkaEndpointConfigurer extends PropertyConfigurerSupport implement
         map.put("enableIdempotence", boolean.class);
         map.put("kafkaHeaderSerializer", org.apache.camel.component.kafka.serde.KafkaHeaderSerializer.class);
         map.put("key", java.lang.String.class);
-        map.put("keySerializerClass", java.lang.String.class);
+        map.put("keySerializer", java.lang.String.class);
         map.put("lazyStartProducer", boolean.class);
         map.put("lingerMs", java.lang.Integer.class);
         map.put("maxBlockMs", java.lang.Integer.class);
@@ -195,8 +195,8 @@ public class KafkaEndpointConfigurer extends PropertyConfigurerSupport implement
         case "key": target.getConfiguration().setKey(property(camelContext, java.lang.String.class, value)); return true;
         case "keydeserializer":
         case "keyDeserializer": target.getConfiguration().setKeyDeserializer(property(camelContext, java.lang.String.class, value)); return true;
-        case "keyserializerclass":
-        case "keySerializerClass": target.getConfiguration().setKeySerializerClass(property(camelContext, java.lang.String.class, value)); return true;
+        case "keyserializer":
+        case "keySerializer": target.getConfiguration().setKeySerializer(property(camelContext, java.lang.String.class, value)); return true;
         case "lazystartproducer":
         case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
         case "lingerms":
@@ -397,8 +397,8 @@ public class KafkaEndpointConfigurer extends PropertyConfigurerSupport implement
         case "key": return target.getConfiguration().getKey();
         case "keydeserializer":
         case "keyDeserializer": return target.getConfiguration().getKeyDeserializer();
-        case "keyserializerclass":
-        case "keySerializerClass": return target.getConfiguration().getKeySerializerClass();
+        case "keyserializer":
+        case "keySerializer": return target.getConfiguration().getKeySerializer();
         case "lazystartproducer":
         case "lazyStartProducer": return target.isLazyStartProducer();
         case "lingerms":
diff --git a/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointUriFactory.java b/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointUriFactory.java
index 21614f1..1aae4ac 100644
--- a/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointUriFactory.java
+++ b/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointUriFactory.java
@@ -50,6 +50,7 @@ public class KafkaEndpointUriFactory extends org.apache.camel.support.component.
         props.add("metricsSampleWindowMs");
         props.add("sslContextParameters");
         props.add("sslKeyPassword");
+        props.add("keySerializer");
         props.add("noOfMetricsSample");
         props.add("maxPartitionFetchBytes");
         props.add("partitionKey");
@@ -80,7 +81,6 @@ public class KafkaEndpointUriFactory extends org.apache.camel.support.component.
         props.add("saslKerberosServiceName");
         props.add("bridgeErrorHandler");
         props.add("shutdownTimeout");
-        props.add("keySerializerClass");
         props.add("saslMechanism");
         props.add("workerPool");
         props.add("lazyStartProducer");
diff --git a/components/camel-kafka/src/generated/resources/org/apache/camel/component/kafka/kafka.json b/components/camel-kafka/src/generated/resources/org/apache/camel/component/kafka/kafka.json
index 91ab79c..20e7293 100644
--- a/components/camel-kafka/src/generated/resources/org/apache/camel/component/kafka/kafka.json
+++ b/components/camel-kafka/src/generated/resources/org/apache/camel/component/kafka/kafka.json
@@ -65,7 +65,7 @@
     "enableIdempotence": { "kind": "property", "displayName": "Enable Idempotence", "group": "producer", "label": "producer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "If set to 'true' the producer will ensure that exactly one copy of each message is written in the stream. If 'fa [...]
     "kafkaHeaderSerializer": { "kind": "property", "displayName": "Kafka Header Serializer", "group": "producer", "label": "producer", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.serde.KafkaHeaderSerializer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom KafkaHeaderSerializer to serialize kafka headers values" },
     "key": { "kind": "property", "displayName": "Key", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The record key (or null if no key is specified). If this option has been configured then it take precedence over header KafkaConstants#KEY" },
-    "keySerializerClass": { "kind": "property", "displayName": "Key Serializer Class", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringSerializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The serializer class for keys (defaults to the sam [...]
+    "keySerializer": { "kind": "property", "displayName": "Key Serializer", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringSerializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The serializer class for keys (defaults to the same as for me [...]
     "lazyStartProducer": { "kind": "property", "displayName": "Lazy Start Producer", "group": "producer", "label": "producer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "description": "Whether the producer should be started lazy (on the first message). By starting lazy you can use this to allow CamelContext and routes to startup in situations where a producer may otherwise fail during starting and cause the r [...]
     "lingerMs": { "kind": "property", "displayName": "Linger Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "0", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The producer groups together any records that arrive in between request transmissions into a single batched request. Norma [...]
     "maxBlockMs": { "kind": "property", "displayName": "Max Block Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "60000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The configuration controls how long sending to kafka will block. These methods can be blocked for multiple reason [...]
@@ -165,7 +165,7 @@
     "enableIdempotence": { "kind": "parameter", "displayName": "Enable Idempotence", "group": "producer", "label": "producer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "If set to 'true' the producer will ensure that exactly one copy of each message is written in the stream. If 'f [...]
     "kafkaHeaderSerializer": { "kind": "parameter", "displayName": "Kafka Header Serializer", "group": "producer", "label": "producer", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.serde.KafkaHeaderSerializer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom KafkaHeaderSerializer to serialize kafka headers values" },
     "key": { "kind": "parameter", "displayName": "Key", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The record key (or null if no key is specified). If this option has been configured then it take precedence over header KafkaConstants#KEY" },
-    "keySerializerClass": { "kind": "parameter", "displayName": "Key Serializer Class", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringSerializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The serializer class for keys (defaults to the sa [...]
+    "keySerializer": { "kind": "parameter", "displayName": "Key Serializer", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringSerializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The serializer class for keys (defaults to the same as for m [...]
     "lazyStartProducer": { "kind": "parameter", "displayName": "Lazy Start Producer", "group": "producer", "label": "producer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "description": "Whether the producer should be started lazy (on the first message). By starting lazy you can use this to allow CamelContext and routes to startup in situations where a producer may otherwise fail during starting and cause the  [...]
     "lingerMs": { "kind": "parameter", "displayName": "Linger Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "0", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The producer groups together any records that arrive in between request transmissions into a single batched request. Norm [...]
     "maxBlockMs": { "kind": "parameter", "displayName": "Max Block Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "60000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The configuration controls how long sending to kafka will block. These methods can be blocked for multiple reaso [...]
diff --git a/components/camel-kafka/src/main/docs/kafka-component.adoc b/components/camel-kafka/src/main/docs/kafka-component.adoc
index d04c75b..57badeb 100644
--- a/components/camel-kafka/src/main/docs/kafka-component.adoc
+++ b/components/camel-kafka/src/main/docs/kafka-component.adoc
@@ -91,7 +91,7 @@ The Kafka component supports 97 options, which are listed below.
 | *enableIdempotence* (producer) | If set to 'true', the producer will ensure that exactly one copy of each message is written in the stream. If 'false', producer retries may write duplicates of the retried message in the stream. If set to 'true', this option requires max.in.flight.requests.per.connection to be set to 1, retries cannot be zero, and additionally acks must be set to 'all'. | false | boolean
 | *kafkaHeaderSerializer* (producer) | To use a custom KafkaHeaderSerializer to serialize Kafka header values |  | KafkaHeaderSerializer
 | *key* (producer) | The record key (or null if no key is specified). If this option has been configured then it takes precedence over the header KafkaConstants#KEY |  | String
-| *keySerializerClass* (producer) | The serializer class for keys (defaults to the same as for messages if nothing is given). | org.apache.kafka.common.serialization.StringSerializer | String
+| *keySerializer* (producer) | The serializer class for keys (defaults to the same as for messages if nothing is given). | org.apache.kafka.common.serialization.StringSerializer | String
 | *lazyStartProducer* (producer) | Whether the producer should be started lazy (on the first message). By starting lazy you can use this to allow CamelContext and routes to startup in situations where a producer may otherwise fail during starting and cause the route to fail being started. By deferring this startup to be lazy then the startup failure can be handled during routing messages via Camel's routing error handlers. Beware that when the first message is processed then creating and [...]
 | *lingerMs* (producer) | The producer groups together any records that arrive in between request transmissions into a single batched request. Normally this occurs only under load when records arrive faster than they can be sent out. However in some circumstances the client may want to reduce the number of requests even under moderate load. This setting accomplishes this by adding a small amount of artificial delay that is, rather than immediately sending out a record the producer will w [...]
 | *maxBlockMs* (producer) | The configuration controls how long sending to Kafka will block. These methods can be blocked for multiple reasons, e.g. buffer full or metadata unavailable. This configuration imposes a maximum limit on the total time spent in fetching metadata, serialization of key and value, partitioning and allocation of buffer memory when doing a send(). In case of partitionsFor(), this configuration imposes a maximum time threshold on waiting for metadata | 60000 | Integer
@@ -218,7 +218,7 @@ with the following path and query parameters:
 | *enableIdempotence* (producer) | If set to 'true', the producer will ensure that exactly one copy of each message is written in the stream. If 'false', producer retries may write duplicates of the retried message in the stream. If set to 'true', this option requires max.in.flight.requests.per.connection to be set to 1, retries cannot be zero, and additionally acks must be set to 'all'. | false | boolean
 | *kafkaHeaderSerializer* (producer) | To use a custom KafkaHeaderSerializer to serialize Kafka header values |  | KafkaHeaderSerializer
 | *key* (producer) | The record key (or null if no key is specified). If this option has been configured then it takes precedence over the header KafkaConstants#KEY |  | String
-| *keySerializerClass* (producer) | The serializer class for keys (defaults to the same as for messages if nothing is given). | org.apache.kafka.common.serialization.StringSerializer | String
+| *keySerializer* (producer) | The serializer class for keys (defaults to the same as for messages if nothing is given). | org.apache.kafka.common.serialization.StringSerializer | String
 | *lazyStartProducer* (producer) | Whether the producer should be started lazy (on the first message). By starting lazy you can use this to allow CamelContext and routes to startup in situations where a producer may otherwise fail during starting and cause the route to fail being started. By deferring this startup to be lazy then the startup failure can be handled during routing messages via Camel's routing error handlers. Beware that when the first message is processed then creating and [...]
 | *lingerMs* (producer) | The producer groups together any records that arrive in between request transmissions into a single batched request. Normally this occurs only under load when records arrive faster than they can be sent out. However in some circumstances the client may want to reduce the number of requests even under moderate load. This setting accomplishes this by adding a small amount of artificial delay that is, rather than immediately sending out a record the producer will w [...]
 | *maxBlockMs* (producer) | The configuration controls how long sending to Kafka will block. These methods can be blocked for multiple reasons, e.g. buffer full or metadata unavailable. This configuration imposes a maximum limit on the total time spent in fetching metadata, serialization of key and value, partitioning and allocation of buffer memory when doing a send(). In case of partitionsFor(), this configuration imposes a maximum time threshold on waiting for metadata | 60000 | Integer
diff --git a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaProducerFullTest.java b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaProducerFullTest.java
index 6515f0e..95e673c 100644
--- a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaProducerFullTest.java
+++ b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaProducerFullTest.java
@@ -84,7 +84,7 @@ public class KafkaProducerFullTest extends BaseEmbeddedKafkaTest {
 
     @EndpointInject("kafka:" + TOPIC_BYTES + "?requestRequiredAcks=-1"
                     + "&serializerClass=org.apache.kafka.common.serialization.ByteArraySerializer&"
-                    + "keySerializerClass=org.apache.kafka.common.serialization.ByteArraySerializer")
+                    + "keySerializer=org.apache.kafka.common.serialization.ByteArraySerializer")
     private Endpoint toBytes;
 
     @EndpointInject("kafka:" + TOPIC_PROPAGATED_HEADERS + "?requestRequiredAcks=-1")
diff --git a/core/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/KafkaComponentBuilderFactory.java b/core/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/KafkaComponentBuilderFactory.java
index 6110068..061fcc1 100644
--- a/core/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/KafkaComponentBuilderFactory.java
+++ b/core/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/KafkaComponentBuilderFactory.java
@@ -717,9 +717,9 @@ public interface KafkaComponentBuilderFactory {
          * Default: org.apache.kafka.common.serialization.StringSerializer
          * Group: producer
          */
-        default KafkaComponentBuilder keySerializerClass(
-                java.lang.String keySerializerClass) {
-            doSetProperty("keySerializerClass", keySerializerClass);
+        default KafkaComponentBuilder keySerializer(
+                java.lang.String keySerializer) {
+            doSetProperty("keySerializer", keySerializer);
             return this;
         }
         /**
@@ -1608,7 +1608,7 @@ public interface KafkaComponentBuilderFactory {
             case "enableIdempotence": getOrCreateConfiguration((KafkaComponent) component).setEnableIdempotence((boolean) value); return true;
             case "kafkaHeaderSerializer": getOrCreateConfiguration((KafkaComponent) component).setKafkaHeaderSerializer((org.apache.camel.component.kafka.serde.KafkaHeaderSerializer) value); return true;
             case "key": getOrCreateConfiguration((KafkaComponent) component).setKey((java.lang.String) value); return true;
-            case "keySerializerClass": getOrCreateConfiguration((KafkaComponent) component).setKeySerializerClass((java.lang.String) value); return true;
+            case "keySerializer": getOrCreateConfiguration((KafkaComponent) component).setKeySerializer((java.lang.String) value); return true;
             case "lazyStartProducer": ((KafkaComponent) component).setLazyStartProducer((boolean) value); return true;
             case "lingerMs": getOrCreateConfiguration((KafkaComponent) component).setLingerMs((java.lang.Integer) value); return true;
             case "maxBlockMs": getOrCreateConfiguration((KafkaComponent) component).setMaxBlockMs((java.lang.Integer) value); return true;
diff --git a/core/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KafkaEndpointBuilderFactory.java b/core/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KafkaEndpointBuilderFactory.java
index b4419dc..27f6a8f 100644
--- a/core/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KafkaEndpointBuilderFactory.java
+++ b/core/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KafkaEndpointBuilderFactory.java
@@ -1838,9 +1838,8 @@ public interface KafkaEndpointBuilderFactory {
          * Default: org.apache.kafka.common.serialization.StringSerializer
          * Group: producer
          */
-        default KafkaEndpointProducerBuilder keySerializerClass(
-                String keySerializerClass) {
-            doSetProperty("keySerializerClass", keySerializerClass);
+        default KafkaEndpointProducerBuilder keySerializer(String keySerializer) {
+            doSetProperty("keySerializer", keySerializer);
             return this;
         }
         /**
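
The endpoint DSL counterpart of the rename is the keySerializer(String) method above. A sketch of a route written with EndpointRouteBuilder, assuming hypothetical topic and broker values:

    import org.apache.camel.builder.endpoint.EndpointRouteBuilder;

    public class EndpointDslSketch extends EndpointRouteBuilder {
        @Override
        public void configure() {
            // keySerializer(...) replaces keySerializerClass(...) in the endpoint DSL as well.
            from(direct("start"))
                .to(kafka("mytopic")
                        .brokers("localhost:9092")
                        .keySerializer("org.apache.kafka.common.serialization.StringSerializer"));
        }
    }
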
diff --git a/docs/components/modules/ROOT/pages/kafka-component.adoc b/docs/components/modules/ROOT/pages/kafka-component.adoc
index a580bb2..9e2b92f 100644
--- a/docs/components/modules/ROOT/pages/kafka-component.adoc
+++ b/docs/components/modules/ROOT/pages/kafka-component.adoc
@@ -93,7 +93,7 @@ The Kafka component supports 97 options, which are listed below.
 | *enableIdempotence* (producer) | If set to 'true', the producer will ensure that exactly one copy of each message is written in the stream. If 'false', producer retries may write duplicates of the retried message in the stream. If set to 'true', this option requires max.in.flight.requests.per.connection to be set to 1, retries cannot be zero, and additionally acks must be set to 'all'. | false | boolean
 | *kafkaHeaderSerializer* (producer) | To use a custom KafkaHeaderSerializer to serialize Kafka header values |  | KafkaHeaderSerializer
 | *key* (producer) | The record key (or null if no key is specified). If this option has been configured then it takes precedence over the header KafkaConstants#KEY |  | String
-| *keySerializerClass* (producer) | The serializer class for keys (defaults to the same as for messages if nothing is given). | org.apache.kafka.common.serialization.StringSerializer | String
+| *keySerializer* (producer) | The serializer class for keys (defaults to the same as for messages if nothing is given). | org.apache.kafka.common.serialization.StringSerializer | String
 | *lazyStartProducer* (producer) | Whether the producer should be started lazy (on the first message). By starting lazy you can use this to allow CamelContext and routes to startup in situations where a producer may otherwise fail during starting and cause the route to fail being started. By deferring this startup to be lazy then the startup failure can be handled during routing messages via Camel's routing error handlers. Beware that when the first message is processed then creating and [...]
 | *lingerMs* (producer) | The producer groups together any records that arrive in between request transmissions into a single batched request. Normally this occurs only under load when records arrive faster than they can be sent out. However in some circumstances the client may want to reduce the number of requests even under moderate load. This setting accomplishes this by adding a small amount of artificial delay that is, rather than immediately sending out a record the producer will w [...]
 | *maxBlockMs* (producer) | The configuration controls how long sending to Kafka will block. These methods can be blocked for multiple reasons, e.g. buffer full or metadata unavailable. This configuration imposes a maximum limit on the total time spent in fetching metadata, serialization of key and value, partitioning and allocation of buffer memory when doing a send(). In case of partitionsFor(), this configuration imposes a maximum time threshold on waiting for metadata | 60000 | Integer
@@ -220,7 +220,7 @@ with the following path and query parameters:
 | *enableIdempotence* (producer) | If set to 'true', the producer will ensure that exactly one copy of each message is written in the stream. If 'false', producer retries may write duplicates of the retried message in the stream. If set to 'true', this option requires max.in.flight.requests.per.connection to be set to 1, retries cannot be zero, and additionally acks must be set to 'all'. | false | boolean
 | *kafkaHeaderSerializer* (producer) | To use a custom KafkaHeaderSerializer to serialize Kafka header values |  | KafkaHeaderSerializer
 | *key* (producer) | The record key (or null if no key is specified). If this option has been configured then it takes precedence over the header KafkaConstants#KEY |  | String
-| *keySerializerClass* (producer) | The serializer class for keys (defaults to the same as for messages if nothing is given). | org.apache.kafka.common.serialization.StringSerializer | String
+| *keySerializer* (producer) | The serializer class for keys (defaults to the same as for messages if nothing is given). | org.apache.kafka.common.serialization.StringSerializer | String
 | *lazyStartProducer* (producer) | Whether the producer should be started lazy (on the first message). By starting lazy you can use this to allow CamelContext and routes to startup in situations where a producer may otherwise fail during starting and cause the route to fail being started. By deferring this startup to be lazy then the startup failure can be handled during routing messages via Camel's routing error handlers. Beware that when the first message is processed then creating and [...]
 | *lingerMs* (producer) | The producer groups together any records that arrive in between request transmissions into a single batched request. Normally this occurs only under load when records arrive faster than they can be sent out. However in some circumstances the client may want to reduce the number of requests even under moderate load. This setting accomplishes this by adding a small amount of artificial delay that is, rather than immediately sending out a record the producer will w [...]
 | *maxBlockMs* (producer) | The configuration controls how long sending to Kafka will block. These methods can be blocked for multiple reasons, e.g. buffer full or metadata unavailable. This configuration imposes a maximum limit on the total time spent in fetching metadata, serialization of key and value, partitioning and allocation of buffer memory when doing a send(). In case of partitionsFor(), this configuration imposes a maximum time threshold on waiting for metadata | 60000 | Integer


[camel] 05/13: CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - serializerClass test

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/camel.git

commit 7bc1fc1fcb5ea0f33aed99fe10b1906a4defb8c9
Author: Andrea Cosentino <an...@gmail.com>
AuthorDate: Thu Oct 29 18:52:54 2020 +0100

    CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - serializerClass test
---
 .../resources/org/apache/camel/catalog/docs/kafka-component.adoc  | 4 ++--
 components/camel-kafka/src/main/docs/kafka-component.adoc         | 4 ++--
 .../java/org/apache/camel/component/kafka/KafkaComponentTest.java | 2 +-
 .../camel/builder/component/dsl/KafkaComponentBuilderFactory.java | 8 ++++----
 .../camel/builder/endpoint/dsl/KafkaEndpointBuilderFactory.java   | 6 +++---
 docs/components/modules/ROOT/pages/kafka-component.adoc           | 4 ++--
 6 files changed, 14 insertions(+), 14 deletions(-)

diff --git a/catalog/camel-catalog/src/generated/resources/org/apache/camel/catalog/docs/kafka-component.adoc b/catalog/camel-catalog/src/generated/resources/org/apache/camel/catalog/docs/kafka-component.adoc
index 57badeb..9cabbeb 100644
--- a/catalog/camel-catalog/src/generated/resources/org/apache/camel/catalog/docs/kafka-component.adoc
+++ b/catalog/camel-catalog/src/generated/resources/org/apache/camel/catalog/docs/kafka-component.adoc
@@ -113,7 +113,7 @@ The Kafka component supports 97 options, which are listed below.
 | *retries* (producer) | Setting a value greater than zero will cause the client to resend any record whose send fails with a potentially transient error. Note that this retry is no different than if the client resent the record upon receiving the error. Allowing retries will potentially change the ordering of records because if two records are sent to a single partition, and the first fails and is retried but the second succeeds, then the second record may appear first. | 0 | Integer
 | *retryBackoffMs* (producer) | Before each retry, the producer refreshes the metadata of relevant topics to see if a new leader has been elected. Since leader election takes a bit of time, this property specifies the amount of time that the producer waits before refreshing the metadata. | 100 | Integer
 | *sendBufferBytes* (producer) | Socket write buffer size | 131072 | Integer
-| *serializerClass* (producer) | The serializer class for messages. | org.apache.kafka.common.serialization.StringSerializer | String
+| *valueSerializer* (producer) | The serializer class for messages. | org.apache.kafka.common.serialization.StringSerializer | String
 | *workerPool* (producer) | To use a custom worker pool for continuing to route the Exchange after the Kafka server has acknowledged the message that was sent to it from the KafkaProducer, using asynchronous non-blocking processing. If using this option then you must handle the lifecycle of the thread pool to shut the pool down when no longer needed. |  | ExecutorService
 | *workerPoolCoreSize* (producer) | Number of core threads for the worker pool for continuing to route the Exchange after the Kafka server has acknowledged the message that was sent to it from the KafkaProducer, using asynchronous non-blocking processing. | 10 | Integer
 | *workerPoolMaxSize* (producer) | Maximum number of threads for the worker pool for continuing to route the Exchange after the Kafka server has acknowledged the message that was sent to it from the KafkaProducer, using asynchronous non-blocking processing. | 20 | Integer
@@ -240,7 +240,7 @@ with the following path and query parameters:
 | *retries* (producer) | Setting a value greater than zero will cause the client to resend any record whose send fails with a potentially transient error. Note that this retry is no different than if the client resent the record upon receiving the error. Allowing retries will potentially change the ordering of records because if two records are sent to a single partition, and the first fails and is retried but the second succeeds, then the second record may appear first. | 0 | Integer
 | *retryBackoffMs* (producer) | Before each retry, the producer refreshes the metadata of relevant topics to see if a new leader has been elected. Since leader election takes a bit of time, this property specifies the amount of time that the producer waits before refreshing the metadata. | 100 | Integer
 | *sendBufferBytes* (producer) | Socket write buffer size | 131072 | Integer
-| *serializerClass* (producer) | The serializer class for messages. | org.apache.kafka.common.serialization.StringSerializer | String
+| *valueSerializer* (producer) | The serializer class for messages. | org.apache.kafka.common.serialization.StringSerializer | String
 | *workerPool* (producer) | To use a custom worker pool for continuing to route the Exchange after the Kafka server has acknowledged the message that was sent to it from the KafkaProducer, using asynchronous non-blocking processing. If using this option then you must handle the lifecycle of the thread pool to shut the pool down when no longer needed. |  | ExecutorService
 | *workerPoolCoreSize* (producer) | Number of core threads for the worker pool for continuing to route the Exchange after the Kafka server has acknowledged the message that was sent to it from the KafkaProducer, using asynchronous non-blocking processing. | 10 | Integer
 | *workerPoolMaxSize* (producer) | Maximum number of threads for the worker pool for continuing to route the Exchange after the Kafka server has acknowledged the message that was sent to it from the KafkaProducer, using asynchronous non-blocking processing. | 20 | Integer
diff --git a/components/camel-kafka/src/main/docs/kafka-component.adoc b/components/camel-kafka/src/main/docs/kafka-component.adoc
index 57badeb..9cabbeb 100644
--- a/components/camel-kafka/src/main/docs/kafka-component.adoc
+++ b/components/camel-kafka/src/main/docs/kafka-component.adoc
@@ -113,7 +113,7 @@ The Kafka component supports 97 options, which are listed below.
 | *retries* (producer) | Setting a value greater than zero will cause the client to resend any record whose send fails with a potentially transient error. Note that this retry is no different than if the client resent the record upon receiving the error. Allowing retries will potentially change the ordering of records because if two records are sent to a single partition, and the first fails and is retried but the second succeeds, then the second record may appear first. | 0 | Integer
 | *retryBackoffMs* (producer) | Before each retry, the producer refreshes the metadata of relevant topics to see if a new leader has been elected. Since leader election takes a bit of time, this property specifies the amount of time that the producer waits before refreshing the metadata. | 100 | Integer
 | *sendBufferBytes* (producer) | Socket write buffer size | 131072 | Integer
-| *serializerClass* (producer) | The serializer class for messages. | org.apache.kafka.common.serialization.StringSerializer | String
+| *valueSerializer* (producer) | The serializer class for messages. | org.apache.kafka.common.serialization.StringSerializer | String
 | *workerPool* (producer) | To use a custom worker pool for continuing to route the Exchange after the Kafka server has acknowledged the message that was sent to it from the KafkaProducer, using asynchronous non-blocking processing. If using this option then you must handle the lifecycle of the thread pool to shut the pool down when no longer needed. |  | ExecutorService
 | *workerPoolCoreSize* (producer) | Number of core threads for the worker pool for continuing to route the Exchange after the Kafka server has acknowledged the message that was sent to it from the KafkaProducer, using asynchronous non-blocking processing. | 10 | Integer
 | *workerPoolMaxSize* (producer) | Maximum number of threads for the worker pool for continuing to route the Exchange after the Kafka server has acknowledged the message that was sent to it from the KafkaProducer, using asynchronous non-blocking processing. | 20 | Integer
@@ -240,7 +240,7 @@ with the following path and query parameters:
 | *retries* (producer) | Setting a value greater than zero will cause the client to resend any record whose send fails with a potentially transient error. Note that this retry is no different than if the client resent the record upon receiving the error. Allowing retries will potentially change the ordering of records because if two records are sent to a single partition, and the first fails and is retried but the second succeeds, then the second record may appear first. | 0 | Integer
 | *retryBackoffMs* (producer) | Before each retry, the producer refreshes the metadata of relevant topics to see if a new leader has been elected. Since leader election takes a bit of time, this property specifies the amount of time that the producer waits before refreshing the metadata. | 100 | Integer
 | *sendBufferBytes* (producer) | Socket write buffer size | 131072 | Integer
-| *serializerClass* (producer) | The serializer class for messages. | org.apache.kafka.common.serialization.StringSerializer | String
+| *valueSerializer* (producer) | The serializer class for messages. | org.apache.kafka.common.serialization.StringSerializer | String
 | *workerPool* (producer) | To use a custom worker pool for continuing to route the Exchange after the Kafka server has acknowledged the message that was sent to it from the KafkaProducer, using asynchronous non-blocking processing. If using this option then you must handle the lifecycle of the thread pool to shut the pool down when no longer needed. |  | ExecutorService
 | *workerPoolCoreSize* (producer) | Number of core threads for the worker pool for continuing to route the Exchange after the Kafka server has acknowledged the message that was sent to it from the KafkaProducer, using asynchronous non-blocking processing. | 10 | Integer
 | *workerPoolMaxSize* (producer) | Maximum number of threads for the worker pool for continuing to route the Exchange after the Kafka server has acknowledged the message that was sent to it from the KafkaProducer, using asynchronous non-blocking processing. | 20 | Integer
diff --git a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaComponentTest.java b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaComponentTest.java
index b47e3bf..6af3393 100644
--- a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaComponentTest.java
+++ b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaComponentTest.java
@@ -129,7 +129,7 @@ public class KafkaComponentTest extends CamelTestSupport {
                 endpoint.getConfiguration().getMetricReporters());
         assertEquals(Integer.valueOf(3), endpoint.getConfiguration().getNoOfMetricsSample());
         assertEquals(Integer.valueOf(12344), endpoint.getConfiguration().getMetricsSampleWindowMs());
-        assertEquals(KafkaConstants.KAFKA_DEFAULT_SERIALIZER, endpoint.getConfiguration().getSerializerClass());
+        assertEquals(KafkaConstants.KAFKA_DEFAULT_SERIALIZER, endpoint.getConfiguration().getValueSerializer());
         assertEquals(KafkaConstants.KAFKA_DEFAULT_SERIALIZER, endpoint.getConfiguration().getKeySerializer());
         assertEquals("testing", endpoint.getConfiguration().getSslKeyPassword());
         assertEquals("/abc", endpoint.getConfiguration().getSslKeystoreLocation());
diff --git a/core/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/KafkaComponentBuilderFactory.java b/core/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/KafkaComponentBuilderFactory.java
index 061fcc1..0f38264 100644
--- a/core/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/KafkaComponentBuilderFactory.java
+++ b/core/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/KafkaComponentBuilderFactory.java
@@ -1090,9 +1090,9 @@ public interface KafkaComponentBuilderFactory {
          * Default: org.apache.kafka.common.serialization.StringSerializer
          * Group: producer
          */
-        default KafkaComponentBuilder serializerClass(
-                java.lang.String serializerClass) {
-            doSetProperty("serializerClass", serializerClass);
+        default KafkaComponentBuilder valueSerializer(
+                java.lang.String valueSerializer) {
+            doSetProperty("valueSerializer", valueSerializer);
             return this;
         }
         /**
@@ -1630,7 +1630,7 @@ public interface KafkaComponentBuilderFactory {
             case "retries": getOrCreateConfiguration((KafkaComponent) component).setRetries((java.lang.Integer) value); return true;
             case "retryBackoffMs": getOrCreateConfiguration((KafkaComponent) component).setRetryBackoffMs((java.lang.Integer) value); return true;
             case "sendBufferBytes": getOrCreateConfiguration((KafkaComponent) component).setSendBufferBytes((java.lang.Integer) value); return true;
-            case "serializerClass": getOrCreateConfiguration((KafkaComponent) component).setSerializerClass((java.lang.String) value); return true;
+            case "valueSerializer": getOrCreateConfiguration((KafkaComponent) component).setValueSerializer((java.lang.String) value); return true;
             case "workerPool": getOrCreateConfiguration((KafkaComponent) component).setWorkerPool((java.util.concurrent.ExecutorService) value); return true;
             case "workerPoolCoreSize": getOrCreateConfiguration((KafkaComponent) component).setWorkerPoolCoreSize((java.lang.Integer) value); return true;
             case "workerPoolMaxSize": getOrCreateConfiguration((KafkaComponent) component).setWorkerPoolMaxSize((java.lang.Integer) value); return true;
diff --git a/core/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KafkaEndpointBuilderFactory.java b/core/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KafkaEndpointBuilderFactory.java
index 27f6a8f..67be3f5 100644
--- a/core/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KafkaEndpointBuilderFactory.java
+++ b/core/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KafkaEndpointBuilderFactory.java
@@ -2527,9 +2527,9 @@ public interface KafkaEndpointBuilderFactory {
          * Default: org.apache.kafka.common.serialization.StringSerializer
          * Group: producer
          */
-        default KafkaEndpointProducerBuilder serializerClass(
-                String serializerClass) {
-            doSetProperty("serializerClass", serializerClass);
+        default KafkaEndpointProducerBuilder valueSerializer(
+                String valueSerializer) {
+            doSetProperty("valueSerializer", valueSerializer);
             return this;
         }
         /**
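
And the value-side rename in the endpoint DSL, complementing the keySerializer sketch earlier; again the topic and broker are hypothetical:

    import org.apache.camel.builder.endpoint.EndpointRouteBuilder;

    public class ValueSerializerDslSketch extends EndpointRouteBuilder {
        @Override
        public void configure() {
            // valueSerializer(...) replaces serializerClass(...) for the record value.
            from(direct("bytes"))
                .to(kafka("bytesTopic")
                        .brokers("localhost:9092")
                        .valueSerializer("org.apache.kafka.common.serialization.ByteArraySerializer"));
        }
    }
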
diff --git a/docs/components/modules/ROOT/pages/kafka-component.adoc b/docs/components/modules/ROOT/pages/kafka-component.adoc
index 9e2b92f..e307248 100644
--- a/docs/components/modules/ROOT/pages/kafka-component.adoc
+++ b/docs/components/modules/ROOT/pages/kafka-component.adoc
@@ -115,7 +115,7 @@ The Kafka component supports 97 options, which are listed below.
 | *retries* (producer) | Setting a value greater than zero will cause the client to resend any record whose send fails with a potentially transient error. Note that this retry is no different than if the client resent the record upon receiving the error. Allowing retries will potentially change the ordering of records because if two records are sent to a single partition, and the first fails and is retried but the second succeeds, then the second record may appear first. | 0 | Integer
 | *retryBackoffMs* (producer) | Before each retry, the producer refreshes the metadata of relevant topics to see if a new leader has been elected. Since leader election takes a bit of time, this property specifies the amount of time that the producer waits before refreshing the metadata. | 100 | Integer
 | *sendBufferBytes* (producer) | Socket write buffer size | 131072 | Integer
-| *serializerClass* (producer) | The serializer class for messages. | org.apache.kafka.common.serialization.StringSerializer | String
+| *valueSerializer* (producer) | The serializer class for messages. | org.apache.kafka.common.serialization.StringSerializer | String
 | *workerPool* (producer) | To use a custom worker pool for continuing to route the Exchange after the Kafka server has acknowledged the message that was sent to it from the KafkaProducer, using asynchronous non-blocking processing. If using this option then you must handle the lifecycle of the thread pool to shut the pool down when no longer needed. |  | ExecutorService
 | *workerPoolCoreSize* (producer) | Number of core threads for the worker pool for continuing to route the Exchange after the Kafka server has acknowledged the message that was sent to it from the KafkaProducer, using asynchronous non-blocking processing. | 10 | Integer
 | *workerPoolMaxSize* (producer) | Maximum number of threads for the worker pool for continuing to route the Exchange after the Kafka server has acknowledged the message that was sent to it from the KafkaProducer, using asynchronous non-blocking processing. | 20 | Integer
@@ -242,7 +242,7 @@ with the following path and query parameters:
 | *retries* (producer) | Setting a value greater than zero will cause the client to resend any record whose send fails with a potentially transient error. Note that this retry is no different than if the client resent the record upon receiving the error. Allowing retries will potentially change the ordering of records because if two records are sent to a single partition, and the first fails and is retried but the second succeeds, then the second record may appear first. | 0 | Integer
 | *retryBackoffMs* (producer) | Before each retry, the producer refreshes the metadata of relevant topics to see if a new leader has been elected. Since leader election takes a bit of time, this property specifies the amount of time that the producer waits before refreshing the metadata. | 100 | Integer
 | *sendBufferBytes* (producer) | Socket write buffer size | 131072 | Integer
-| *serializerClass* (producer) | The serializer class for messages. | org.apache.kafka.common.serialization.StringSerializer | String
+| *valueSerializer* (producer) | The serializer class for messages. | org.apache.kafka.common.serialization.StringSerializer | String
 | *workerPool* (producer) | To use a custom worker pool for continuing to route the Exchange after the Kafka server has acknowledged the message that was sent to it from the KafkaProducer, using asynchronous non-blocking processing. If using this option then you must handle the lifecycle of the thread pool to shut the pool down when no longer needed. |  | ExecutorService
 | *workerPoolCoreSize* (producer) | Number of core threads for the worker pool for continuing to route the Exchange after the Kafka server has acknowledged the message that was sent to it from the KafkaProducer, using asynchronous non-blocking processing. | 10 | Integer
 | *workerPoolMaxSize* (producer) | Maximum number of threads for the worker pool for continuing to route the Exchange after the Kafka server has acknowledged the message that was sent to it from the KafkaProducer, using asynchronous non-blocking processing. | 20 | Integer


[camel] 04/13: CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - serializerClass test

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/camel.git

commit 5acb8e07efe93b241a4726635d77a62a4b30dc40
Author: Andrea Cosentino <an...@gmail.com>
AuthorDate: Thu Oct 29 18:52:07 2020 +0100

    CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - serializerClass test
---
 .../component/kafka/KafkaComponentConfigurer.java  | 527 ---------------------
 .../component/kafka/KafkaEndpointConfigurer.java   | 521 --------------------
 .../component/kafka/KafkaEndpointUriFactory.java   | 163 -------
 .../services/org/apache/camel/component.properties |   7 -
 .../services/org/apache/camel/component/kafka      |   2 -
 .../org/apache/camel/configurer/kafka-component    |   2 -
 .../org/apache/camel/configurer/kafka-endpoint     |   2 -
 .../org/apache/camel/urifactory/kafka-endpoint     |   2 -
 .../org/apache/camel/component/kafka/kafka.json    | 223 ---------
 .../component/kafka/KafkaProducerFullTest.java     |   2 +-
 10 files changed, 1 insertion(+), 1450 deletions(-)

diff --git a/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaComponentConfigurer.java b/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaComponentConfigurer.java
deleted file mode 100644
index ac443ba..0000000
--- a/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaComponentConfigurer.java
+++ /dev/null
@@ -1,527 +0,0 @@
-/* Generated by camel build tools - do NOT edit this file! */
-package org.apache.camel.component.kafka;
-
-import java.util.Map;
-
-import org.apache.camel.CamelContext;
-import org.apache.camel.spi.GeneratedPropertyConfigurer;
-import org.apache.camel.spi.PropertyConfigurerGetter;
-import org.apache.camel.util.CaseInsensitiveMap;
-import org.apache.camel.support.component.PropertyConfigurerSupport;
-
-/**
- * Generated by camel build tools - do NOT edit this file!
- */
-@SuppressWarnings("unchecked")
-public class KafkaComponentConfigurer extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
-
-    private static final Map<String, Object> ALL_OPTIONS;
-    static {
-        Map<String, Object> map = new CaseInsensitiveMap();
-        map.put("additionalProperties", java.util.Map.class);
-        map.put("brokers", java.lang.String.class);
-        map.put("clientId", java.lang.String.class);
-        map.put("configuration", org.apache.camel.component.kafka.KafkaConfiguration.class);
-        map.put("headerFilterStrategy", org.apache.camel.spi.HeaderFilterStrategy.class);
-        map.put("reconnectBackoffMaxMs", java.lang.Integer.class);
-        map.put("shutdownTimeout", int.class);
-        map.put("allowManualCommit", boolean.class);
-        map.put("autoCommitEnable", java.lang.Boolean.class);
-        map.put("autoCommitIntervalMs", java.lang.Integer.class);
-        map.put("autoCommitOnStop", java.lang.String.class);
-        map.put("autoOffsetReset", java.lang.String.class);
-        map.put("breakOnFirstError", boolean.class);
-        map.put("bridgeErrorHandler", boolean.class);
-        map.put("checkCrcs", java.lang.Boolean.class);
-        map.put("consumerRequestTimeoutMs", java.lang.Integer.class);
-        map.put("consumersCount", int.class);
-        map.put("consumerStreams", int.class);
-        map.put("fetchMaxBytes", java.lang.Integer.class);
-        map.put("fetchMinBytes", java.lang.Integer.class);
-        map.put("fetchWaitMaxMs", java.lang.Integer.class);
-        map.put("groupId", java.lang.String.class);
-        map.put("heartbeatIntervalMs", java.lang.Integer.class);
-        map.put("kafkaHeaderDeserializer", org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer.class);
-        map.put("keyDeserializer", java.lang.String.class);
-        map.put("maxPartitionFetchBytes", java.lang.Integer.class);
-        map.put("maxPollIntervalMs", java.lang.Long.class);
-        map.put("maxPollRecords", java.lang.Integer.class);
-        map.put("offsetRepository", org.apache.camel.spi.StateRepository.class);
-        map.put("partitionAssignor", java.lang.String.class);
-        map.put("pollTimeoutMs", java.lang.Long.class);
-        map.put("seekTo", java.lang.String.class);
-        map.put("sessionTimeoutMs", java.lang.Integer.class);
-        map.put("specificAvroReader", boolean.class);
-        map.put("topicIsPattern", boolean.class);
-        map.put("valueDeserializer", java.lang.String.class);
-        map.put("kafkaManualCommitFactory", org.apache.camel.component.kafka.KafkaManualCommitFactory.class);
-        map.put("bufferMemorySize", java.lang.Integer.class);
-        map.put("compressionCodec", java.lang.String.class);
-        map.put("connectionMaxIdleMs", java.lang.Integer.class);
-        map.put("enableIdempotence", boolean.class);
-        map.put("kafkaHeaderSerializer", org.apache.camel.component.kafka.serde.KafkaHeaderSerializer.class);
-        map.put("key", java.lang.String.class);
-        map.put("keySerializer", java.lang.String.class);
-        map.put("lazyStartProducer", boolean.class);
-        map.put("lingerMs", java.lang.Integer.class);
-        map.put("maxBlockMs", java.lang.Integer.class);
-        map.put("maxInFlightRequest", java.lang.Integer.class);
-        map.put("maxRequestSize", java.lang.Integer.class);
-        map.put("metadataMaxAgeMs", java.lang.Integer.class);
-        map.put("metricReporters", java.lang.String.class);
-        map.put("metricsSampleWindowMs", java.lang.Integer.class);
-        map.put("noOfMetricsSample", java.lang.Integer.class);
-        map.put("partitioner", java.lang.String.class);
-        map.put("partitionKey", java.lang.Integer.class);
-        map.put("producerBatchSize", java.lang.Integer.class);
-        map.put("queueBufferingMaxMessages", java.lang.Integer.class);
-        map.put("receiveBufferBytes", java.lang.Integer.class);
-        map.put("reconnectBackoffMs", java.lang.Integer.class);
-        map.put("recordMetadata", boolean.class);
-        map.put("requestRequiredAcks", java.lang.String.class);
-        map.put("requestTimeoutMs", java.lang.Integer.class);
-        map.put("retries", java.lang.Integer.class);
-        map.put("retryBackoffMs", java.lang.Integer.class);
-        map.put("sendBufferBytes", java.lang.Integer.class);
-        map.put("serializerClass", java.lang.String.class);
-        map.put("workerPool", java.util.concurrent.ExecutorService.class);
-        map.put("workerPoolCoreSize", java.lang.Integer.class);
-        map.put("workerPoolMaxSize", java.lang.Integer.class);
-        map.put("basicPropertyBinding", boolean.class);
-        map.put("schemaRegistryURL", java.lang.String.class);
-        map.put("interceptorClasses", java.lang.String.class);
-        map.put("kerberosBeforeReloginMinTime", java.lang.Integer.class);
-        map.put("kerberosInitCmd", java.lang.String.class);
-        map.put("kerberosPrincipalToLocalRules", java.lang.String.class);
-        map.put("kerberosRenewJitter", java.lang.Double.class);
-        map.put("kerberosRenewWindowFactor", java.lang.Double.class);
-        map.put("saslJaasConfig", java.lang.String.class);
-        map.put("saslKerberosServiceName", java.lang.String.class);
-        map.put("saslMechanism", java.lang.String.class);
-        map.put("securityProtocol", java.lang.String.class);
-        map.put("sslCipherSuites", java.lang.String.class);
-        map.put("sslContextParameters", org.apache.camel.support.jsse.SSLContextParameters.class);
-        map.put("sslEnabledProtocols", java.lang.String.class);
-        map.put("sslEndpointAlgorithm", java.lang.String.class);
-        map.put("sslKeymanagerAlgorithm", java.lang.String.class);
-        map.put("sslKeyPassword", java.lang.String.class);
-        map.put("sslKeystoreLocation", java.lang.String.class);
-        map.put("sslKeystorePassword", java.lang.String.class);
-        map.put("sslKeystoreType", java.lang.String.class);
-        map.put("sslProtocol", java.lang.String.class);
-        map.put("sslProvider", java.lang.String.class);
-        map.put("sslTrustmanagerAlgorithm", java.lang.String.class);
-        map.put("sslTruststoreLocation", java.lang.String.class);
-        map.put("sslTruststorePassword", java.lang.String.class);
-        map.put("sslTruststoreType", java.lang.String.class);
-        map.put("useGlobalSslContextParameters", boolean.class);
-        ALL_OPTIONS = map;
-    }
-
-    private org.apache.camel.component.kafka.KafkaConfiguration getOrCreateConfiguration(KafkaComponent target) {
-        if (target.getConfiguration() == null) {
-            target.setConfiguration(new org.apache.camel.component.kafka.KafkaConfiguration());
-        }
-        return target.getConfiguration();
-    }
-
-    @Override
-    public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
-        KafkaComponent target = (KafkaComponent) obj;
-        switch (ignoreCase ? name.toLowerCase() : name) {
-        case "additionalproperties":
-        case "additionalProperties": getOrCreateConfiguration(target).setAdditionalProperties(property(camelContext, java.util.Map.class, value)); return true;
-        case "allowmanualcommit":
-        case "allowManualCommit": getOrCreateConfiguration(target).setAllowManualCommit(property(camelContext, boolean.class, value)); return true;
-        case "autocommitenable":
-        case "autoCommitEnable": getOrCreateConfiguration(target).setAutoCommitEnable(property(camelContext, java.lang.Boolean.class, value)); return true;
-        case "autocommitintervalms":
-        case "autoCommitIntervalMs": getOrCreateConfiguration(target).setAutoCommitIntervalMs(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "autocommitonstop":
-        case "autoCommitOnStop": getOrCreateConfiguration(target).setAutoCommitOnStop(property(camelContext, java.lang.String.class, value)); return true;
-        case "autooffsetreset":
-        case "autoOffsetReset": getOrCreateConfiguration(target).setAutoOffsetReset(property(camelContext, java.lang.String.class, value)); return true;
-        case "basicpropertybinding":
-        case "basicPropertyBinding": target.setBasicPropertyBinding(property(camelContext, boolean.class, value)); return true;
-        case "breakonfirsterror":
-        case "breakOnFirstError": getOrCreateConfiguration(target).setBreakOnFirstError(property(camelContext, boolean.class, value)); return true;
-        case "bridgeerrorhandler":
-        case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
-        case "brokers": getOrCreateConfiguration(target).setBrokers(property(camelContext, java.lang.String.class, value)); return true;
-        case "buffermemorysize":
-        case "bufferMemorySize": getOrCreateConfiguration(target).setBufferMemorySize(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "checkcrcs":
-        case "checkCrcs": getOrCreateConfiguration(target).setCheckCrcs(property(camelContext, java.lang.Boolean.class, value)); return true;
-        case "clientid":
-        case "clientId": getOrCreateConfiguration(target).setClientId(property(camelContext, java.lang.String.class, value)); return true;
-        case "compressioncodec":
-        case "compressionCodec": getOrCreateConfiguration(target).setCompressionCodec(property(camelContext, java.lang.String.class, value)); return true;
-        case "configuration": target.setConfiguration(property(camelContext, org.apache.camel.component.kafka.KafkaConfiguration.class, value)); return true;
-        case "connectionmaxidlems":
-        case "connectionMaxIdleMs": getOrCreateConfiguration(target).setConnectionMaxIdleMs(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "consumerrequesttimeoutms":
-        case "consumerRequestTimeoutMs": getOrCreateConfiguration(target).setConsumerRequestTimeoutMs(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "consumerstreams":
-        case "consumerStreams": getOrCreateConfiguration(target).setConsumerStreams(property(camelContext, int.class, value)); return true;
-        case "consumerscount":
-        case "consumersCount": getOrCreateConfiguration(target).setConsumersCount(property(camelContext, int.class, value)); return true;
-        case "enableidempotence":
-        case "enableIdempotence": getOrCreateConfiguration(target).setEnableIdempotence(property(camelContext, boolean.class, value)); return true;
-        case "fetchmaxbytes":
-        case "fetchMaxBytes": getOrCreateConfiguration(target).setFetchMaxBytes(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "fetchminbytes":
-        case "fetchMinBytes": getOrCreateConfiguration(target).setFetchMinBytes(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "fetchwaitmaxms":
-        case "fetchWaitMaxMs": getOrCreateConfiguration(target).setFetchWaitMaxMs(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "groupid":
-        case "groupId": getOrCreateConfiguration(target).setGroupId(property(camelContext, java.lang.String.class, value)); return true;
-        case "headerfilterstrategy":
-        case "headerFilterStrategy": getOrCreateConfiguration(target).setHeaderFilterStrategy(property(camelContext, org.apache.camel.spi.HeaderFilterStrategy.class, value)); return true;
-        case "heartbeatintervalms":
-        case "heartbeatIntervalMs": getOrCreateConfiguration(target).setHeartbeatIntervalMs(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "interceptorclasses":
-        case "interceptorClasses": getOrCreateConfiguration(target).setInterceptorClasses(property(camelContext, java.lang.String.class, value)); return true;
-        case "kafkaheaderdeserializer":
-        case "kafkaHeaderDeserializer": getOrCreateConfiguration(target).setKafkaHeaderDeserializer(property(camelContext, org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer.class, value)); return true;
-        case "kafkaheaderserializer":
-        case "kafkaHeaderSerializer": getOrCreateConfiguration(target).setKafkaHeaderSerializer(property(camelContext, org.apache.camel.component.kafka.serde.KafkaHeaderSerializer.class, value)); return true;
-        case "kafkamanualcommitfactory":
-        case "kafkaManualCommitFactory": target.setKafkaManualCommitFactory(property(camelContext, org.apache.camel.component.kafka.KafkaManualCommitFactory.class, value)); return true;
-        case "kerberosbeforereloginmintime":
-        case "kerberosBeforeReloginMinTime": getOrCreateConfiguration(target).setKerberosBeforeReloginMinTime(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "kerberosinitcmd":
-        case "kerberosInitCmd": getOrCreateConfiguration(target).setKerberosInitCmd(property(camelContext, java.lang.String.class, value)); return true;
-        case "kerberosprincipaltolocalrules":
-        case "kerberosPrincipalToLocalRules": getOrCreateConfiguration(target).setKerberosPrincipalToLocalRules(property(camelContext, java.lang.String.class, value)); return true;
-        case "kerberosrenewjitter":
-        case "kerberosRenewJitter": getOrCreateConfiguration(target).setKerberosRenewJitter(property(camelContext, java.lang.Double.class, value)); return true;
-        case "kerberosrenewwindowfactor":
-        case "kerberosRenewWindowFactor": getOrCreateConfiguration(target).setKerberosRenewWindowFactor(property(camelContext, java.lang.Double.class, value)); return true;
-        case "key": getOrCreateConfiguration(target).setKey(property(camelContext, java.lang.String.class, value)); return true;
-        case "keydeserializer":
-        case "keyDeserializer": getOrCreateConfiguration(target).setKeyDeserializer(property(camelContext, java.lang.String.class, value)); return true;
-        case "keyserializer":
-        case "keySerializer": getOrCreateConfiguration(target).setKeySerializer(property(camelContext, java.lang.String.class, value)); return true;
-        case "lazystartproducer":
-        case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
-        case "lingerms":
-        case "lingerMs": getOrCreateConfiguration(target).setLingerMs(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "maxblockms":
-        case "maxBlockMs": getOrCreateConfiguration(target).setMaxBlockMs(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "maxinflightrequest":
-        case "maxInFlightRequest": getOrCreateConfiguration(target).setMaxInFlightRequest(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "maxpartitionfetchbytes":
-        case "maxPartitionFetchBytes": getOrCreateConfiguration(target).setMaxPartitionFetchBytes(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "maxpollintervalms":
-        case "maxPollIntervalMs": getOrCreateConfiguration(target).setMaxPollIntervalMs(property(camelContext, java.lang.Long.class, value)); return true;
-        case "maxpollrecords":
-        case "maxPollRecords": getOrCreateConfiguration(target).setMaxPollRecords(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "maxrequestsize":
-        case "maxRequestSize": getOrCreateConfiguration(target).setMaxRequestSize(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "metadatamaxagems":
-        case "metadataMaxAgeMs": getOrCreateConfiguration(target).setMetadataMaxAgeMs(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "metricreporters":
-        case "metricReporters": getOrCreateConfiguration(target).setMetricReporters(property(camelContext, java.lang.String.class, value)); return true;
-        case "metricssamplewindowms":
-        case "metricsSampleWindowMs": getOrCreateConfiguration(target).setMetricsSampleWindowMs(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "noofmetricssample":
-        case "noOfMetricsSample": getOrCreateConfiguration(target).setNoOfMetricsSample(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "offsetrepository":
-        case "offsetRepository": getOrCreateConfiguration(target).setOffsetRepository(property(camelContext, org.apache.camel.spi.StateRepository.class, value)); return true;
-        case "partitionassignor":
-        case "partitionAssignor": getOrCreateConfiguration(target).setPartitionAssignor(property(camelContext, java.lang.String.class, value)); return true;
-        case "partitionkey":
-        case "partitionKey": getOrCreateConfiguration(target).setPartitionKey(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "partitioner": getOrCreateConfiguration(target).setPartitioner(property(camelContext, java.lang.String.class, value)); return true;
-        case "polltimeoutms":
-        case "pollTimeoutMs": getOrCreateConfiguration(target).setPollTimeoutMs(property(camelContext, java.lang.Long.class, value)); return true;
-        case "producerbatchsize":
-        case "producerBatchSize": getOrCreateConfiguration(target).setProducerBatchSize(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "queuebufferingmaxmessages":
-        case "queueBufferingMaxMessages": getOrCreateConfiguration(target).setQueueBufferingMaxMessages(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "receivebufferbytes":
-        case "receiveBufferBytes": getOrCreateConfiguration(target).setReceiveBufferBytes(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "reconnectbackoffmaxms":
-        case "reconnectBackoffMaxMs": getOrCreateConfiguration(target).setReconnectBackoffMaxMs(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "reconnectbackoffms":
-        case "reconnectBackoffMs": getOrCreateConfiguration(target).setReconnectBackoffMs(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "recordmetadata":
-        case "recordMetadata": getOrCreateConfiguration(target).setRecordMetadata(property(camelContext, boolean.class, value)); return true;
-        case "requestrequiredacks":
-        case "requestRequiredAcks": getOrCreateConfiguration(target).setRequestRequiredAcks(property(camelContext, java.lang.String.class, value)); return true;
-        case "requesttimeoutms":
-        case "requestTimeoutMs": getOrCreateConfiguration(target).setRequestTimeoutMs(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "retries": getOrCreateConfiguration(target).setRetries(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "retrybackoffms":
-        case "retryBackoffMs": getOrCreateConfiguration(target).setRetryBackoffMs(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "sasljaasconfig":
-        case "saslJaasConfig": getOrCreateConfiguration(target).setSaslJaasConfig(property(camelContext, java.lang.String.class, value)); return true;
-        case "saslkerberosservicename":
-        case "saslKerberosServiceName": getOrCreateConfiguration(target).setSaslKerberosServiceName(property(camelContext, java.lang.String.class, value)); return true;
-        case "saslmechanism":
-        case "saslMechanism": getOrCreateConfiguration(target).setSaslMechanism(property(camelContext, java.lang.String.class, value)); return true;
-        case "schemaregistryurl":
-        case "schemaRegistryURL": getOrCreateConfiguration(target).setSchemaRegistryURL(property(camelContext, java.lang.String.class, value)); return true;
-        case "securityprotocol":
-        case "securityProtocol": getOrCreateConfiguration(target).setSecurityProtocol(property(camelContext, java.lang.String.class, value)); return true;
-        case "seekto":
-        case "seekTo": getOrCreateConfiguration(target).setSeekTo(property(camelContext, java.lang.String.class, value)); return true;
-        case "sendbufferbytes":
-        case "sendBufferBytes": getOrCreateConfiguration(target).setSendBufferBytes(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "serializerclass":
-        case "serializerClass": getOrCreateConfiguration(target).setSerializerClass(property(camelContext, java.lang.String.class, value)); return true;
-        case "sessiontimeoutms":
-        case "sessionTimeoutMs": getOrCreateConfiguration(target).setSessionTimeoutMs(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "shutdowntimeout":
-        case "shutdownTimeout": getOrCreateConfiguration(target).setShutdownTimeout(property(camelContext, int.class, value)); return true;
-        case "specificavroreader":
-        case "specificAvroReader": getOrCreateConfiguration(target).setSpecificAvroReader(property(camelContext, boolean.class, value)); return true;
-        case "sslciphersuites":
-        case "sslCipherSuites": getOrCreateConfiguration(target).setSslCipherSuites(property(camelContext, java.lang.String.class, value)); return true;
-        case "sslcontextparameters":
-        case "sslContextParameters": getOrCreateConfiguration(target).setSslContextParameters(property(camelContext, org.apache.camel.support.jsse.SSLContextParameters.class, value)); return true;
-        case "sslenabledprotocols":
-        case "sslEnabledProtocols": getOrCreateConfiguration(target).setSslEnabledProtocols(property(camelContext, java.lang.String.class, value)); return true;
-        case "sslendpointalgorithm":
-        case "sslEndpointAlgorithm": getOrCreateConfiguration(target).setSslEndpointAlgorithm(property(camelContext, java.lang.String.class, value)); return true;
-        case "sslkeypassword":
-        case "sslKeyPassword": getOrCreateConfiguration(target).setSslKeyPassword(property(camelContext, java.lang.String.class, value)); return true;
-        case "sslkeymanageralgorithm":
-        case "sslKeymanagerAlgorithm": getOrCreateConfiguration(target).setSslKeymanagerAlgorithm(property(camelContext, java.lang.String.class, value)); return true;
-        case "sslkeystorelocation":
-        case "sslKeystoreLocation": getOrCreateConfiguration(target).setSslKeystoreLocation(property(camelContext, java.lang.String.class, value)); return true;
-        case "sslkeystorepassword":
-        case "sslKeystorePassword": getOrCreateConfiguration(target).setSslKeystorePassword(property(camelContext, java.lang.String.class, value)); return true;
-        case "sslkeystoretype":
-        case "sslKeystoreType": getOrCreateConfiguration(target).setSslKeystoreType(property(camelContext, java.lang.String.class, value)); return true;
-        case "sslprotocol":
-        case "sslProtocol": getOrCreateConfiguration(target).setSslProtocol(property(camelContext, java.lang.String.class, value)); return true;
-        case "sslprovider":
-        case "sslProvider": getOrCreateConfiguration(target).setSslProvider(property(camelContext, java.lang.String.class, value)); return true;
-        case "ssltrustmanageralgorithm":
-        case "sslTrustmanagerAlgorithm": getOrCreateConfiguration(target).setSslTrustmanagerAlgorithm(property(camelContext, java.lang.String.class, value)); return true;
-        case "ssltruststorelocation":
-        case "sslTruststoreLocation": getOrCreateConfiguration(target).setSslTruststoreLocation(property(camelContext, java.lang.String.class, value)); return true;
-        case "ssltruststorepassword":
-        case "sslTruststorePassword": getOrCreateConfiguration(target).setSslTruststorePassword(property(camelContext, java.lang.String.class, value)); return true;
-        case "ssltruststoretype":
-        case "sslTruststoreType": getOrCreateConfiguration(target).setSslTruststoreType(property(camelContext, java.lang.String.class, value)); return true;
-        case "topicispattern":
-        case "topicIsPattern": getOrCreateConfiguration(target).setTopicIsPattern(property(camelContext, boolean.class, value)); return true;
-        case "useglobalsslcontextparameters":
-        case "useGlobalSslContextParameters": target.setUseGlobalSslContextParameters(property(camelContext, boolean.class, value)); return true;
-        case "valuedeserializer":
-        case "valueDeserializer": getOrCreateConfiguration(target).setValueDeserializer(property(camelContext, java.lang.String.class, value)); return true;
-        case "workerpool":
-        case "workerPool": getOrCreateConfiguration(target).setWorkerPool(property(camelContext, java.util.concurrent.ExecutorService.class, value)); return true;
-        case "workerpoolcoresize":
-        case "workerPoolCoreSize": getOrCreateConfiguration(target).setWorkerPoolCoreSize(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "workerpoolmaxsize":
-        case "workerPoolMaxSize": getOrCreateConfiguration(target).setWorkerPoolMaxSize(property(camelContext, java.lang.Integer.class, value)); return true;
-        default: return false;
-        }
-    }
-
-    @Override
-    public Map<String, Object> getAllOptions(Object target) {
-        return ALL_OPTIONS;
-    }
-
-    @Override
-    public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
-        KafkaComponent target = (KafkaComponent) obj;
-        switch (ignoreCase ? name.toLowerCase() : name) {
-        case "additionalproperties":
-        case "additionalProperties": return getOrCreateConfiguration(target).getAdditionalProperties();
-        case "allowmanualcommit":
-        case "allowManualCommit": return getOrCreateConfiguration(target).isAllowManualCommit();
-        case "autocommitenable":
-        case "autoCommitEnable": return getOrCreateConfiguration(target).getAutoCommitEnable();
-        case "autocommitintervalms":
-        case "autoCommitIntervalMs": return getOrCreateConfiguration(target).getAutoCommitIntervalMs();
-        case "autocommitonstop":
-        case "autoCommitOnStop": return getOrCreateConfiguration(target).getAutoCommitOnStop();
-        case "autooffsetreset":
-        case "autoOffsetReset": return getOrCreateConfiguration(target).getAutoOffsetReset();
-        case "basicpropertybinding":
-        case "basicPropertyBinding": return target.isBasicPropertyBinding();
-        case "breakonfirsterror":
-        case "breakOnFirstError": return getOrCreateConfiguration(target).isBreakOnFirstError();
-        case "bridgeerrorhandler":
-        case "bridgeErrorHandler": return target.isBridgeErrorHandler();
-        case "brokers": return getOrCreateConfiguration(target).getBrokers();
-        case "buffermemorysize":
-        case "bufferMemorySize": return getOrCreateConfiguration(target).getBufferMemorySize();
-        case "checkcrcs":
-        case "checkCrcs": return getOrCreateConfiguration(target).getCheckCrcs();
-        case "clientid":
-        case "clientId": return getOrCreateConfiguration(target).getClientId();
-        case "compressioncodec":
-        case "compressionCodec": return getOrCreateConfiguration(target).getCompressionCodec();
-        case "configuration": return target.getConfiguration();
-        case "connectionmaxidlems":
-        case "connectionMaxIdleMs": return getOrCreateConfiguration(target).getConnectionMaxIdleMs();
-        case "consumerrequesttimeoutms":
-        case "consumerRequestTimeoutMs": return getOrCreateConfiguration(target).getConsumerRequestTimeoutMs();
-        case "consumerstreams":
-        case "consumerStreams": return getOrCreateConfiguration(target).getConsumerStreams();
-        case "consumerscount":
-        case "consumersCount": return getOrCreateConfiguration(target).getConsumersCount();
-        case "enableidempotence":
-        case "enableIdempotence": return getOrCreateConfiguration(target).isEnableIdempotence();
-        case "fetchmaxbytes":
-        case "fetchMaxBytes": return getOrCreateConfiguration(target).getFetchMaxBytes();
-        case "fetchminbytes":
-        case "fetchMinBytes": return getOrCreateConfiguration(target).getFetchMinBytes();
-        case "fetchwaitmaxms":
-        case "fetchWaitMaxMs": return getOrCreateConfiguration(target).getFetchWaitMaxMs();
-        case "groupid":
-        case "groupId": return getOrCreateConfiguration(target).getGroupId();
-        case "headerfilterstrategy":
-        case "headerFilterStrategy": return getOrCreateConfiguration(target).getHeaderFilterStrategy();
-        case "heartbeatintervalms":
-        case "heartbeatIntervalMs": return getOrCreateConfiguration(target).getHeartbeatIntervalMs();
-        case "interceptorclasses":
-        case "interceptorClasses": return getOrCreateConfiguration(target).getInterceptorClasses();
-        case "kafkaheaderdeserializer":
-        case "kafkaHeaderDeserializer": return getOrCreateConfiguration(target).getKafkaHeaderDeserializer();
-        case "kafkaheaderserializer":
-        case "kafkaHeaderSerializer": return getOrCreateConfiguration(target).getKafkaHeaderSerializer();
-        case "kafkamanualcommitfactory":
-        case "kafkaManualCommitFactory": return target.getKafkaManualCommitFactory();
-        case "kerberosbeforereloginmintime":
-        case "kerberosBeforeReloginMinTime": return getOrCreateConfiguration(target).getKerberosBeforeReloginMinTime();
-        case "kerberosinitcmd":
-        case "kerberosInitCmd": return getOrCreateConfiguration(target).getKerberosInitCmd();
-        case "kerberosprincipaltolocalrules":
-        case "kerberosPrincipalToLocalRules": return getOrCreateConfiguration(target).getKerberosPrincipalToLocalRules();
-        case "kerberosrenewjitter":
-        case "kerberosRenewJitter": return getOrCreateConfiguration(target).getKerberosRenewJitter();
-        case "kerberosrenewwindowfactor":
-        case "kerberosRenewWindowFactor": return getOrCreateConfiguration(target).getKerberosRenewWindowFactor();
-        case "key": return getOrCreateConfiguration(target).getKey();
-        case "keydeserializer":
-        case "keyDeserializer": return getOrCreateConfiguration(target).getKeyDeserializer();
-        case "keyserializer":
-        case "keySerializer": return getOrCreateConfiguration(target).getKeySerializer();
-        case "lazystartproducer":
-        case "lazyStartProducer": return target.isLazyStartProducer();
-        case "lingerms":
-        case "lingerMs": return getOrCreateConfiguration(target).getLingerMs();
-        case "maxblockms":
-        case "maxBlockMs": return getOrCreateConfiguration(target).getMaxBlockMs();
-        case "maxinflightrequest":
-        case "maxInFlightRequest": return getOrCreateConfiguration(target).getMaxInFlightRequest();
-        case "maxpartitionfetchbytes":
-        case "maxPartitionFetchBytes": return getOrCreateConfiguration(target).getMaxPartitionFetchBytes();
-        case "maxpollintervalms":
-        case "maxPollIntervalMs": return getOrCreateConfiguration(target).getMaxPollIntervalMs();
-        case "maxpollrecords":
-        case "maxPollRecords": return getOrCreateConfiguration(target).getMaxPollRecords();
-        case "maxrequestsize":
-        case "maxRequestSize": return getOrCreateConfiguration(target).getMaxRequestSize();
-        case "metadatamaxagems":
-        case "metadataMaxAgeMs": return getOrCreateConfiguration(target).getMetadataMaxAgeMs();
-        case "metricreporters":
-        case "metricReporters": return getOrCreateConfiguration(target).getMetricReporters();
-        case "metricssamplewindowms":
-        case "metricsSampleWindowMs": return getOrCreateConfiguration(target).getMetricsSampleWindowMs();
-        case "noofmetricssample":
-        case "noOfMetricsSample": return getOrCreateConfiguration(target).getNoOfMetricsSample();
-        case "offsetrepository":
-        case "offsetRepository": return getOrCreateConfiguration(target).getOffsetRepository();
-        case "partitionassignor":
-        case "partitionAssignor": return getOrCreateConfiguration(target).getPartitionAssignor();
-        case "partitionkey":
-        case "partitionKey": return getOrCreateConfiguration(target).getPartitionKey();
-        case "partitioner": return getOrCreateConfiguration(target).getPartitioner();
-        case "polltimeoutms":
-        case "pollTimeoutMs": return getOrCreateConfiguration(target).getPollTimeoutMs();
-        case "producerbatchsize":
-        case "producerBatchSize": return getOrCreateConfiguration(target).getProducerBatchSize();
-        case "queuebufferingmaxmessages":
-        case "queueBufferingMaxMessages": return getOrCreateConfiguration(target).getQueueBufferingMaxMessages();
-        case "receivebufferbytes":
-        case "receiveBufferBytes": return getOrCreateConfiguration(target).getReceiveBufferBytes();
-        case "reconnectbackoffmaxms":
-        case "reconnectBackoffMaxMs": return getOrCreateConfiguration(target).getReconnectBackoffMaxMs();
-        case "reconnectbackoffms":
-        case "reconnectBackoffMs": return getOrCreateConfiguration(target).getReconnectBackoffMs();
-        case "recordmetadata":
-        case "recordMetadata": return getOrCreateConfiguration(target).isRecordMetadata();
-        case "requestrequiredacks":
-        case "requestRequiredAcks": return getOrCreateConfiguration(target).getRequestRequiredAcks();
-        case "requesttimeoutms":
-        case "requestTimeoutMs": return getOrCreateConfiguration(target).getRequestTimeoutMs();
-        case "retries": return getOrCreateConfiguration(target).getRetries();
-        case "retrybackoffms":
-        case "retryBackoffMs": return getOrCreateConfiguration(target).getRetryBackoffMs();
-        case "sasljaasconfig":
-        case "saslJaasConfig": return getOrCreateConfiguration(target).getSaslJaasConfig();
-        case "saslkerberosservicename":
-        case "saslKerberosServiceName": return getOrCreateConfiguration(target).getSaslKerberosServiceName();
-        case "saslmechanism":
-        case "saslMechanism": return getOrCreateConfiguration(target).getSaslMechanism();
-        case "schemaregistryurl":
-        case "schemaRegistryURL": return getOrCreateConfiguration(target).getSchemaRegistryURL();
-        case "securityprotocol":
-        case "securityProtocol": return getOrCreateConfiguration(target).getSecurityProtocol();
-        case "seekto":
-        case "seekTo": return getOrCreateConfiguration(target).getSeekTo();
-        case "sendbufferbytes":
-        case "sendBufferBytes": return getOrCreateConfiguration(target).getSendBufferBytes();
-        case "serializerclass":
-        case "serializerClass": return getOrCreateConfiguration(target).getSerializerClass();
-        case "sessiontimeoutms":
-        case "sessionTimeoutMs": return getOrCreateConfiguration(target).getSessionTimeoutMs();
-        case "shutdowntimeout":
-        case "shutdownTimeout": return getOrCreateConfiguration(target).getShutdownTimeout();
-        case "specificavroreader":
-        case "specificAvroReader": return getOrCreateConfiguration(target).isSpecificAvroReader();
-        case "sslciphersuites":
-        case "sslCipherSuites": return getOrCreateConfiguration(target).getSslCipherSuites();
-        case "sslcontextparameters":
-        case "sslContextParameters": return getOrCreateConfiguration(target).getSslContextParameters();
-        case "sslenabledprotocols":
-        case "sslEnabledProtocols": return getOrCreateConfiguration(target).getSslEnabledProtocols();
-        case "sslendpointalgorithm":
-        case "sslEndpointAlgorithm": return getOrCreateConfiguration(target).getSslEndpointAlgorithm();
-        case "sslkeypassword":
-        case "sslKeyPassword": return getOrCreateConfiguration(target).getSslKeyPassword();
-        case "sslkeymanageralgorithm":
-        case "sslKeymanagerAlgorithm": return getOrCreateConfiguration(target).getSslKeymanagerAlgorithm();
-        case "sslkeystorelocation":
-        case "sslKeystoreLocation": return getOrCreateConfiguration(target).getSslKeystoreLocation();
-        case "sslkeystorepassword":
-        case "sslKeystorePassword": return getOrCreateConfiguration(target).getSslKeystorePassword();
-        case "sslkeystoretype":
-        case "sslKeystoreType": return getOrCreateConfiguration(target).getSslKeystoreType();
-        case "sslprotocol":
-        case "sslProtocol": return getOrCreateConfiguration(target).getSslProtocol();
-        case "sslprovider":
-        case "sslProvider": return getOrCreateConfiguration(target).getSslProvider();
-        case "ssltrustmanageralgorithm":
-        case "sslTrustmanagerAlgorithm": return getOrCreateConfiguration(target).getSslTrustmanagerAlgorithm();
-        case "ssltruststorelocation":
-        case "sslTruststoreLocation": return getOrCreateConfiguration(target).getSslTruststoreLocation();
-        case "ssltruststorepassword":
-        case "sslTruststorePassword": return getOrCreateConfiguration(target).getSslTruststorePassword();
-        case "ssltruststoretype":
-        case "sslTruststoreType": return getOrCreateConfiguration(target).getSslTruststoreType();
-        case "topicispattern":
-        case "topicIsPattern": return getOrCreateConfiguration(target).isTopicIsPattern();
-        case "useglobalsslcontextparameters":
-        case "useGlobalSslContextParameters": return target.isUseGlobalSslContextParameters();
-        case "valuedeserializer":
-        case "valueDeserializer": return getOrCreateConfiguration(target).getValueDeserializer();
-        case "workerpool":
-        case "workerPool": return getOrCreateConfiguration(target).getWorkerPool();
-        case "workerpoolcoresize":
-        case "workerPoolCoreSize": return getOrCreateConfiguration(target).getWorkerPoolCoreSize();
-        case "workerpoolmaxsize":
-        case "workerPoolMaxSize": return getOrCreateConfiguration(target).getWorkerPoolMaxSize();
-        default: return null;
-        }
-    }
-}
-
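
For orientation while reading the regenerated configurer above: the producer-side serializer settings it resolves are ordinary Kafka endpoint options. A minimal route sketch, using the option names exactly as they appear in the generated code being removed here (keySerializer, serializerClass) and assuming a purely illustrative broker address, topic name and Kafka StringSerializer classes, might look like this:

import org.apache.camel.builder.RouteBuilder;

public class KafkaSerializerOptionsRoute extends RouteBuilder {
    @Override
    public void configure() throws Exception {
        // Producer endpoint: keySerializer and serializerClass are the
        // options handled by the generated component configurer above.
        // Broker, topic and serializer classes are placeholder values.
        from("direct:start")
            .to("kafka:example-topic"
                + "?brokers=localhost:9092"
                + "&keySerializer=org.apache.kafka.common.serialization.StringSerializer"
                + "&serializerClass=org.apache.kafka.common.serialization.StringSerializer");
    }
}
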
diff --git a/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointConfigurer.java b/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointConfigurer.java
deleted file mode 100644
index bbd428c..0000000
--- a/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointConfigurer.java
+++ /dev/null
@@ -1,521 +0,0 @@
-/* Generated by camel build tools - do NOT edit this file! */
-package org.apache.camel.component.kafka;
-
-import java.util.Map;
-
-import org.apache.camel.CamelContext;
-import org.apache.camel.spi.GeneratedPropertyConfigurer;
-import org.apache.camel.spi.PropertyConfigurerGetter;
-import org.apache.camel.util.CaseInsensitiveMap;
-import org.apache.camel.support.component.PropertyConfigurerSupport;
-
-/**
- * Generated by camel build tools - do NOT edit this file!
- */
-@SuppressWarnings("unchecked")
-public class KafkaEndpointConfigurer extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
-
-    private static final Map<String, Object> ALL_OPTIONS;
-    static {
-        Map<String, Object> map = new CaseInsensitiveMap();
-        map.put("topic", java.lang.String.class);
-        map.put("additionalProperties", java.util.Map.class);
-        map.put("brokers", java.lang.String.class);
-        map.put("clientId", java.lang.String.class);
-        map.put("headerFilterStrategy", org.apache.camel.spi.HeaderFilterStrategy.class);
-        map.put("reconnectBackoffMaxMs", java.lang.Integer.class);
-        map.put("shutdownTimeout", int.class);
-        map.put("allowManualCommit", boolean.class);
-        map.put("autoCommitEnable", java.lang.Boolean.class);
-        map.put("autoCommitIntervalMs", java.lang.Integer.class);
-        map.put("autoCommitOnStop", java.lang.String.class);
-        map.put("autoOffsetReset", java.lang.String.class);
-        map.put("breakOnFirstError", boolean.class);
-        map.put("bridgeErrorHandler", boolean.class);
-        map.put("checkCrcs", java.lang.Boolean.class);
-        map.put("consumerRequestTimeoutMs", java.lang.Integer.class);
-        map.put("consumersCount", int.class);
-        map.put("consumerStreams", int.class);
-        map.put("fetchMaxBytes", java.lang.Integer.class);
-        map.put("fetchMinBytes", java.lang.Integer.class);
-        map.put("fetchWaitMaxMs", java.lang.Integer.class);
-        map.put("groupId", java.lang.String.class);
-        map.put("heartbeatIntervalMs", java.lang.Integer.class);
-        map.put("kafkaHeaderDeserializer", org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer.class);
-        map.put("keyDeserializer", java.lang.String.class);
-        map.put("maxPartitionFetchBytes", java.lang.Integer.class);
-        map.put("maxPollIntervalMs", java.lang.Long.class);
-        map.put("maxPollRecords", java.lang.Integer.class);
-        map.put("offsetRepository", org.apache.camel.spi.StateRepository.class);
-        map.put("partitionAssignor", java.lang.String.class);
-        map.put("pollTimeoutMs", java.lang.Long.class);
-        map.put("seekTo", java.lang.String.class);
-        map.put("sessionTimeoutMs", java.lang.Integer.class);
-        map.put("specificAvroReader", boolean.class);
-        map.put("topicIsPattern", boolean.class);
-        map.put("valueDeserializer", java.lang.String.class);
-        map.put("exceptionHandler", org.apache.camel.spi.ExceptionHandler.class);
-        map.put("exchangePattern", org.apache.camel.ExchangePattern.class);
-        map.put("bufferMemorySize", java.lang.Integer.class);
-        map.put("compressionCodec", java.lang.String.class);
-        map.put("connectionMaxIdleMs", java.lang.Integer.class);
-        map.put("enableIdempotence", boolean.class);
-        map.put("kafkaHeaderSerializer", org.apache.camel.component.kafka.serde.KafkaHeaderSerializer.class);
-        map.put("key", java.lang.String.class);
-        map.put("keySerializer", java.lang.String.class);
-        map.put("lazyStartProducer", boolean.class);
-        map.put("lingerMs", java.lang.Integer.class);
-        map.put("maxBlockMs", java.lang.Integer.class);
-        map.put("maxInFlightRequest", java.lang.Integer.class);
-        map.put("maxRequestSize", java.lang.Integer.class);
-        map.put("metadataMaxAgeMs", java.lang.Integer.class);
-        map.put("metricReporters", java.lang.String.class);
-        map.put("metricsSampleWindowMs", java.lang.Integer.class);
-        map.put("noOfMetricsSample", java.lang.Integer.class);
-        map.put("partitioner", java.lang.String.class);
-        map.put("partitionKey", java.lang.Integer.class);
-        map.put("producerBatchSize", java.lang.Integer.class);
-        map.put("queueBufferingMaxMessages", java.lang.Integer.class);
-        map.put("receiveBufferBytes", java.lang.Integer.class);
-        map.put("reconnectBackoffMs", java.lang.Integer.class);
-        map.put("recordMetadata", boolean.class);
-        map.put("requestRequiredAcks", java.lang.String.class);
-        map.put("requestTimeoutMs", java.lang.Integer.class);
-        map.put("retries", java.lang.Integer.class);
-        map.put("retryBackoffMs", java.lang.Integer.class);
-        map.put("sendBufferBytes", java.lang.Integer.class);
-        map.put("serializerClass", java.lang.String.class);
-        map.put("workerPool", java.util.concurrent.ExecutorService.class);
-        map.put("workerPoolCoreSize", java.lang.Integer.class);
-        map.put("workerPoolMaxSize", java.lang.Integer.class);
-        map.put("basicPropertyBinding", boolean.class);
-        map.put("synchronous", boolean.class);
-        map.put("schemaRegistryURL", java.lang.String.class);
-        map.put("interceptorClasses", java.lang.String.class);
-        map.put("kerberosBeforeReloginMinTime", java.lang.Integer.class);
-        map.put("kerberosInitCmd", java.lang.String.class);
-        map.put("kerberosPrincipalToLocalRules", java.lang.String.class);
-        map.put("kerberosRenewJitter", java.lang.Double.class);
-        map.put("kerberosRenewWindowFactor", java.lang.Double.class);
-        map.put("saslJaasConfig", java.lang.String.class);
-        map.put("saslKerberosServiceName", java.lang.String.class);
-        map.put("saslMechanism", java.lang.String.class);
-        map.put("securityProtocol", java.lang.String.class);
-        map.put("sslCipherSuites", java.lang.String.class);
-        map.put("sslContextParameters", org.apache.camel.support.jsse.SSLContextParameters.class);
-        map.put("sslEnabledProtocols", java.lang.String.class);
-        map.put("sslEndpointAlgorithm", java.lang.String.class);
-        map.put("sslKeymanagerAlgorithm", java.lang.String.class);
-        map.put("sslKeyPassword", java.lang.String.class);
-        map.put("sslKeystoreLocation", java.lang.String.class);
-        map.put("sslKeystorePassword", java.lang.String.class);
-        map.put("sslKeystoreType", java.lang.String.class);
-        map.put("sslProtocol", java.lang.String.class);
-        map.put("sslProvider", java.lang.String.class);
-        map.put("sslTrustmanagerAlgorithm", java.lang.String.class);
-        map.put("sslTruststoreLocation", java.lang.String.class);
-        map.put("sslTruststorePassword", java.lang.String.class);
-        map.put("sslTruststoreType", java.lang.String.class);
-        ALL_OPTIONS = map;
-    }
-
-    @Override
-    public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
-        KafkaEndpoint target = (KafkaEndpoint) obj;
-        switch (ignoreCase ? name.toLowerCase() : name) {
-        case "additionalproperties":
-        case "additionalProperties": target.getConfiguration().setAdditionalProperties(property(camelContext, java.util.Map.class, value)); return true;
-        case "allowmanualcommit":
-        case "allowManualCommit": target.getConfiguration().setAllowManualCommit(property(camelContext, boolean.class, value)); return true;
-        case "autocommitenable":
-        case "autoCommitEnable": target.getConfiguration().setAutoCommitEnable(property(camelContext, java.lang.Boolean.class, value)); return true;
-        case "autocommitintervalms":
-        case "autoCommitIntervalMs": target.getConfiguration().setAutoCommitIntervalMs(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "autocommitonstop":
-        case "autoCommitOnStop": target.getConfiguration().setAutoCommitOnStop(property(camelContext, java.lang.String.class, value)); return true;
-        case "autooffsetreset":
-        case "autoOffsetReset": target.getConfiguration().setAutoOffsetReset(property(camelContext, java.lang.String.class, value)); return true;
-        case "basicpropertybinding":
-        case "basicPropertyBinding": target.setBasicPropertyBinding(property(camelContext, boolean.class, value)); return true;
-        case "breakonfirsterror":
-        case "breakOnFirstError": target.getConfiguration().setBreakOnFirstError(property(camelContext, boolean.class, value)); return true;
-        case "bridgeerrorhandler":
-        case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
-        case "brokers": target.getConfiguration().setBrokers(property(camelContext, java.lang.String.class, value)); return true;
-        case "buffermemorysize":
-        case "bufferMemorySize": target.getConfiguration().setBufferMemorySize(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "checkcrcs":
-        case "checkCrcs": target.getConfiguration().setCheckCrcs(property(camelContext, java.lang.Boolean.class, value)); return true;
-        case "clientid":
-        case "clientId": target.getConfiguration().setClientId(property(camelContext, java.lang.String.class, value)); return true;
-        case "compressioncodec":
-        case "compressionCodec": target.getConfiguration().setCompressionCodec(property(camelContext, java.lang.String.class, value)); return true;
-        case "connectionmaxidlems":
-        case "connectionMaxIdleMs": target.getConfiguration().setConnectionMaxIdleMs(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "consumerrequesttimeoutms":
-        case "consumerRequestTimeoutMs": target.getConfiguration().setConsumerRequestTimeoutMs(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "consumerstreams":
-        case "consumerStreams": target.getConfiguration().setConsumerStreams(property(camelContext, int.class, value)); return true;
-        case "consumerscount":
-        case "consumersCount": target.getConfiguration().setConsumersCount(property(camelContext, int.class, value)); return true;
-        case "enableidempotence":
-        case "enableIdempotence": target.getConfiguration().setEnableIdempotence(property(camelContext, boolean.class, value)); return true;
-        case "exceptionhandler":
-        case "exceptionHandler": target.setExceptionHandler(property(camelContext, org.apache.camel.spi.ExceptionHandler.class, value)); return true;
-        case "exchangepattern":
-        case "exchangePattern": target.setExchangePattern(property(camelContext, org.apache.camel.ExchangePattern.class, value)); return true;
-        case "fetchmaxbytes":
-        case "fetchMaxBytes": target.getConfiguration().setFetchMaxBytes(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "fetchminbytes":
-        case "fetchMinBytes": target.getConfiguration().setFetchMinBytes(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "fetchwaitmaxms":
-        case "fetchWaitMaxMs": target.getConfiguration().setFetchWaitMaxMs(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "groupid":
-        case "groupId": target.getConfiguration().setGroupId(property(camelContext, java.lang.String.class, value)); return true;
-        case "headerfilterstrategy":
-        case "headerFilterStrategy": target.getConfiguration().setHeaderFilterStrategy(property(camelContext, org.apache.camel.spi.HeaderFilterStrategy.class, value)); return true;
-        case "heartbeatintervalms":
-        case "heartbeatIntervalMs": target.getConfiguration().setHeartbeatIntervalMs(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "interceptorclasses":
-        case "interceptorClasses": target.getConfiguration().setInterceptorClasses(property(camelContext, java.lang.String.class, value)); return true;
-        case "kafkaheaderdeserializer":
-        case "kafkaHeaderDeserializer": target.getConfiguration().setKafkaHeaderDeserializer(property(camelContext, org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer.class, value)); return true;
-        case "kafkaheaderserializer":
-        case "kafkaHeaderSerializer": target.getConfiguration().setKafkaHeaderSerializer(property(camelContext, org.apache.camel.component.kafka.serde.KafkaHeaderSerializer.class, value)); return true;
-        case "kerberosbeforereloginmintime":
-        case "kerberosBeforeReloginMinTime": target.getConfiguration().setKerberosBeforeReloginMinTime(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "kerberosinitcmd":
-        case "kerberosInitCmd": target.getConfiguration().setKerberosInitCmd(property(camelContext, java.lang.String.class, value)); return true;
-        case "kerberosprincipaltolocalrules":
-        case "kerberosPrincipalToLocalRules": target.getConfiguration().setKerberosPrincipalToLocalRules(property(camelContext, java.lang.String.class, value)); return true;
-        case "kerberosrenewjitter":
-        case "kerberosRenewJitter": target.getConfiguration().setKerberosRenewJitter(property(camelContext, java.lang.Double.class, value)); return true;
-        case "kerberosrenewwindowfactor":
-        case "kerberosRenewWindowFactor": target.getConfiguration().setKerberosRenewWindowFactor(property(camelContext, java.lang.Double.class, value)); return true;
-        case "key": target.getConfiguration().setKey(property(camelContext, java.lang.String.class, value)); return true;
-        case "keydeserializer":
-        case "keyDeserializer": target.getConfiguration().setKeyDeserializer(property(camelContext, java.lang.String.class, value)); return true;
-        case "keyserializer":
-        case "keySerializer": target.getConfiguration().setKeySerializer(property(camelContext, java.lang.String.class, value)); return true;
-        case "lazystartproducer":
-        case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
-        case "lingerms":
-        case "lingerMs": target.getConfiguration().setLingerMs(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "maxblockms":
-        case "maxBlockMs": target.getConfiguration().setMaxBlockMs(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "maxinflightrequest":
-        case "maxInFlightRequest": target.getConfiguration().setMaxInFlightRequest(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "maxpartitionfetchbytes":
-        case "maxPartitionFetchBytes": target.getConfiguration().setMaxPartitionFetchBytes(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "maxpollintervalms":
-        case "maxPollIntervalMs": target.getConfiguration().setMaxPollIntervalMs(property(camelContext, java.lang.Long.class, value)); return true;
-        case "maxpollrecords":
-        case "maxPollRecords": target.getConfiguration().setMaxPollRecords(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "maxrequestsize":
-        case "maxRequestSize": target.getConfiguration().setMaxRequestSize(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "metadatamaxagems":
-        case "metadataMaxAgeMs": target.getConfiguration().setMetadataMaxAgeMs(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "metricreporters":
-        case "metricReporters": target.getConfiguration().setMetricReporters(property(camelContext, java.lang.String.class, value)); return true;
-        case "metricssamplewindowms":
-        case "metricsSampleWindowMs": target.getConfiguration().setMetricsSampleWindowMs(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "noofmetricssample":
-        case "noOfMetricsSample": target.getConfiguration().setNoOfMetricsSample(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "offsetrepository":
-        case "offsetRepository": target.getConfiguration().setOffsetRepository(property(camelContext, org.apache.camel.spi.StateRepository.class, value)); return true;
-        case "partitionassignor":
-        case "partitionAssignor": target.getConfiguration().setPartitionAssignor(property(camelContext, java.lang.String.class, value)); return true;
-        case "partitionkey":
-        case "partitionKey": target.getConfiguration().setPartitionKey(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "partitioner": target.getConfiguration().setPartitioner(property(camelContext, java.lang.String.class, value)); return true;
-        case "polltimeoutms":
-        case "pollTimeoutMs": target.getConfiguration().setPollTimeoutMs(property(camelContext, java.lang.Long.class, value)); return true;
-        case "producerbatchsize":
-        case "producerBatchSize": target.getConfiguration().setProducerBatchSize(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "queuebufferingmaxmessages":
-        case "queueBufferingMaxMessages": target.getConfiguration().setQueueBufferingMaxMessages(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "receivebufferbytes":
-        case "receiveBufferBytes": target.getConfiguration().setReceiveBufferBytes(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "reconnectbackoffmaxms":
-        case "reconnectBackoffMaxMs": target.getConfiguration().setReconnectBackoffMaxMs(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "reconnectbackoffms":
-        case "reconnectBackoffMs": target.getConfiguration().setReconnectBackoffMs(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "recordmetadata":
-        case "recordMetadata": target.getConfiguration().setRecordMetadata(property(camelContext, boolean.class, value)); return true;
-        case "requestrequiredacks":
-        case "requestRequiredAcks": target.getConfiguration().setRequestRequiredAcks(property(camelContext, java.lang.String.class, value)); return true;
-        case "requesttimeoutms":
-        case "requestTimeoutMs": target.getConfiguration().setRequestTimeoutMs(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "retries": target.getConfiguration().setRetries(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "retrybackoffms":
-        case "retryBackoffMs": target.getConfiguration().setRetryBackoffMs(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "sasljaasconfig":
-        case "saslJaasConfig": target.getConfiguration().setSaslJaasConfig(property(camelContext, java.lang.String.class, value)); return true;
-        case "saslkerberosservicename":
-        case "saslKerberosServiceName": target.getConfiguration().setSaslKerberosServiceName(property(camelContext, java.lang.String.class, value)); return true;
-        case "saslmechanism":
-        case "saslMechanism": target.getConfiguration().setSaslMechanism(property(camelContext, java.lang.String.class, value)); return true;
-        case "schemaregistryurl":
-        case "schemaRegistryURL": target.getConfiguration().setSchemaRegistryURL(property(camelContext, java.lang.String.class, value)); return true;
-        case "securityprotocol":
-        case "securityProtocol": target.getConfiguration().setSecurityProtocol(property(camelContext, java.lang.String.class, value)); return true;
-        case "seekto":
-        case "seekTo": target.getConfiguration().setSeekTo(property(camelContext, java.lang.String.class, value)); return true;
-        case "sendbufferbytes":
-        case "sendBufferBytes": target.getConfiguration().setSendBufferBytes(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "serializerclass":
-        case "serializerClass": target.getConfiguration().setSerializerClass(property(camelContext, java.lang.String.class, value)); return true;
-        case "sessiontimeoutms":
-        case "sessionTimeoutMs": target.getConfiguration().setSessionTimeoutMs(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "shutdowntimeout":
-        case "shutdownTimeout": target.getConfiguration().setShutdownTimeout(property(camelContext, int.class, value)); return true;
-        case "specificavroreader":
-        case "specificAvroReader": target.getConfiguration().setSpecificAvroReader(property(camelContext, boolean.class, value)); return true;
-        case "sslciphersuites":
-        case "sslCipherSuites": target.getConfiguration().setSslCipherSuites(property(camelContext, java.lang.String.class, value)); return true;
-        case "sslcontextparameters":
-        case "sslContextParameters": target.getConfiguration().setSslContextParameters(property(camelContext, org.apache.camel.support.jsse.SSLContextParameters.class, value)); return true;
-        case "sslenabledprotocols":
-        case "sslEnabledProtocols": target.getConfiguration().setSslEnabledProtocols(property(camelContext, java.lang.String.class, value)); return true;
-        case "sslendpointalgorithm":
-        case "sslEndpointAlgorithm": target.getConfiguration().setSslEndpointAlgorithm(property(camelContext, java.lang.String.class, value)); return true;
-        case "sslkeypassword":
-        case "sslKeyPassword": target.getConfiguration().setSslKeyPassword(property(camelContext, java.lang.String.class, value)); return true;
-        case "sslkeymanageralgorithm":
-        case "sslKeymanagerAlgorithm": target.getConfiguration().setSslKeymanagerAlgorithm(property(camelContext, java.lang.String.class, value)); return true;
-        case "sslkeystorelocation":
-        case "sslKeystoreLocation": target.getConfiguration().setSslKeystoreLocation(property(camelContext, java.lang.String.class, value)); return true;
-        case "sslkeystorepassword":
-        case "sslKeystorePassword": target.getConfiguration().setSslKeystorePassword(property(camelContext, java.lang.String.class, value)); return true;
-        case "sslkeystoretype":
-        case "sslKeystoreType": target.getConfiguration().setSslKeystoreType(property(camelContext, java.lang.String.class, value)); return true;
-        case "sslprotocol":
-        case "sslProtocol": target.getConfiguration().setSslProtocol(property(camelContext, java.lang.String.class, value)); return true;
-        case "sslprovider":
-        case "sslProvider": target.getConfiguration().setSslProvider(property(camelContext, java.lang.String.class, value)); return true;
-        case "ssltrustmanageralgorithm":
-        case "sslTrustmanagerAlgorithm": target.getConfiguration().setSslTrustmanagerAlgorithm(property(camelContext, java.lang.String.class, value)); return true;
-        case "ssltruststorelocation":
-        case "sslTruststoreLocation": target.getConfiguration().setSslTruststoreLocation(property(camelContext, java.lang.String.class, value)); return true;
-        case "ssltruststorepassword":
-        case "sslTruststorePassword": target.getConfiguration().setSslTruststorePassword(property(camelContext, java.lang.String.class, value)); return true;
-        case "ssltruststoretype":
-        case "sslTruststoreType": target.getConfiguration().setSslTruststoreType(property(camelContext, java.lang.String.class, value)); return true;
-        case "synchronous": target.setSynchronous(property(camelContext, boolean.class, value)); return true;
-        case "topicispattern":
-        case "topicIsPattern": target.getConfiguration().setTopicIsPattern(property(camelContext, boolean.class, value)); return true;
-        case "valuedeserializer":
-        case "valueDeserializer": target.getConfiguration().setValueDeserializer(property(camelContext, java.lang.String.class, value)); return true;
-        case "workerpool":
-        case "workerPool": target.getConfiguration().setWorkerPool(property(camelContext, java.util.concurrent.ExecutorService.class, value)); return true;
-        case "workerpoolcoresize":
-        case "workerPoolCoreSize": target.getConfiguration().setWorkerPoolCoreSize(property(camelContext, java.lang.Integer.class, value)); return true;
-        case "workerpoolmaxsize":
-        case "workerPoolMaxSize": target.getConfiguration().setWorkerPoolMaxSize(property(camelContext, java.lang.Integer.class, value)); return true;
-        default: return false;
-        }
-    }
-
-    @Override
-    public Map<String, Object> getAllOptions(Object target) {
-        return ALL_OPTIONS;
-    }
-
-    @Override
-    public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
-        KafkaEndpoint target = (KafkaEndpoint) obj;
-        switch (ignoreCase ? name.toLowerCase() : name) {
-        case "additionalproperties":
-        case "additionalProperties": return target.getConfiguration().getAdditionalProperties();
-        case "allowmanualcommit":
-        case "allowManualCommit": return target.getConfiguration().isAllowManualCommit();
-        case "autocommitenable":
-        case "autoCommitEnable": return target.getConfiguration().getAutoCommitEnable();
-        case "autocommitintervalms":
-        case "autoCommitIntervalMs": return target.getConfiguration().getAutoCommitIntervalMs();
-        case "autocommitonstop":
-        case "autoCommitOnStop": return target.getConfiguration().getAutoCommitOnStop();
-        case "autooffsetreset":
-        case "autoOffsetReset": return target.getConfiguration().getAutoOffsetReset();
-        case "basicpropertybinding":
-        case "basicPropertyBinding": return target.isBasicPropertyBinding();
-        case "breakonfirsterror":
-        case "breakOnFirstError": return target.getConfiguration().isBreakOnFirstError();
-        case "bridgeerrorhandler":
-        case "bridgeErrorHandler": return target.isBridgeErrorHandler();
-        case "brokers": return target.getConfiguration().getBrokers();
-        case "buffermemorysize":
-        case "bufferMemorySize": return target.getConfiguration().getBufferMemorySize();
-        case "checkcrcs":
-        case "checkCrcs": return target.getConfiguration().getCheckCrcs();
-        case "clientid":
-        case "clientId": return target.getConfiguration().getClientId();
-        case "compressioncodec":
-        case "compressionCodec": return target.getConfiguration().getCompressionCodec();
-        case "connectionmaxidlems":
-        case "connectionMaxIdleMs": return target.getConfiguration().getConnectionMaxIdleMs();
-        case "consumerrequesttimeoutms":
-        case "consumerRequestTimeoutMs": return target.getConfiguration().getConsumerRequestTimeoutMs();
-        case "consumerstreams":
-        case "consumerStreams": return target.getConfiguration().getConsumerStreams();
-        case "consumerscount":
-        case "consumersCount": return target.getConfiguration().getConsumersCount();
-        case "enableidempotence":
-        case "enableIdempotence": return target.getConfiguration().isEnableIdempotence();
-        case "exceptionhandler":
-        case "exceptionHandler": return target.getExceptionHandler();
-        case "exchangepattern":
-        case "exchangePattern": return target.getExchangePattern();
-        case "fetchmaxbytes":
-        case "fetchMaxBytes": return target.getConfiguration().getFetchMaxBytes();
-        case "fetchminbytes":
-        case "fetchMinBytes": return target.getConfiguration().getFetchMinBytes();
-        case "fetchwaitmaxms":
-        case "fetchWaitMaxMs": return target.getConfiguration().getFetchWaitMaxMs();
-        case "groupid":
-        case "groupId": return target.getConfiguration().getGroupId();
-        case "headerfilterstrategy":
-        case "headerFilterStrategy": return target.getConfiguration().getHeaderFilterStrategy();
-        case "heartbeatintervalms":
-        case "heartbeatIntervalMs": return target.getConfiguration().getHeartbeatIntervalMs();
-        case "interceptorclasses":
-        case "interceptorClasses": return target.getConfiguration().getInterceptorClasses();
-        case "kafkaheaderdeserializer":
-        case "kafkaHeaderDeserializer": return target.getConfiguration().getKafkaHeaderDeserializer();
-        case "kafkaheaderserializer":
-        case "kafkaHeaderSerializer": return target.getConfiguration().getKafkaHeaderSerializer();
-        case "kerberosbeforereloginmintime":
-        case "kerberosBeforeReloginMinTime": return target.getConfiguration().getKerberosBeforeReloginMinTime();
-        case "kerberosinitcmd":
-        case "kerberosInitCmd": return target.getConfiguration().getKerberosInitCmd();
-        case "kerberosprincipaltolocalrules":
-        case "kerberosPrincipalToLocalRules": return target.getConfiguration().getKerberosPrincipalToLocalRules();
-        case "kerberosrenewjitter":
-        case "kerberosRenewJitter": return target.getConfiguration().getKerberosRenewJitter();
-        case "kerberosrenewwindowfactor":
-        case "kerberosRenewWindowFactor": return target.getConfiguration().getKerberosRenewWindowFactor();
-        case "key": return target.getConfiguration().getKey();
-        case "keydeserializer":
-        case "keyDeserializer": return target.getConfiguration().getKeyDeserializer();
-        case "keyserializer":
-        case "keySerializer": return target.getConfiguration().getKeySerializer();
-        case "lazystartproducer":
-        case "lazyStartProducer": return target.isLazyStartProducer();
-        case "lingerms":
-        case "lingerMs": return target.getConfiguration().getLingerMs();
-        case "maxblockms":
-        case "maxBlockMs": return target.getConfiguration().getMaxBlockMs();
-        case "maxinflightrequest":
-        case "maxInFlightRequest": return target.getConfiguration().getMaxInFlightRequest();
-        case "maxpartitionfetchbytes":
-        case "maxPartitionFetchBytes": return target.getConfiguration().getMaxPartitionFetchBytes();
-        case "maxpollintervalms":
-        case "maxPollIntervalMs": return target.getConfiguration().getMaxPollIntervalMs();
-        case "maxpollrecords":
-        case "maxPollRecords": return target.getConfiguration().getMaxPollRecords();
-        case "maxrequestsize":
-        case "maxRequestSize": return target.getConfiguration().getMaxRequestSize();
-        case "metadatamaxagems":
-        case "metadataMaxAgeMs": return target.getConfiguration().getMetadataMaxAgeMs();
-        case "metricreporters":
-        case "metricReporters": return target.getConfiguration().getMetricReporters();
-        case "metricssamplewindowms":
-        case "metricsSampleWindowMs": return target.getConfiguration().getMetricsSampleWindowMs();
-        case "noofmetricssample":
-        case "noOfMetricsSample": return target.getConfiguration().getNoOfMetricsSample();
-        case "offsetrepository":
-        case "offsetRepository": return target.getConfiguration().getOffsetRepository();
-        case "partitionassignor":
-        case "partitionAssignor": return target.getConfiguration().getPartitionAssignor();
-        case "partitionkey":
-        case "partitionKey": return target.getConfiguration().getPartitionKey();
-        case "partitioner": return target.getConfiguration().getPartitioner();
-        case "polltimeoutms":
-        case "pollTimeoutMs": return target.getConfiguration().getPollTimeoutMs();
-        case "producerbatchsize":
-        case "producerBatchSize": return target.getConfiguration().getProducerBatchSize();
-        case "queuebufferingmaxmessages":
-        case "queueBufferingMaxMessages": return target.getConfiguration().getQueueBufferingMaxMessages();
-        case "receivebufferbytes":
-        case "receiveBufferBytes": return target.getConfiguration().getReceiveBufferBytes();
-        case "reconnectbackoffmaxms":
-        case "reconnectBackoffMaxMs": return target.getConfiguration().getReconnectBackoffMaxMs();
-        case "reconnectbackoffms":
-        case "reconnectBackoffMs": return target.getConfiguration().getReconnectBackoffMs();
-        case "recordmetadata":
-        case "recordMetadata": return target.getConfiguration().isRecordMetadata();
-        case "requestrequiredacks":
-        case "requestRequiredAcks": return target.getConfiguration().getRequestRequiredAcks();
-        case "requesttimeoutms":
-        case "requestTimeoutMs": return target.getConfiguration().getRequestTimeoutMs();
-        case "retries": return target.getConfiguration().getRetries();
-        case "retrybackoffms":
-        case "retryBackoffMs": return target.getConfiguration().getRetryBackoffMs();
-        case "sasljaasconfig":
-        case "saslJaasConfig": return target.getConfiguration().getSaslJaasConfig();
-        case "saslkerberosservicename":
-        case "saslKerberosServiceName": return target.getConfiguration().getSaslKerberosServiceName();
-        case "saslmechanism":
-        case "saslMechanism": return target.getConfiguration().getSaslMechanism();
-        case "schemaregistryurl":
-        case "schemaRegistryURL": return target.getConfiguration().getSchemaRegistryURL();
-        case "securityprotocol":
-        case "securityProtocol": return target.getConfiguration().getSecurityProtocol();
-        case "seekto":
-        case "seekTo": return target.getConfiguration().getSeekTo();
-        case "sendbufferbytes":
-        case "sendBufferBytes": return target.getConfiguration().getSendBufferBytes();
-        case "serializerclass":
-        case "serializerClass": return target.getConfiguration().getSerializerClass();
-        case "sessiontimeoutms":
-        case "sessionTimeoutMs": return target.getConfiguration().getSessionTimeoutMs();
-        case "shutdowntimeout":
-        case "shutdownTimeout": return target.getConfiguration().getShutdownTimeout();
-        case "specificavroreader":
-        case "specificAvroReader": return target.getConfiguration().isSpecificAvroReader();
-        case "sslciphersuites":
-        case "sslCipherSuites": return target.getConfiguration().getSslCipherSuites();
-        case "sslcontextparameters":
-        case "sslContextParameters": return target.getConfiguration().getSslContextParameters();
-        case "sslenabledprotocols":
-        case "sslEnabledProtocols": return target.getConfiguration().getSslEnabledProtocols();
-        case "sslendpointalgorithm":
-        case "sslEndpointAlgorithm": return target.getConfiguration().getSslEndpointAlgorithm();
-        case "sslkeypassword":
-        case "sslKeyPassword": return target.getConfiguration().getSslKeyPassword();
-        case "sslkeymanageralgorithm":
-        case "sslKeymanagerAlgorithm": return target.getConfiguration().getSslKeymanagerAlgorithm();
-        case "sslkeystorelocation":
-        case "sslKeystoreLocation": return target.getConfiguration().getSslKeystoreLocation();
-        case "sslkeystorepassword":
-        case "sslKeystorePassword": return target.getConfiguration().getSslKeystorePassword();
-        case "sslkeystoretype":
-        case "sslKeystoreType": return target.getConfiguration().getSslKeystoreType();
-        case "sslprotocol":
-        case "sslProtocol": return target.getConfiguration().getSslProtocol();
-        case "sslprovider":
-        case "sslProvider": return target.getConfiguration().getSslProvider();
-        case "ssltrustmanageralgorithm":
-        case "sslTrustmanagerAlgorithm": return target.getConfiguration().getSslTrustmanagerAlgorithm();
-        case "ssltruststorelocation":
-        case "sslTruststoreLocation": return target.getConfiguration().getSslTruststoreLocation();
-        case "ssltruststorepassword":
-        case "sslTruststorePassword": return target.getConfiguration().getSslTruststorePassword();
-        case "ssltruststoretype":
-        case "sslTruststoreType": return target.getConfiguration().getSslTruststoreType();
-        case "synchronous": return target.isSynchronous();
-        case "topicispattern":
-        case "topicIsPattern": return target.getConfiguration().isTopicIsPattern();
-        case "valuedeserializer":
-        case "valueDeserializer": return target.getConfiguration().getValueDeserializer();
-        case "workerpool":
-        case "workerPool": return target.getConfiguration().getWorkerPool();
-        case "workerpoolcoresize":
-        case "workerPoolCoreSize": return target.getConfiguration().getWorkerPoolCoreSize();
-        case "workerpoolmaxsize":
-        case "workerPoolMaxSize": return target.getConfiguration().getWorkerPoolMaxSize();
-        default: return null;
-        }
-    }
-}
-
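
On the consumer side the endpoint configurer removed above resolves options such as brokers, groupId, keyDeserializer and valueDeserializer in the same way. A minimal consumer sketch, again with a hypothetical topic, group id and the Kafka StringDeserializer classes as placeholders:

import org.apache.camel.builder.RouteBuilder;

public class KafkaDeserializerOptionsRoute extends RouteBuilder {
    @Override
    public void configure() throws Exception {
        // Consumer endpoint: keyDeserializer and valueDeserializer are the
        // options handled by the generated endpoint configurer above.
        // Topic, group id and broker address are placeholder values.
        from("kafka:example-topic"
                + "?brokers=localhost:9092"
                + "&groupId=example-group"
                + "&keyDeserializer=org.apache.kafka.common.serialization.StringDeserializer"
                + "&valueDeserializer=org.apache.kafka.common.serialization.StringDeserializer")
            .to("log:kafka-consumer");
    }
}
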
diff --git a/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointUriFactory.java b/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointUriFactory.java
deleted file mode 100644
index 1aae4ac..0000000
--- a/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointUriFactory.java
+++ /dev/null
@@ -1,163 +0,0 @@
-/* Generated by camel build tools - do NOT edit this file! */
-package org.apache.camel.component.kafka;
-
-import java.net.URISyntaxException;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.camel.spi.EndpointUriFactory;
-
-/**
- * Generated by camel build tools - do NOT edit this file!
- */
-public class KafkaEndpointUriFactory extends org.apache.camel.support.component.EndpointUriFactorySupport implements EndpointUriFactory {
-
-    private static final String BASE = ":topic";
-
-    private static final Set<String> PROPERTY_NAMES;
-    private static final Set<String> SECRET_PROPERTY_NAMES;
-    static {
-        Set<String> props = new HashSet<>(98);
-        props.add("synchronous");
-        props.add("queueBufferingMaxMessages");
-        props.add("allowManualCommit");
-        props.add("consumersCount");
-        props.add("receiveBufferBytes");
-        props.add("reconnectBackoffMaxMs");
-        props.add("valueDeserializer");
-        props.add("metricReporters");
-        props.add("sslTruststoreType");
-        props.add("sendBufferBytes");
-        props.add("heartbeatIntervalMs");
-        props.add("consumerStreams");
-        props.add("kafkaHeaderSerializer");
-        props.add("interceptorClasses");
-        props.add("sslKeystoreType");
-        props.add("breakOnFirstError");
-        props.add("requestRequiredAcks");
-        props.add("enableIdempotence");
-        props.add("fetchWaitMaxMs");
-        props.add("retries");
-        props.add("maxPollRecords");
-        props.add("additionalProperties");
-        props.add("keyDeserializer");
-        props.add("producerBatchSize");
-        props.add("retryBackoffMs");
-        props.add("brokers");
-        props.add("metricsSampleWindowMs");
-        props.add("sslContextParameters");
-        props.add("sslKeyPassword");
-        props.add("keySerializer");
-        props.add("noOfMetricsSample");
-        props.add("maxPartitionFetchBytes");
-        props.add("partitionKey");
-        props.add("headerFilterStrategy");
-        props.add("sslTruststorePassword");
-        props.add("sessionTimeoutMs");
-        props.add("key");
-        props.add("topicIsPattern");
-        props.add("sslTruststoreLocation");
-        props.add("clientId");
-        props.add("maxRequestSize");
-        props.add("recordMetadata");
-        props.add("sslTrustmanagerAlgorithm");
-        props.add("compressionCodec");
-        props.add("autoCommitOnStop");
-        props.add("workerPoolCoreSize");
-        props.add("autoCommitEnable");
-        props.add("consumerRequestTimeoutMs");
-        props.add("maxPollIntervalMs");
-        props.add("kerberosInitCmd");
-        props.add("workerPoolMaxSize");
-        props.add("reconnectBackoffMs");
-        props.add("groupId");
-        props.add("offsetRepository");
-        props.add("kerberosRenewJitter");
-        props.add("sslProvider");
-        props.add("serializerClass");
-        props.add("saslKerberosServiceName");
-        props.add("bridgeErrorHandler");
-        props.add("shutdownTimeout");
-        props.add("saslMechanism");
-        props.add("workerPool");
-        props.add("lazyStartProducer");
-        props.add("sslKeystorePassword");
-        props.add("sslEndpointAlgorithm");
-        props.add("topic");
-        props.add("sslProtocol");
-        props.add("sslKeymanagerAlgorithm");
-        props.add("pollTimeoutMs");
-        props.add("exceptionHandler");
-        props.add("maxBlockMs");
-        props.add("kerberosBeforeReloginMinTime");
-        props.add("bufferMemorySize");
-        props.add("basicPropertyBinding");
-        props.add("metadataMaxAgeMs");
-        props.add("sslCipherSuites");
-        props.add("specificAvroReader");
-        props.add("saslJaasConfig");
-        props.add("fetchMinBytes");
-        props.add("connectionMaxIdleMs");
-        props.add("lingerMs");
-        props.add("kerberosRenewWindowFactor");
-        props.add("securityProtocol");
-        props.add("autoCommitIntervalMs");
-        props.add("partitioner");
-        props.add("kerberosPrincipalToLocalRules");
-        props.add("sslEnabledProtocols");
-        props.add("sslKeystoreLocation");
-        props.add("schemaRegistryURL");
-        props.add("maxInFlightRequest");
-        props.add("exchangePattern");
-        props.add("autoOffsetReset");
-        props.add("seekTo");
-        props.add("requestTimeoutMs");
-        props.add("kafkaHeaderDeserializer");
-        props.add("fetchMaxBytes");
-        props.add("checkCrcs");
-        props.add("partitionAssignor");
-        PROPERTY_NAMES = Collections.unmodifiableSet(props);
-        Set<String> secretProps = new HashSet<>(4);
-        secretProps.add("sslKeystorePassword");
-        secretProps.add("sslTruststorePassword");
-        secretProps.add("saslJaasConfig");
-        secretProps.add("sslKeyPassword");
-        SECRET_PROPERTY_NAMES = Collections.unmodifiableSet(secretProps);
-    }
-
-    @Override
-    public boolean isEnabled(String scheme) {
-        return "kafka".equals(scheme);
-    }
-
-    @Override
-    public String buildUri(String scheme, Map<String, Object> properties, boolean encode) throws URISyntaxException {
-        String syntax = scheme + BASE;
-        String uri = syntax;
-
-        Map<String, Object> copy = new HashMap<>(properties);
-
-        uri = buildPathParameter(syntax, uri, "topic", null, true, copy);
-        uri = buildQueryParameters(uri, copy, encode);
-        return uri;
-    }
-
-    @Override
-    public Set<String> propertyNames() {
-        return PROPERTY_NAMES;
-    }
-
-    @Override
-    public Set<String> secretPropertyNames() {
-        return SECRET_PROPERTY_NAMES;
-    }
-
-    @Override
-    public boolean isLenientProperties() {
-        return false;
-    }
-}
-
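The URI factory above assembles a kafka endpoint URI from the ":topic" path plus query parameters drawn from the property names it declares (brokers, groupId, keySerializer, valueDeserializer, and so on). A minimal route sketch using a few of those options; the broker address, topic names and group id are placeholders, and running it requires camel-kafka on the classpath and a reachable broker:

    import org.apache.camel.CamelContext;
    import org.apache.camel.builder.RouteBuilder;
    import org.apache.camel.impl.DefaultCamelContext;

    public class KafkaRouteSketch {
        public static void main(String[] args) throws Exception {
            CamelContext context = new DefaultCamelContext();
            context.addRoutes(new RouteBuilder() {
                @Override
                public void configure() {
                    // Consume from one topic and forward the payload to another.
                    from("kafka:inbound-topic?brokers=localhost:9092&groupId=example-group"
                            + "&valueDeserializer=org.apache.kafka.common.serialization.StringDeserializer")
                        .to("kafka:outbound-topic?brokers=localhost:9092"
                            + "&keySerializer=org.apache.kafka.common.serialization.StringSerializer");
                }
            });
            context.start();
            Thread.sleep(5000); // let the route run briefly before shutting down
            context.stop();
        }
    }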
diff --git a/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/component.properties b/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/component.properties
deleted file mode 100644
index c2670a0..0000000
--- a/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/component.properties
+++ /dev/null
@@ -1,7 +0,0 @@
-# Generated by camel build tools - do NOT edit this file!
-components=kafka
-groupId=org.apache.camel
-artifactId=camel-kafka
-version=3.7.0-SNAPSHOT
-projectName=Camel :: Kafka
-projectDescription=Camel Kafka support
diff --git a/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/component/kafka b/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/component/kafka
deleted file mode 100644
index e34127d..0000000
--- a/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/component/kafka
+++ /dev/null
@@ -1,2 +0,0 @@
-# Generated by camel build tools - do NOT edit this file!
-class=org.apache.camel.component.kafka.KafkaComponent
diff --git a/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/configurer/kafka-component b/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/configurer/kafka-component
deleted file mode 100644
index 6b9be66..0000000
--- a/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/configurer/kafka-component
+++ /dev/null
@@ -1,2 +0,0 @@
-# Generated by camel build tools - do NOT edit this file!
-class=org.apache.camel.component.kafka.KafkaComponentConfigurer
diff --git a/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/configurer/kafka-endpoint b/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/configurer/kafka-endpoint
deleted file mode 100644
index aa0bd1a..0000000
--- a/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/configurer/kafka-endpoint
+++ /dev/null
@@ -1,2 +0,0 @@
-# Generated by camel build tools - do NOT edit this file!
-class=org.apache.camel.component.kafka.KafkaEndpointConfigurer
diff --git a/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/urifactory/kafka-endpoint b/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/urifactory/kafka-endpoint
deleted file mode 100644
index dce8718..0000000
--- a/components/camel-kafka/src/generated/resources/META-INF/services/org/apache/camel/urifactory/kafka-endpoint
+++ /dev/null
@@ -1,2 +0,0 @@
-# Generated by camel build tools - do NOT edit this file!
-class=org.apache.camel.component.kafka.KafkaEndpointUriFactory
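Each of the service descriptors above is a two-line properties file whose class key names the implementation to load for that extension point. A small sketch of how such a descriptor can be read and instantiated reflectively; this only illustrates the file format and is not Camel's actual resolver:

    import java.io.InputStream;
    import java.util.Properties;

    public class ServiceDescriptorSketch {
        public static void main(String[] args) throws Exception {
            // Same path layout as the descriptors above; the file must be on the classpath.
            String path = "META-INF/services/org/apache/camel/component/kafka";
            try (InputStream in = ServiceDescriptorSketch.class.getClassLoader().getResourceAsStream(path)) {
                if (in == null) {
                    throw new IllegalStateException("Descriptor not found: " + path);
                }
                Properties props = new Properties();
                props.load(in); // the leading '#' comment line is ignored
                String className = props.getProperty("class");
                Object component = Class.forName(className).getDeclaredConstructor().newInstance();
                System.out.println("Loaded: " + component.getClass().getName());
            }
        }
    }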
diff --git a/components/camel-kafka/src/generated/resources/org/apache/camel/component/kafka/kafka.json b/components/camel-kafka/src/generated/resources/org/apache/camel/component/kafka/kafka.json
deleted file mode 100644
index 20e7293..0000000
--- a/components/camel-kafka/src/generated/resources/org/apache/camel/component/kafka/kafka.json
+++ /dev/null
@@ -1,223 +0,0 @@
-{
-  "component": {
-    "kind": "component",
-    "name": "kafka",
-    "title": "Kafka",
-    "description": "Sent and receive messages to\/from an Apache Kafka broker.",
-    "deprecated": false,
-    "firstVersion": "2.13.0",
-    "label": "messaging",
-    "javaType": "org.apache.camel.component.kafka.KafkaComponent",
-    "supportLevel": "Stable",
-    "groupId": "org.apache.camel",
-    "artifactId": "camel-kafka",
-    "version": "3.7.0-SNAPSHOT",
-    "scheme": "kafka",
-    "extendsScheme": "",
-    "syntax": "kafka:topic",
-    "async": false,
-    "api": false,
-    "consumerOnly": false,
-    "producerOnly": false,
-    "lenientProperties": false
-  },
-  "componentProperties": {
-    "additionalProperties": { "kind": "property", "displayName": "Additional Properties", "group": "common", "label": "common", "required": false, "type": "object", "javaType": "java.util.Map<java.lang.String, java.lang.Object>", "prefix": "additionalProperties.", "multiValue": true, "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Sets additional properties for either [...]
-    "brokers": { "kind": "property", "displayName": "Brokers", "group": "common", "label": "common", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "URL of the Kafka brokers to use. The format is host1:port1,host2:port2, and the list can be a subset of brokers or a VIP pointing to a subset of brokers [...]
-    "clientId": { "kind": "property", "displayName": "Client Id", "group": "common", "label": "common", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The client id is a user-specified string sent in each request to help trace calls. It should logically identify the application making the request." },
-    "configuration": { "kind": "property", "displayName": "Configuration", "group": "common", "label": "", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.KafkaConfiguration", "deprecated": false, "secret": false, "description": "Allows to pre-configure the Kafka component with common options that the endpoints will reuse." },
-    "headerFilterStrategy": { "kind": "property", "displayName": "Header Filter Strategy", "group": "common", "label": "common", "required": false, "type": "object", "javaType": "org.apache.camel.spi.HeaderFilterStrategy", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom HeaderFilterStrategy to filter header to and from Camel message." },
-    "reconnectBackoffMaxMs": { "kind": "property", "displayName": "Reconnect Backoff Max Ms", "group": "common", "label": "common", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "1000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum amount of time in milliseconds to wait when reconnecting to a broker that has repea [...]
-    "shutdownTimeout": { "kind": "property", "displayName": "Shutdown Timeout", "group": "common", "label": "common", "required": false, "type": "integer", "javaType": "int", "deprecated": false, "secret": false, "defaultValue": 30000, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Timeout in milli seconds to wait gracefully for the consumer or producer to shutdown and terminate its worker threads." },
-    "allowManualCommit": { "kind": "property", "displayName": "Allow Manual Commit", "group": "consumer", "label": "consumer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Whether to allow doing manual commits via KafkaManualCommit. If this option is enabled then an instance of Kafk [...]
-    "autoCommitEnable": { "kind": "property", "displayName": "Auto Commit Enable", "group": "consumer", "label": "consumer", "required": false, "type": "boolean", "javaType": "java.lang.Boolean", "deprecated": false, "secret": false, "defaultValue": "true", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "If true, periodically commit to ZooKeeper the offset of messages already fetched by the consumer. This [...]
-    "autoCommitIntervalMs": { "kind": "property", "displayName": "Auto Commit Interval Ms", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "5000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The frequency in ms that the consumer offsets are committed to zookeeper." },
-    "autoCommitOnStop": { "kind": "property", "displayName": "Auto Commit On Stop", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "enum": [ "sync", "async", "none" ], "deprecated": false, "secret": false, "defaultValue": "sync", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Whether to perform an explicit auto commit when the consumer stops [...]
-    "autoOffsetReset": { "kind": "property", "displayName": "Auto Offset Reset", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "enum": [ "latest", "earliest", "none" ], "deprecated": false, "secret": false, "defaultValue": "latest", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "What to do when there is no initial offset in ZooKeeper or if  [...]
-    "breakOnFirstError": { "kind": "property", "displayName": "Break On First Error", "group": "consumer", "label": "consumer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "This options controls what happens when a consumer is processing an exchange and it fails. If the option is fa [...]
-    "bridgeErrorHandler": { "kind": "property", "displayName": "Bridge Error Handler", "group": "consumer", "label": "consumer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "description": "Allows for bridging the consumer to the Camel routing Error Handler, which mean any exceptions occurred while the consumer is trying to pickup incoming messages, or the likes, will now be processed as a message and handled by [...]
-    "checkCrcs": { "kind": "property", "displayName": "Check Crcs", "group": "consumer", "label": "consumer", "required": false, "type": "boolean", "javaType": "java.lang.Boolean", "deprecated": false, "secret": false, "defaultValue": "true", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Automatically check the CRC32 of the records consumed. This ensures no on-the-wire or on-disk corruption to the messa [...]
-    "consumerRequestTimeoutMs": { "kind": "property", "displayName": "Consumer Request Timeout Ms", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "40000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The configuration controls the maximum amount of time the client will wait for the r [...]
-    "consumersCount": { "kind": "property", "displayName": "Consumers Count", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "int", "deprecated": false, "secret": false, "defaultValue": 1, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The number of consumers that connect to kafka server" },
-    "consumerStreams": { "kind": "property", "displayName": "Consumer Streams", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "int", "deprecated": false, "secret": false, "defaultValue": 10, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Number of concurrent consumers on the consumer" },
-    "fetchMaxBytes": { "kind": "property", "displayName": "Fetch Max Bytes", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "52428800", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum amount of data the server should return for a fetch request This is not an absolute maximum, [...]
-    "fetchMinBytes": { "kind": "property", "displayName": "Fetch Min Bytes", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "1", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The minimum amount of data the server should return for a fetch request. If insufficient data is available the  [...]
-    "fetchWaitMaxMs": { "kind": "property", "displayName": "Fetch Wait Max Ms", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "500", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum amount of time the server will block before answering the fetch request if there isn't suffici [...]
-    "groupId": { "kind": "property", "displayName": "Group Id", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "A string that uniquely identifies the group of consumer processes to which this consumer belongs. By setting the same group id multiple processes i [...]
-    "heartbeatIntervalMs": { "kind": "property", "displayName": "Heartbeat Interval Ms", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "3000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The expected time between heartbeats to the consumer coordinator when using Kafka's group manage [...]
-    "kafkaHeaderDeserializer": { "kind": "property", "displayName": "Kafka Header Deserializer", "group": "consumer", "label": "consumer", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom KafkaHeaderDeserializer to deserialize kafka headers [...]
-    "keyDeserializer": { "kind": "property", "displayName": "Key Deserializer", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringDeserializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Deserializer class for key that implements the Deserial [...]
-    "maxPartitionFetchBytes": { "kind": "property", "displayName": "Max Partition Fetch Bytes", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "1048576", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum amount of data per-partition the server will return. The maximum total mem [...]
-    "maxPollIntervalMs": { "kind": "property", "displayName": "Max Poll Interval Ms", "group": "consumer", "label": "consumer", "required": false, "type": "duration", "javaType": "java.lang.Long", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum delay between invocations of poll() when using consumer group management. This places an upper bound on the amoun [...]
-    "maxPollRecords": { "kind": "property", "displayName": "Max Poll Records", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "500", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum number of records returned in a single call to poll()" },
-    "offsetRepository": { "kind": "property", "displayName": "Offset Repository", "group": "consumer", "label": "consumer", "required": false, "type": "object", "javaType": "org.apache.camel.spi.StateRepository<java.lang.String, java.lang.String>", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The offset repository to use in order to locally store the offset of each [...]
-    "partitionAssignor": { "kind": "property", "displayName": "Partition Assignor", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.clients.consumer.RangeAssignor", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The class name of the partition assignment strategy that the [...]
-    "pollTimeoutMs": { "kind": "property", "displayName": "Poll Timeout Ms", "group": "consumer", "label": "consumer", "required": false, "type": "duration", "javaType": "java.lang.Long", "deprecated": false, "secret": false, "defaultValue": "5000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The timeout used when polling the KafkaConsumer." },
-    "seekTo": { "kind": "property", "displayName": "Seek To", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "enum": [ "beginning", "end" ], "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Set if KafkaConsumer will read from beginning or end on startup: beginning : read from beginning end : read from end  [...]
-    "sessionTimeoutMs": { "kind": "property", "displayName": "Session Timeout Ms", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "10000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The timeout used to detect failures when using Kafka's group management facilities." },
-    "specificAvroReader": { "kind": "property", "displayName": "Specific Avro Reader", "group": "consumer", "label": "confluent,consumer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "This enables the use of a specific Avro reader for use with the Confluent Platform schema registry  [...]
-    "topicIsPattern": { "kind": "property", "displayName": "Topic Is Pattern", "group": "consumer", "label": "consumer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Whether the topic is a pattern (regular expression). This can be used to subscribe to dynamic number of topics matchi [...]
-    "valueDeserializer": { "kind": "property", "displayName": "Value Deserializer", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringDeserializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Deserializer class for value that implements the De [...]
-    "kafkaManualCommitFactory": { "kind": "property", "displayName": "Kafka Manual Commit Factory", "group": "consumer (advanced)", "label": "consumer,advanced", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.KafkaManualCommitFactory", "deprecated": false, "secret": false, "description": "Factory to use for creating KafkaManualCommit instances. This allows to plugin a custom factory to create custom KafkaManualCommit instances in case special logic is  [...]
-    "bufferMemorySize": { "kind": "property", "displayName": "Buffer Memory Size", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "33554432", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The total bytes of memory the producer can use to buffer records waiting to be sent to the server. [...]
-    "compressionCodec": { "kind": "property", "displayName": "Compression Codec", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "enum": [ "none", "gzip", "snappy", "lz4" ], "deprecated": false, "secret": false, "defaultValue": "none", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "This parameter allows you to specify the compression codec f [...]
-    "connectionMaxIdleMs": { "kind": "property", "displayName": "Connection Max Idle Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "540000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Close idle connections after the number of milliseconds specified by this config." },
-    "enableIdempotence": { "kind": "property", "displayName": "Enable Idempotence", "group": "producer", "label": "producer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "If set to 'true' the producer will ensure that exactly one copy of each message is written in the stream. If 'fa [...]
-    "kafkaHeaderSerializer": { "kind": "property", "displayName": "Kafka Header Serializer", "group": "producer", "label": "producer", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.serde.KafkaHeaderSerializer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom KafkaHeaderSerializer to serialize kafka headers values" },
-    "key": { "kind": "property", "displayName": "Key", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The record key (or null if no key is specified). If this option has been configured then it take precedence over header KafkaConstants#KEY" },
-    "keySerializer": { "kind": "property", "displayName": "Key Serializer", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringSerializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The serializer class for keys (defaults to the same as for me [...]
-    "lazyStartProducer": { "kind": "property", "displayName": "Lazy Start Producer", "group": "producer", "label": "producer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "description": "Whether the producer should be started lazy (on the first message). By starting lazy you can use this to allow CamelContext and routes to startup in situations where a producer may otherwise fail during starting and cause the r [...]
-    "lingerMs": { "kind": "property", "displayName": "Linger Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "0", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The producer groups together any records that arrive in between request transmissions into a single batched request. Norma [...]
-    "maxBlockMs": { "kind": "property", "displayName": "Max Block Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "60000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The configuration controls how long sending to kafka will block. These methods can be blocked for multiple reason [...]
-    "maxInFlightRequest": { "kind": "property", "displayName": "Max In Flight Request", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "5", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum number of unacknowledged requests the client will send on a single connection before blo [...]
-    "maxRequestSize": { "kind": "property", "displayName": "Max Request Size", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "1048576", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum size of a request. This is also effectively a cap on the maximum record size. Note that the [...]
-    "metadataMaxAgeMs": { "kind": "property", "displayName": "Metadata Max Age Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "300000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The period of time in milliseconds after which we force a refresh of metadata even if we haven't se [...]
-    "metricReporters": { "kind": "property", "displayName": "Metric Reporters", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "A list of classes to use as metrics reporters. Implementing the MetricReporter interface allows plugging in classes that will be no [...]
-    "metricsSampleWindowMs": { "kind": "property", "displayName": "Metrics Sample Window Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "30000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The number of samples maintained to compute metrics." },
-    "noOfMetricsSample": { "kind": "property", "displayName": "No Of Metrics Sample", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "2", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The number of samples maintained to compute metrics." },
-    "partitioner": { "kind": "property", "displayName": "Partitioner", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.clients.producer.internals.DefaultPartitioner", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The partitioner class for partitioning messages amongst su [...]
-    "partitionKey": { "kind": "property", "displayName": "Partition Key", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The partition to which the record will be sent (or null if no partition was specified). If this option has been configured then it take [...]
-    "producerBatchSize": { "kind": "property", "displayName": "Producer Batch Size", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "16384", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The producer will attempt to batch records together into fewer requests whenever multiple records a [...]
-    "queueBufferingMaxMessages": { "kind": "property", "displayName": "Queue Buffering Max Messages", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "10000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum number of unsent messages that can be queued up the producer when usin [...]
-    "receiveBufferBytes": { "kind": "property", "displayName": "Receive Buffer Bytes", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "65536", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The size of the TCP receive buffer (SO_RCVBUF) to use when reading data." },
-    "reconnectBackoffMs": { "kind": "property", "displayName": "Reconnect Backoff Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "50", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The amount of time to wait before attempting to reconnect to a given host. This avoids repeatedly co [...]
-    "recordMetadata": { "kind": "property", "displayName": "Record Metadata", "group": "producer", "label": "producer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": true, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Whether the producer should store the RecordMetadata results from sending to Kafka. The results are stored in a List co [...]
-    "requestRequiredAcks": { "kind": "property", "displayName": "Request Required Acks", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "enum": [ "-1", "0", "1", "all" ], "deprecated": false, "secret": false, "defaultValue": "1", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The number of acknowledgments the producer requires the leader to  [...]
-    "requestTimeoutMs": { "kind": "property", "displayName": "Request Timeout Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "30000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The amount of time the broker will wait trying to meet the request.required.acks requirement before s [...]
-    "retries": { "kind": "property", "displayName": "Retries", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "0", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Setting a value greater than zero will cause the client to resend any record whose send fails with a potentially transient er [...]
-    "retryBackoffMs": { "kind": "property", "displayName": "Retry Backoff Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "100", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Before each retry, the producer refreshes the metadata of relevant topics to see if a new leader has been e [...]
-    "sendBufferBytes": { "kind": "property", "displayName": "Send Buffer Bytes", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "131072", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Socket write buffer size" },
-    "serializerClass": { "kind": "property", "displayName": "Serializer Class", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringSerializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The serializer class for messages." },
-    "workerPool": { "kind": "property", "displayName": "Worker Pool", "group": "producer", "label": "producer", "required": false, "type": "object", "javaType": "java.util.concurrent.ExecutorService", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom worker pool for continue routing Exchange after kafka server has acknowledge the message that was sent to [...]
-    "workerPoolCoreSize": { "kind": "property", "displayName": "Worker Pool Core Size", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "10", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Number of core threads for the worker pool for continue routing Exchange after kafka server has ack [...]
-    "workerPoolMaxSize": { "kind": "property", "displayName": "Worker Pool Max Size", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "20", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Maximum number of threads for the worker pool for continue routing Exchange after kafka server has ac [...]
-    "basicPropertyBinding": { "kind": "property", "displayName": "Basic Property Binding", "group": "advanced", "label": "advanced", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": true, "secret": false, "defaultValue": false, "description": "Whether the component should use basic property binding (Camel 2.x) or the newer property binding with additional capabilities" },
-    "schemaRegistryURL": { "kind": "property", "displayName": "Schema Registry URL", "group": "confluent", "label": "confluent", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "URL of the Confluent Platform schema registry servers to use. The format is host1:port1,host2:port2. This is known as schema [...]
-    "interceptorClasses": { "kind": "property", "displayName": "Interceptor Classes", "group": "monitoring", "label": "common,monitoring", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Sets interceptors for producer or consumers. Producer interceptors have to be classes implementing org.apache.kafk [...]
-    "kerberosBeforeReloginMinTime": { "kind": "property", "displayName": "Kerberos Before Relogin Min Time", "group": "security", "label": "common,security", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "60000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Login thread sleep time between refresh attempts." },
-    "kerberosInitCmd": { "kind": "property", "displayName": "Kerberos Init Cmd", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "\/usr\/bin\/kinit", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Kerberos kinit command path. Default is \/usr\/bin\/kinit" },
-    "kerberosPrincipalToLocalRules": { "kind": "property", "displayName": "Kerberos Principal To Local Rules", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "DEFAULT", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "A list of rules for mapping from principal names to short names (t [...]
-    "kerberosRenewJitter": { "kind": "property", "displayName": "Kerberos Renew Jitter", "group": "security", "label": "common,security", "required": false, "type": "number", "javaType": "java.lang.Double", "deprecated": false, "secret": false, "defaultValue": "0.05", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Percentage of random jitter added to the renewal time." },
-    "kerberosRenewWindowFactor": { "kind": "property", "displayName": "Kerberos Renew Window Factor", "group": "security", "label": "common,security", "required": false, "type": "number", "javaType": "java.lang.Double", "deprecated": false, "secret": false, "defaultValue": "0.8", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Login thread will sleep until the specified window factor of time from last ref [...]
-    "saslJaasConfig": { "kind": "property", "displayName": "Sasl Jaas Config", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": true, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Expose the kafka sasl.jaas.config parameter Example: org.apache.kafka.common.security.plain.PlainLoginModule required username [...]
-    "saslKerberosServiceName": { "kind": "property", "displayName": "Sasl Kerberos Service Name", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The Kerberos principal name that Kafka runs as. This can be defined either in Kafka's JAAS config or in Ka [...]
-    "saslMechanism": { "kind": "property", "displayName": "Sasl Mechanism", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "GSSAPI", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The Simple Authentication and Security Layer (SASL) Mechanism used. For the valid values see http:\/\/ [...]
-    "securityProtocol": { "kind": "property", "displayName": "Security Protocol", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "PLAINTEXT", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Protocol used to communicate with brokers. SASL_PLAINTEXT, PLAINTEXT and SSL are supported" },
-    "sslCipherSuites": { "kind": "property", "displayName": "Ssl Cipher Suites", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "A list of cipher suites. This is a named combination of authentication, encryption, MAC and key exchange algorithm used to  [...]
-    "sslContextParameters": { "kind": "property", "displayName": "Ssl Context Parameters", "group": "security", "label": "common,security", "required": false, "type": "object", "javaType": "org.apache.camel.support.jsse.SSLContextParameters", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "SSL configuration using a Camel SSLContextParameters object. If configured it's [...]
-    "sslEnabledProtocols": { "kind": "property", "displayName": "Ssl Enabled Protocols", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The list of protocols enabled for SSL connections. TLSv1.2, TLSv1.1 and TLSv1 are enabled by default." },
-    "sslEndpointAlgorithm": { "kind": "property", "displayName": "Ssl Endpoint Algorithm", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "https", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The endpoint identification algorithm to validate server hostname using server certificate." },
-    "sslKeymanagerAlgorithm": { "kind": "property", "displayName": "Ssl Keymanager Algorithm", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "SunX509", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The algorithm used by key manager factory for SSL connections. Default value is th [...]
-    "sslKeyPassword": { "kind": "property", "displayName": "Ssl Key Password", "group": "security", "label": "producer,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": true, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The password of the private key in the key store file. This is optional for client." },
-    "sslKeystoreLocation": { "kind": "property", "displayName": "Ssl Keystore Location", "group": "security", "label": "producer,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The location of the key store file. This is optional for client and can be used for two-way authentication for cl [...]
-    "sslKeystorePassword": { "kind": "property", "displayName": "Ssl Keystore Password", "group": "security", "label": "producer,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": true, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The store password for the key store file.This is optional for client and only needed if ssl.keystore.location is  [...]
-    "sslKeystoreType": { "kind": "property", "displayName": "Ssl Keystore Type", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "JKS", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The file format of the key store file. This is optional for client. Default value is JKS" },
-    "sslProtocol": { "kind": "property", "displayName": "Ssl Protocol", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The SSL protocol used to generate the SSLContext. Default setting is TLS, which is fine for most cases. Allowed values in recent JVM [...]
-    "sslProvider": { "kind": "property", "displayName": "Ssl Provider", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The name of the security provider used for SSL connections. Default value is the default security provider of the JVM." },
-    "sslTrustmanagerAlgorithm": { "kind": "property", "displayName": "Ssl Trustmanager Algorithm", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "PKIX", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The algorithm used by trust manager factory for SSL connections. Default value is [...]
-    "sslTruststoreLocation": { "kind": "property", "displayName": "Ssl Truststore Location", "group": "security", "label": "producer,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The location of the trust store file." },
-    "sslTruststorePassword": { "kind": "property", "displayName": "Ssl Truststore Password", "group": "security", "label": "producer,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": true, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The password for the trust store file." },
-    "sslTruststoreType": { "kind": "property", "displayName": "Ssl Truststore Type", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "JKS", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The file format of the trust store file. Default value is JKS." },
-    "useGlobalSslContextParameters": { "kind": "property", "displayName": "Use Global Ssl Context Parameters", "group": "security", "label": "security", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "description": "Enable usage of global SSL context parameters." }
-  },
-  "properties": {
-    "topic": { "kind": "path", "displayName": "Topic", "group": "common", "label": "common", "required": true, "type": "string", "javaType": "java.lang.String", "deprecated": false, "deprecationNote": "", "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Name of the topic to use. On the consumer you can use comma to separate multiple topics. A producer can only send a message to a single to [...]
-    "additionalProperties": { "kind": "parameter", "displayName": "Additional Properties", "group": "common", "label": "common", "required": false, "type": "object", "javaType": "java.util.Map<java.lang.String, java.lang.Object>", "prefix": "additionalProperties.", "multiValue": true, "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Sets additional properties for eithe [...]
-    "brokers": { "kind": "parameter", "displayName": "Brokers", "group": "common", "label": "common", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "URL of the Kafka brokers to use. The format is host1:port1,host2:port2, and the list can be a subset of brokers or a VIP pointing to a subset of broker [...]
-    "clientId": { "kind": "parameter", "displayName": "Client Id", "group": "common", "label": "common", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The client id is a user-specified string sent in each request to help trace calls. It should logically identify the application making the request." },
-    "headerFilterStrategy": { "kind": "parameter", "displayName": "Header Filter Strategy", "group": "common", "label": "common", "required": false, "type": "object", "javaType": "org.apache.camel.spi.HeaderFilterStrategy", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom HeaderFilterStrategy to filter header to and from Camel message." },
-    "reconnectBackoffMaxMs": { "kind": "parameter", "displayName": "Reconnect Backoff Max Ms", "group": "common", "label": "common", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "1000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum amount of time in milliseconds to wait when reconnecting to a broker that has repe [...]
-    "shutdownTimeout": { "kind": "parameter", "displayName": "Shutdown Timeout", "group": "common", "label": "common", "required": false, "type": "integer", "javaType": "int", "deprecated": false, "secret": false, "defaultValue": 30000, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Timeout in milli seconds to wait gracefully for the consumer or producer to shutdown and terminate its worker threads." },
-    "allowManualCommit": { "kind": "parameter", "displayName": "Allow Manual Commit", "group": "consumer", "label": "consumer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Whether to allow doing manual commits via KafkaManualCommit. If this option is enabled then an instance of Kaf [...]
-    "autoCommitEnable": { "kind": "parameter", "displayName": "Auto Commit Enable", "group": "consumer", "label": "consumer", "required": false, "type": "boolean", "javaType": "java.lang.Boolean", "deprecated": false, "secret": false, "defaultValue": "true", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "If true, periodically commit to ZooKeeper the offset of messages already fetched by the consumer. Thi [...]
-    "autoCommitIntervalMs": { "kind": "parameter", "displayName": "Auto Commit Interval Ms", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "5000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The frequency in ms that the consumer offsets are committed to zookeeper." },
-    "autoCommitOnStop": { "kind": "parameter", "displayName": "Auto Commit On Stop", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "enum": [ "sync", "async", "none" ], "deprecated": false, "secret": false, "defaultValue": "sync", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Whether to perform an explicit auto commit when the consumer stop [...]
-    "autoOffsetReset": { "kind": "parameter", "displayName": "Auto Offset Reset", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "enum": [ "latest", "earliest", "none" ], "deprecated": false, "secret": false, "defaultValue": "latest", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "What to do when there is no initial offset in ZooKeeper or if [...]
-    "breakOnFirstError": { "kind": "parameter", "displayName": "Break On First Error", "group": "consumer", "label": "consumer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "This options controls what happens when a consumer is processing an exchange and it fails. If the option is f [...]
-    "bridgeErrorHandler": { "kind": "parameter", "displayName": "Bridge Error Handler", "group": "consumer", "label": "consumer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "description": "Allows for bridging the consumer to the Camel routing Error Handler, which mean any exceptions occurred while the consumer is trying to pickup incoming messages, or the likes, will now be processed as a message and handled b [...]
-    "checkCrcs": { "kind": "parameter", "displayName": "Check Crcs", "group": "consumer", "label": "consumer", "required": false, "type": "boolean", "javaType": "java.lang.Boolean", "deprecated": false, "secret": false, "defaultValue": "true", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Automatically check the CRC32 of the records consumed. This ensures no on-the-wire or on-disk corruption to the mess [...]
-    "consumerRequestTimeoutMs": { "kind": "parameter", "displayName": "Consumer Request Timeout Ms", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "40000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The configuration controls the maximum amount of time the client will wait for the  [...]
-    "consumersCount": { "kind": "parameter", "displayName": "Consumers Count", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "int", "deprecated": false, "secret": false, "defaultValue": 1, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The number of consumers that connect to kafka server" },
-    "consumerStreams": { "kind": "parameter", "displayName": "Consumer Streams", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "int", "deprecated": false, "secret": false, "defaultValue": 10, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Number of concurrent consumers on the consumer" },
-    "fetchMaxBytes": { "kind": "parameter", "displayName": "Fetch Max Bytes", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "52428800", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum amount of data the server should return for a fetch request This is not an absolute maximum [...]
-    "fetchMinBytes": { "kind": "parameter", "displayName": "Fetch Min Bytes", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "1", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The minimum amount of data the server should return for a fetch request. If insufficient data is available the [...]
-    "fetchWaitMaxMs": { "kind": "parameter", "displayName": "Fetch Wait Max Ms", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "500", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum amount of time the server will block before answering the fetch request if there isn't suffic [...]
-    "groupId": { "kind": "parameter", "displayName": "Group Id", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "A string that uniquely identifies the group of consumer processes to which this consumer belongs. By setting the same group id multiple processes  [...]
-    "heartbeatIntervalMs": { "kind": "parameter", "displayName": "Heartbeat Interval Ms", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "3000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The expected time between heartbeats to the consumer coordinator when using Kafka's group manag [...]
-    "kafkaHeaderDeserializer": { "kind": "parameter", "displayName": "Kafka Header Deserializer", "group": "consumer", "label": "consumer", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom KafkaHeaderDeserializer to deserialize kafka header [...]
-    "keyDeserializer": { "kind": "parameter", "displayName": "Key Deserializer", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringDeserializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Deserializer class for key that implements the Deseria [...]
-    "maxPartitionFetchBytes": { "kind": "parameter", "displayName": "Max Partition Fetch Bytes", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "1048576", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum amount of data per-partition the server will return. The maximum total me [...]
-    "maxPollIntervalMs": { "kind": "parameter", "displayName": "Max Poll Interval Ms", "group": "consumer", "label": "consumer", "required": false, "type": "duration", "javaType": "java.lang.Long", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum delay between invocations of poll() when using consumer group management. This places an upper bound on the amou [...]
-    "maxPollRecords": { "kind": "parameter", "displayName": "Max Poll Records", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "500", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum number of records returned in a single call to poll()" },
-    "offsetRepository": { "kind": "parameter", "displayName": "Offset Repository", "group": "consumer", "label": "consumer", "required": false, "type": "object", "javaType": "org.apache.camel.spi.StateRepository<java.lang.String, java.lang.String>", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The offset repository to use in order to locally store the offset of eac [...]
-    "partitionAssignor": { "kind": "parameter", "displayName": "Partition Assignor", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.clients.consumer.RangeAssignor", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The class name of the partition assignment strategy that th [...]
-    "pollTimeoutMs": { "kind": "parameter", "displayName": "Poll Timeout Ms", "group": "consumer", "label": "consumer", "required": false, "type": "duration", "javaType": "java.lang.Long", "deprecated": false, "secret": false, "defaultValue": "5000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The timeout used when polling the KafkaConsumer." },
-    "seekTo": { "kind": "parameter", "displayName": "Seek To", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "enum": [ "beginning", "end" ], "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Set if KafkaConsumer will read from beginning or end on startup: beginning : read from beginning end : read from end [...]
-    "sessionTimeoutMs": { "kind": "parameter", "displayName": "Session Timeout Ms", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "10000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The timeout used to detect failures when using Kafka's group management facilities." },
-    "specificAvroReader": { "kind": "parameter", "displayName": "Specific Avro Reader", "group": "consumer", "label": "confluent,consumer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "This enables the use of a specific Avro reader for use with the Confluent Platform schema registry [...]
-    "topicIsPattern": { "kind": "parameter", "displayName": "Topic Is Pattern", "group": "consumer", "label": "consumer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Whether the topic is a pattern (regular expression). This can be used to subscribe to dynamic number of topics match [...]
-    "valueDeserializer": { "kind": "parameter", "displayName": "Value Deserializer", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringDeserializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Deserializer class for value that implements the D [...]
-    "exceptionHandler": { "kind": "parameter", "displayName": "Exception Handler", "group": "consumer (advanced)", "label": "consumer,advanced", "required": false, "type": "object", "javaType": "org.apache.camel.spi.ExceptionHandler", "optionalPrefix": "consumer.", "deprecated": false, "secret": false, "description": "To let the consumer use a custom ExceptionHandler. Notice if the option bridgeErrorHandler is enabled then this option is not in use. By default the consumer will deal with [...]
-    "exchangePattern": { "kind": "parameter", "displayName": "Exchange Pattern", "group": "consumer (advanced)", "label": "consumer,advanced", "required": false, "type": "object", "javaType": "org.apache.camel.ExchangePattern", "enum": [ "InOnly", "InOut", "InOptionalOut" ], "deprecated": false, "secret": false, "description": "Sets the exchange pattern when the consumer creates an exchange." },
-    "bufferMemorySize": { "kind": "parameter", "displayName": "Buffer Memory Size", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "33554432", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The total bytes of memory the producer can use to buffer records waiting to be sent to the server [...]
-    "compressionCodec": { "kind": "parameter", "displayName": "Compression Codec", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "enum": [ "none", "gzip", "snappy", "lz4" ], "deprecated": false, "secret": false, "defaultValue": "none", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "This parameter allows you to specify the compression codec  [...]
-    "connectionMaxIdleMs": { "kind": "parameter", "displayName": "Connection Max Idle Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "540000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Close idle connections after the number of milliseconds specified by this config." },
-    "enableIdempotence": { "kind": "parameter", "displayName": "Enable Idempotence", "group": "producer", "label": "producer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "If set to 'true' the producer will ensure that exactly one copy of each message is written in the stream. If 'f [...]
-    "kafkaHeaderSerializer": { "kind": "parameter", "displayName": "Kafka Header Serializer", "group": "producer", "label": "producer", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.serde.KafkaHeaderSerializer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom KafkaHeaderSerializer to serialize kafka headers values" },
-    "key": { "kind": "parameter", "displayName": "Key", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The record key (or null if no key is specified). If this option has been configured then it take precedence over header KafkaConstants#KEY" },
-    "keySerializer": { "kind": "parameter", "displayName": "Key Serializer", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringSerializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The serializer class for keys (defaults to the same as for m [...]
-    "lazyStartProducer": { "kind": "parameter", "displayName": "Lazy Start Producer", "group": "producer", "label": "producer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "description": "Whether the producer should be started lazy (on the first message). By starting lazy you can use this to allow CamelContext and routes to startup in situations where a producer may otherwise fail during starting and cause the  [...]
-    "lingerMs": { "kind": "parameter", "displayName": "Linger Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "0", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The producer groups together any records that arrive in between request transmissions into a single batched request. Norm [...]
-    "maxBlockMs": { "kind": "parameter", "displayName": "Max Block Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "60000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The configuration controls how long sending to kafka will block. These methods can be blocked for multiple reaso [...]
-    "maxInFlightRequest": { "kind": "parameter", "displayName": "Max In Flight Request", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "5", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum number of unacknowledged requests the client will send on a single connection before bl [...]
-    "maxRequestSize": { "kind": "parameter", "displayName": "Max Request Size", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "1048576", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum size of a request. This is also effectively a cap on the maximum record size. Note that th [...]
-    "metadataMaxAgeMs": { "kind": "parameter", "displayName": "Metadata Max Age Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "300000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The period of time in milliseconds after which we force a refresh of metadata even if we haven't s [...]
-    "metricReporters": { "kind": "parameter", "displayName": "Metric Reporters", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "A list of classes to use as metrics reporters. Implementing the MetricReporter interface allows plugging in classes that will be n [...]
-    "metricsSampleWindowMs": { "kind": "parameter", "displayName": "Metrics Sample Window Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "30000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The number of samples maintained to compute metrics." },
-    "noOfMetricsSample": { "kind": "parameter", "displayName": "No Of Metrics Sample", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "2", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The number of samples maintained to compute metrics." },
-    "partitioner": { "kind": "parameter", "displayName": "Partitioner", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.clients.producer.internals.DefaultPartitioner", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The partitioner class for partitioning messages amongst s [...]
-    "partitionKey": { "kind": "parameter", "displayName": "Partition Key", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The partition to which the record will be sent (or null if no partition was specified). If this option has been configured then it tak [...]
-    "producerBatchSize": { "kind": "parameter", "displayName": "Producer Batch Size", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "16384", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The producer will attempt to batch records together into fewer requests whenever multiple records  [...]
-    "queueBufferingMaxMessages": { "kind": "parameter", "displayName": "Queue Buffering Max Messages", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "10000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum number of unsent messages that can be queued up the producer when usi [...]
-    "receiveBufferBytes": { "kind": "parameter", "displayName": "Receive Buffer Bytes", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "65536", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The size of the TCP receive buffer (SO_RCVBUF) to use when reading data." },
-    "reconnectBackoffMs": { "kind": "parameter", "displayName": "Reconnect Backoff Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "50", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The amount of time to wait before attempting to reconnect to a given host. This avoids repeatedly c [...]
-    "recordMetadata": { "kind": "parameter", "displayName": "Record Metadata", "group": "producer", "label": "producer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": true, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Whether the producer should store the RecordMetadata results from sending to Kafka. The results are stored in a List c [...]
-    "requestRequiredAcks": { "kind": "parameter", "displayName": "Request Required Acks", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "enum": [ "-1", "0", "1", "all" ], "deprecated": false, "secret": false, "defaultValue": "1", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The number of acknowledgments the producer requires the leader to [...]
-    "requestTimeoutMs": { "kind": "parameter", "displayName": "Request Timeout Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "30000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The amount of time the broker will wait trying to meet the request.required.acks requirement before  [...]
-    "retries": { "kind": "parameter", "displayName": "Retries", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "0", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Setting a value greater than zero will cause the client to resend any record whose send fails with a potentially transient e [...]
-    "retryBackoffMs": { "kind": "parameter", "displayName": "Retry Backoff Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "100", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Before each retry, the producer refreshes the metadata of relevant topics to see if a new leader has been  [...]
-    "sendBufferBytes": { "kind": "parameter", "displayName": "Send Buffer Bytes", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "131072", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Socket write buffer size" },
-    "serializerClass": { "kind": "parameter", "displayName": "Serializer Class", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringSerializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The serializer class for messages." },
-    "workerPool": { "kind": "parameter", "displayName": "Worker Pool", "group": "producer", "label": "producer", "required": false, "type": "object", "javaType": "java.util.concurrent.ExecutorService", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom worker pool for continue routing Exchange after kafka server has acknowledge the message that was sent t [...]
-    "workerPoolCoreSize": { "kind": "parameter", "displayName": "Worker Pool Core Size", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "10", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Number of core threads for the worker pool for continue routing Exchange after kafka server has ac [...]
-    "workerPoolMaxSize": { "kind": "parameter", "displayName": "Worker Pool Max Size", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "20", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Maximum number of threads for the worker pool for continue routing Exchange after kafka server has a [...]
-    "basicPropertyBinding": { "kind": "parameter", "displayName": "Basic Property Binding", "group": "advanced", "label": "advanced", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "description": "Whether the endpoint should use basic property binding (Camel 2.x) or the newer property binding with additional capabilities" },
-    "synchronous": { "kind": "parameter", "displayName": "Synchronous", "group": "advanced", "label": "advanced", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "description": "Sets whether synchronous processing should be strictly used, or Camel is allowed to use asynchronous processing (if supported)." },
-    "schemaRegistryURL": { "kind": "parameter", "displayName": "Schema Registry URL", "group": "confluent", "label": "confluent", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "URL of the Confluent Platform schema registry servers to use. The format is host1:port1,host2:port2. This is known as schem [...]
-    "interceptorClasses": { "kind": "parameter", "displayName": "Interceptor Classes", "group": "monitoring", "label": "common,monitoring", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Sets interceptors for producer or consumers. Producer interceptors have to be classes implementing org.apache.kaf [...]
-    "kerberosBeforeReloginMinTime": { "kind": "parameter", "displayName": "Kerberos Before Relogin Min Time", "group": "security", "label": "common,security", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "60000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Login thread sleep time between refresh attempts." },
-    "kerberosInitCmd": { "kind": "parameter", "displayName": "Kerberos Init Cmd", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "\/usr\/bin\/kinit", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Kerberos kinit command path. Default is \/usr\/bin\/kinit" },
-    "kerberosPrincipalToLocalRules": { "kind": "parameter", "displayName": "Kerberos Principal To Local Rules", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "DEFAULT", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "A list of rules for mapping from principal names to short names ( [...]
-    "kerberosRenewJitter": { "kind": "parameter", "displayName": "Kerberos Renew Jitter", "group": "security", "label": "common,security", "required": false, "type": "number", "javaType": "java.lang.Double", "deprecated": false, "secret": false, "defaultValue": "0.05", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Percentage of random jitter added to the renewal time." },
-    "kerberosRenewWindowFactor": { "kind": "parameter", "displayName": "Kerberos Renew Window Factor", "group": "security", "label": "common,security", "required": false, "type": "number", "javaType": "java.lang.Double", "deprecated": false, "secret": false, "defaultValue": "0.8", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Login thread will sleep until the specified window factor of time from last re [...]
-    "saslJaasConfig": { "kind": "parameter", "displayName": "Sasl Jaas Config", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": true, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Expose the kafka sasl.jaas.config parameter Example: org.apache.kafka.common.security.plain.PlainLoginModule required usernam [...]
-    "saslKerberosServiceName": { "kind": "parameter", "displayName": "Sasl Kerberos Service Name", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The Kerberos principal name that Kafka runs as. This can be defined either in Kafka's JAAS config or in K [...]
-    "saslMechanism": { "kind": "parameter", "displayName": "Sasl Mechanism", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "GSSAPI", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The Simple Authentication and Security Layer (SASL) Mechanism used. For the valid values see http:\/\ [...]
-    "securityProtocol": { "kind": "parameter", "displayName": "Security Protocol", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "PLAINTEXT", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Protocol used to communicate with brokers. SASL_PLAINTEXT, PLAINTEXT and SSL are supported" },
-    "sslCipherSuites": { "kind": "parameter", "displayName": "Ssl Cipher Suites", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "A list of cipher suites. This is a named combination of authentication, encryption, MAC and key exchange algorithm used to [...]
-    "sslContextParameters": { "kind": "parameter", "displayName": "Ssl Context Parameters", "group": "security", "label": "common,security", "required": false, "type": "object", "javaType": "org.apache.camel.support.jsse.SSLContextParameters", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "SSL configuration using a Camel SSLContextParameters object. If configured it' [...]
-    "sslEnabledProtocols": { "kind": "parameter", "displayName": "Ssl Enabled Protocols", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The list of protocols enabled for SSL connections. TLSv1.2, TLSv1.1 and TLSv1 are enabled by default." },
-    "sslEndpointAlgorithm": { "kind": "parameter", "displayName": "Ssl Endpoint Algorithm", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "https", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The endpoint identification algorithm to validate server hostname using server certific [...]
-    "sslKeymanagerAlgorithm": { "kind": "parameter", "displayName": "Ssl Keymanager Algorithm", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "SunX509", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The algorithm used by key manager factory for SSL connections. Default value is t [...]
-    "sslKeyPassword": { "kind": "parameter", "displayName": "Ssl Key Password", "group": "security", "label": "producer,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": true, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The password of the private key in the key store file. This is optional for client." },
-    "sslKeystoreLocation": { "kind": "parameter", "displayName": "Ssl Keystore Location", "group": "security", "label": "producer,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The location of the key store file. This is optional for client and can be used for two-way authentication for c [...]
-    "sslKeystorePassword": { "kind": "parameter", "displayName": "Ssl Keystore Password", "group": "security", "label": "producer,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": true, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The store password for the key store file.This is optional for client and only needed if ssl.keystore.location is [...]
-    "sslKeystoreType": { "kind": "parameter", "displayName": "Ssl Keystore Type", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "JKS", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The file format of the key store file. This is optional for client. Default value is JKS" },
-    "sslProtocol": { "kind": "parameter", "displayName": "Ssl Protocol", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The SSL protocol used to generate the SSLContext. Default setting is TLS, which is fine for most cases. Allowed values in recent JV [...]
-    "sslProvider": { "kind": "parameter", "displayName": "Ssl Provider", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The name of the security provider used for SSL connections. Default value is the default security provider of the JVM." },
-    "sslTrustmanagerAlgorithm": { "kind": "parameter", "displayName": "Ssl Trustmanager Algorithm", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "PKIX", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The algorithm used by trust manager factory for SSL connections. Default value i [...]
-    "sslTruststoreLocation": { "kind": "parameter", "displayName": "Ssl Truststore Location", "group": "security", "label": "producer,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The location of the trust store file." },
-    "sslTruststorePassword": { "kind": "parameter", "displayName": "Ssl Truststore Password", "group": "security", "label": "producer,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": true, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The password for the trust store file." },
-    "sslTruststoreType": { "kind": "parameter", "displayName": "Ssl Truststore Type", "group": "security", "label": "common,security", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "JKS", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The file format of the trust store file. Default value is JKS." }
-  }
-}
diff --git a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaProducerFullTest.java b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaProducerFullTest.java
index 95e673c..2f5b664 100644
--- a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaProducerFullTest.java
+++ b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaProducerFullTest.java
@@ -83,7 +83,7 @@ public class KafkaProducerFullTest extends BaseEmbeddedKafkaTest {
     private MockEndpoint mockEndpoint;
 
     @EndpointInject("kafka:" + TOPIC_BYTES + "?requestRequiredAcks=-1"
-                    + "&serializerClass=org.apache.kafka.common.serialization.ByteArraySerializer&"
+                    + "&valueSerializer=org.apache.kafka.common.serialization.ByteArraySerializer&"
                     + "keySerializer=org.apache.kafka.common.serialization.ByteArraySerializer")
     private Endpoint toBytes;
 

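For reference, the renamed producer options exercised by the test above can be used from any route in the same way. A minimal sketch in Java, assuming a local broker, a placeholder topic name and a direct: trigger (none of which come from the patch):

import org.apache.camel.builder.RouteBuilder;

public class KafkaByteArrayProducerRoute extends RouteBuilder {
    @Override
    public void configure() {
        // CAMEL-15770 renames the producer option "serializerClass" to "valueSerializer";
        // "keySerializer" keeps its name. Topic and broker values are placeholders.
        from("direct:start")
            .to("kafka:my-topic?brokers=localhost:9092"
                + "&requestRequiredAcks=-1"
                + "&valueSerializer=org.apache.kafka.common.serialization.ByteArraySerializer"
                + "&keySerializer=org.apache.kafka.common.serialization.ByteArraySerializer");
    }
}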

[camel] 07/13: CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - kafkaHeaderSerializer

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/camel.git

commit dab36bdfe783fe6f2e385f7eb8f2cff43bb697a1
Author: Andrea Cosentino <an...@gmail.com>
AuthorDate: Thu Oct 29 19:21:45 2020 +0100

    CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - kafkaHeaderSerializer
---
 .../org/apache/camel/component/kafka/KafkaConfiguration.java   | 10 +++++-----
 .../java/org/apache/camel/component/kafka/KafkaProducer.java   |  2 +-
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConfiguration.java b/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConfiguration.java
index 922420b..266012c 100644
--- a/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConfiguration.java
+++ b/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConfiguration.java
@@ -226,7 +226,7 @@ public class KafkaConfiguration implements Cloneable, HeaderFilterStrategyAware
     @UriParam(label = "producer", defaultValue = "false")
     private boolean enableIdempotence;
     @UriParam(label = "producer", description = "To use a custom KafkaHeaderSerializer to serialize kafka headers values")
-    private KafkaHeaderSerializer kafkaHeaderSerializer = new DefaultKafkaHeaderSerializer();
+    private KafkaHeaderSerializer headerSerializer = new DefaultKafkaHeaderSerializer();
 
     // reconnect.backoff.max.ms
     @UriParam(label = "common", defaultValue = "1000")
@@ -1687,8 +1687,8 @@ public class KafkaConfiguration implements Cloneable, HeaderFilterStrategyAware
         this.kafkaHeaderDeserializer = kafkaHeaderDeserializer;
     }
 
-    public KafkaHeaderSerializer getKafkaHeaderSerializer() {
-        return kafkaHeaderSerializer;
+    public KafkaHeaderSerializer getHeaderSerializer() {
+        return headerSerializer;
     }
 
     /**
@@ -1696,8 +1696,8 @@ public class KafkaConfiguration implements Cloneable, HeaderFilterStrategyAware
      *
      * @param kafkaHeaderSerializer custom kafka header serializer to be used
      */
-    public void setKafkaHeaderSerializer(final KafkaHeaderSerializer kafkaHeaderSerializer) {
-        this.kafkaHeaderSerializer = kafkaHeaderSerializer;
+    public void setKafkaHeaderSerializer(final KafkaHeaderSerializer headerSerializer) {
+        this.headerSerializer = headerSerializer;
     }
 
     /**
diff --git a/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaProducer.java b/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaProducer.java
index 0466514..153e225 100644
--- a/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaProducer.java
+++ b/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaProducer.java
@@ -285,7 +285,7 @@ public class KafkaProducer extends DefaultAsyncProducer {
 
     private List<Header> getPropagatedHeaders(Exchange exchange, KafkaConfiguration getConfiguration) {
         HeaderFilterStrategy headerFilterStrategy = getConfiguration.getHeaderFilterStrategy();
-        KafkaHeaderSerializer headerSerializer = getConfiguration.getKafkaHeaderSerializer();
+        KafkaHeaderSerializer headerSerializer = getConfiguration.getHeaderSerializer();
         return exchange.getIn().getHeaders().entrySet().stream()
                 .filter(entry -> shouldBeFiltered(entry, exchange, headerFilterStrategy))
                 .map(entry -> getRecordHeader(entry, headerSerializer)).filter(Objects::nonNull).collect(Collectors.toList());

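The producer change above only switches the lookup from getKafkaHeaderSerializer() to getHeaderSerializer(); the serializer contract itself is untouched. As a rough sketch of what such a serializer can look like (the class name and the single-method signature serialize(String, Object) are assumptions for illustration, not taken from this patch):

import java.nio.charset.StandardCharsets;

import org.apache.camel.component.kafka.serde.KafkaHeaderSerializer;

// Hypothetical serializer that writes every propagated Camel header as UTF-8 text.
public class StringifyingKafkaHeaderSerializer implements KafkaHeaderSerializer {
    @Override
    public byte[] serialize(String key, Object value) {
        // A null return presumably leads to the header being dropped, since
        // getPropagatedHeaders() above filters out null record headers.
        return value == null ? null : value.toString().getBytes(StandardCharsets.UTF_8);
    }
}

Such a bean is handed to the component through the configuration setter shown above and retrieved with getHeaderSerializer() when the record headers are built.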

[camel] 11/13: CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - kafkaHeaderDeserializer tests

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/camel.git

commit 9f124e7149e35ea7231ea7a7dbe3b9cae0566b10
Author: Andrea Cosentino <an...@gmail.com>
AuthorDate: Thu Oct 29 19:31:42 2020 +0100

    CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - kafkaHeaderDeserializer tests
---
 .../apache/camel/catalog/docs/kafka-component.adoc |  4 +-
 .../component/kafka/KafkaComponentConfigurer.java  | 10 ++--
 .../component/kafka/KafkaEndpointConfigurer.java   | 10 ++--
 .../component/kafka/KafkaEndpointUriFactory.java   |  2 +-
 .../org/apache/camel/component/kafka/kafka.json    |  4 +-
 .../camel-kafka/src/main/docs/kafka-component.adoc |  4 +-
 .../component/kafka/KafkaConsumerFullTest.java     |  2 +-
 .../dsl/KafkaComponentBuilderFactory.java          | 30 ++++++------
 .../endpoint/dsl/KafkaEndpointBuilderFactory.java  | 56 +++++++++++-----------
 .../modules/ROOT/pages/kafka-component.adoc        |  4 +-
 10 files changed, 63 insertions(+), 63 deletions(-)

diff --git a/catalog/camel-catalog/src/generated/resources/org/apache/camel/catalog/docs/kafka-component.adoc b/catalog/camel-catalog/src/generated/resources/org/apache/camel/catalog/docs/kafka-component.adoc
index 26f7fef..0e1408b 100644
--- a/catalog/camel-catalog/src/generated/resources/org/apache/camel/catalog/docs/kafka-component.adoc
+++ b/catalog/camel-catalog/src/generated/resources/org/apache/camel/catalog/docs/kafka-component.adoc
@@ -70,8 +70,8 @@ The Kafka component supports 97 options, which are listed below.
 | *fetchMinBytes* (consumer) | The minimum amount of data the server should return for a fetch request. If insufficient data is available the request will wait for that much data to accumulate before answering the request. | 1 | Integer
 | *fetchWaitMaxMs* (consumer) | The maximum amount of time the server will block before answering the fetch request if there isn't sufficient data to immediately satisfy fetch.min.bytes | 500 | Integer
 | *groupId* (consumer) | A string that uniquely identifies the group of consumer processes to which this consumer belongs. By setting the same group id multiple processes indicate that they are all part of the same consumer group. This option is required for consumers. |  | String
+| *headerDeserializer* (consumer) | To use a custom KafkaHeaderDeserializer to deserialize kafka headers values |  | KafkaHeaderDeserializer
 | *heartbeatIntervalMs* (consumer) | The expected time between heartbeats to the consumer coordinator when using Kafka's group management facilities. Heartbeats are used to ensure that the consumer's session stays active and to facilitate rebalancing when new consumers join or leave the group. The value must be set lower than session.timeout.ms, but typically should be set no higher than 1/3 of that value. It can be adjusted even lower to control the expected time for normal rebalances.  [...]
-| *kafkaHeaderDeserializer* (consumer) | To use a custom KafkaHeaderDeserializer to deserialize kafka headers values |  | KafkaHeaderDeserializer
 | *keyDeserializer* (consumer) | Deserializer class for key that implements the Deserializer interface. | org.apache.kafka.common.serialization.StringDeserializer | String
 | *maxPartitionFetchBytes* (consumer) | The maximum amount of data per-partition the server will return. The maximum total memory used for a request will be #partitions max.partition.fetch.bytes. This size must be at least as large as the maximum message size the server allows or else it is possible for the producer to send messages larger than the consumer can fetch. If that happens, the consumer can get stuck trying to fetch a large message on a certain partition. | 1048576 | Integer
 | *maxPollIntervalMs* (consumer) | The maximum delay between invocations of poll() when using consumer group management. This places an upper bound on the amount of time that the consumer can be idle before fetching more records. If poll() is not called before expiration of this timeout, then the consumer is considered failed and the group will rebalance in order to reassign the partitions to another member. |  | Long
@@ -196,8 +196,8 @@ with the following path and query parameters:
 | *fetchMinBytes* (consumer) | The minimum amount of data the server should return for a fetch request. If insufficient data is available the request will wait for that much data to accumulate before answering the request. | 1 | Integer
 | *fetchWaitMaxMs* (consumer) | The maximum amount of time the server will block before answering the fetch request if there isn't sufficient data to immediately satisfy fetch.min.bytes | 500 | Integer
 | *groupId* (consumer) | A string that uniquely identifies the group of consumer processes to which this consumer belongs. By setting the same group id multiple processes indicate that they are all part of the same consumer group. This option is required for consumers. |  | String
+| *headerDeserializer* (consumer) | To use a custom KafkaHeaderDeserializer to deserialize kafka headers values |  | KafkaHeaderDeserializer
 | *heartbeatIntervalMs* (consumer) | The expected time between heartbeats to the consumer coordinator when using Kafka's group management facilities. Heartbeats are used to ensure that the consumer's session stays active and to facilitate rebalancing when new consumers join or leave the group. The value must be set lower than session.timeout.ms, but typically should be set no higher than 1/3 of that value. It can be adjusted even lower to control the expected time for normal rebalances.  [...]
-| *kafkaHeaderDeserializer* (consumer) | To use a custom KafkaHeaderDeserializer to deserialize kafka headers values |  | KafkaHeaderDeserializer
 | *keyDeserializer* (consumer) | Deserializer class for key that implements the Deserializer interface. | org.apache.kafka.common.serialization.StringDeserializer | String
 | *maxPartitionFetchBytes* (consumer) | The maximum amount of data per-partition the server will return. The maximum total memory used for a request will be #partitions max.partition.fetch.bytes. This size must be at least as large as the maximum message size the server allows or else it is possible for the producer to send messages larger than the consumer can fetch. If that happens, the consumer can get stuck trying to fetch a large message on a certain partition. | 1048576 | Integer
 | *maxPollIntervalMs* (consumer) | The maximum delay between invocations of poll() when using consumer group management. This places an upper bound on the amount of time that the consumer can be idle before fetching more records. If poll() is not called before expiration of this timeout, then the consumer is considered failed and the group will rebalance in order to reassign the partitions to another member. |  | Long
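
The tables above now list the consumer option under its new name, headerDeserializer. A minimal sketch of wiring it from a route, assuming a KafkaHeaderDeserializer bean bound in the registry as myHeaderDeserializer and a broker on localhost:9092 (topic, bean name and broker address are illustrative only, not part of this commit):

    import org.apache.camel.builder.RouteBuilder;

    public class KafkaHeaderDeserializerRoute extends RouteBuilder {
        @Override
        public void configure() {
            // query parameter is now "headerDeserializer" (was "kafkaHeaderDeserializer");
            // #myHeaderDeserializer refers to a KafkaHeaderDeserializer bean in the registry
            from("kafka:my_topic?brokers=localhost:9092&headerDeserializer=#myHeaderDeserializer")
                .log("received ${body} with headers ${headers}");
        }
    }
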
diff --git a/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaComponentConfigurer.java b/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaComponentConfigurer.java
index c5b3a26..d154931 100644
--- a/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaComponentConfigurer.java
+++ b/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaComponentConfigurer.java
@@ -40,8 +40,8 @@ public class KafkaComponentConfigurer extends PropertyConfigurerSupport implemen
         map.put("fetchMinBytes", java.lang.Integer.class);
         map.put("fetchWaitMaxMs", java.lang.Integer.class);
         map.put("groupId", java.lang.String.class);
+        map.put("headerDeserializer", org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer.class);
         map.put("heartbeatIntervalMs", java.lang.Integer.class);
-        map.put("kafkaHeaderDeserializer", org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer.class);
         map.put("keyDeserializer", java.lang.String.class);
         map.put("maxPartitionFetchBytes", java.lang.Integer.class);
         map.put("maxPollIntervalMs", java.lang.Long.class);
@@ -175,6 +175,8 @@ public class KafkaComponentConfigurer extends PropertyConfigurerSupport implemen
         case "fetchWaitMaxMs": getOrCreateConfiguration(target).setFetchWaitMaxMs(property(camelContext, java.lang.Integer.class, value)); return true;
         case "groupid":
         case "groupId": getOrCreateConfiguration(target).setGroupId(property(camelContext, java.lang.String.class, value)); return true;
+        case "headerdeserializer":
+        case "headerDeserializer": getOrCreateConfiguration(target).setHeaderDeserializer(property(camelContext, org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer.class, value)); return true;
         case "headerfilterstrategy":
         case "headerFilterStrategy": getOrCreateConfiguration(target).setHeaderFilterStrategy(property(camelContext, org.apache.camel.spi.HeaderFilterStrategy.class, value)); return true;
         case "headerserializer":
@@ -183,8 +185,6 @@ public class KafkaComponentConfigurer extends PropertyConfigurerSupport implemen
         case "heartbeatIntervalMs": getOrCreateConfiguration(target).setHeartbeatIntervalMs(property(camelContext, java.lang.Integer.class, value)); return true;
         case "interceptorclasses":
         case "interceptorClasses": getOrCreateConfiguration(target).setInterceptorClasses(property(camelContext, java.lang.String.class, value)); return true;
-        case "kafkaheaderdeserializer":
-        case "kafkaHeaderDeserializer": getOrCreateConfiguration(target).setKafkaHeaderDeserializer(property(camelContext, org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer.class, value)); return true;
         case "kafkamanualcommitfactory":
         case "kafkaManualCommitFactory": target.setKafkaManualCommitFactory(property(camelContext, org.apache.camel.component.kafka.KafkaManualCommitFactory.class, value)); return true;
         case "kerberosbeforereloginmintime":
@@ -377,6 +377,8 @@ public class KafkaComponentConfigurer extends PropertyConfigurerSupport implemen
         case "fetchWaitMaxMs": return getOrCreateConfiguration(target).getFetchWaitMaxMs();
         case "groupid":
         case "groupId": return getOrCreateConfiguration(target).getGroupId();
+        case "headerdeserializer":
+        case "headerDeserializer": return getOrCreateConfiguration(target).getHeaderDeserializer();
         case "headerfilterstrategy":
         case "headerFilterStrategy": return getOrCreateConfiguration(target).getHeaderFilterStrategy();
         case "headerserializer":
@@ -385,8 +387,6 @@ public class KafkaComponentConfigurer extends PropertyConfigurerSupport implemen
         case "heartbeatIntervalMs": return getOrCreateConfiguration(target).getHeartbeatIntervalMs();
         case "interceptorclasses":
         case "interceptorClasses": return getOrCreateConfiguration(target).getInterceptorClasses();
-        case "kafkaheaderdeserializer":
-        case "kafkaHeaderDeserializer": return getOrCreateConfiguration(target).getKafkaHeaderDeserializer();
         case "kafkamanualcommitfactory":
         case "kafkaManualCommitFactory": return target.getKafkaManualCommitFactory();
         case "kerberosbeforereloginmintime":
diff --git a/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointConfigurer.java b/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointConfigurer.java
index 733a607..e85859e 100644
--- a/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointConfigurer.java
+++ b/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointConfigurer.java
@@ -40,8 +40,8 @@ public class KafkaEndpointConfigurer extends PropertyConfigurerSupport implement
         map.put("fetchMinBytes", java.lang.Integer.class);
         map.put("fetchWaitMaxMs", java.lang.Integer.class);
         map.put("groupId", java.lang.String.class);
+        map.put("headerDeserializer", org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer.class);
         map.put("heartbeatIntervalMs", java.lang.Integer.class);
-        map.put("kafkaHeaderDeserializer", org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer.class);
         map.put("keyDeserializer", java.lang.String.class);
         map.put("maxPartitionFetchBytes", java.lang.Integer.class);
         map.put("maxPollIntervalMs", java.lang.Long.class);
@@ -172,6 +172,8 @@ public class KafkaEndpointConfigurer extends PropertyConfigurerSupport implement
         case "fetchWaitMaxMs": target.getConfiguration().setFetchWaitMaxMs(property(camelContext, java.lang.Integer.class, value)); return true;
         case "groupid":
         case "groupId": target.getConfiguration().setGroupId(property(camelContext, java.lang.String.class, value)); return true;
+        case "headerdeserializer":
+        case "headerDeserializer": target.getConfiguration().setHeaderDeserializer(property(camelContext, org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer.class, value)); return true;
         case "headerfilterstrategy":
         case "headerFilterStrategy": target.getConfiguration().setHeaderFilterStrategy(property(camelContext, org.apache.camel.spi.HeaderFilterStrategy.class, value)); return true;
         case "headerserializer":
@@ -180,8 +182,6 @@ public class KafkaEndpointConfigurer extends PropertyConfigurerSupport implement
         case "heartbeatIntervalMs": target.getConfiguration().setHeartbeatIntervalMs(property(camelContext, java.lang.Integer.class, value)); return true;
         case "interceptorclasses":
         case "interceptorClasses": target.getConfiguration().setInterceptorClasses(property(camelContext, java.lang.String.class, value)); return true;
-        case "kafkaheaderdeserializer":
-        case "kafkaHeaderDeserializer": target.getConfiguration().setKafkaHeaderDeserializer(property(camelContext, org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer.class, value)); return true;
         case "kerberosbeforereloginmintime":
         case "kerberosBeforeReloginMinTime": target.getConfiguration().setKerberosBeforeReloginMinTime(property(camelContext, java.lang.Integer.class, value)); return true;
         case "kerberosinitcmd":
@@ -374,6 +374,8 @@ public class KafkaEndpointConfigurer extends PropertyConfigurerSupport implement
         case "fetchWaitMaxMs": return target.getConfiguration().getFetchWaitMaxMs();
         case "groupid":
         case "groupId": return target.getConfiguration().getGroupId();
+        case "headerdeserializer":
+        case "headerDeserializer": return target.getConfiguration().getHeaderDeserializer();
         case "headerfilterstrategy":
         case "headerFilterStrategy": return target.getConfiguration().getHeaderFilterStrategy();
         case "headerserializer":
@@ -382,8 +384,6 @@ public class KafkaEndpointConfigurer extends PropertyConfigurerSupport implement
         case "heartbeatIntervalMs": return target.getConfiguration().getHeartbeatIntervalMs();
         case "interceptorclasses":
         case "interceptorClasses": return target.getConfiguration().getInterceptorClasses();
-        case "kafkaheaderdeserializer":
-        case "kafkaHeaderDeserializer": return target.getConfiguration().getKafkaHeaderDeserializer();
         case "kerberosbeforereloginmintime":
         case "kerberosBeforeReloginMinTime": return target.getConfiguration().getKerberosBeforeReloginMinTime();
         case "kerberosinitcmd":
diff --git a/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointUriFactory.java b/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointUriFactory.java
index e815bdc..92cbe3e 100644
--- a/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointUriFactory.java
+++ b/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointUriFactory.java
@@ -109,13 +109,13 @@ public class KafkaEndpointUriFactory extends org.apache.camel.support.component.
         props.add("sslEnabledProtocols");
         props.add("sslKeystoreLocation");
         props.add("schemaRegistryURL");
+        props.add("headerDeserializer");
         props.add("maxInFlightRequest");
         props.add("exchangePattern");
         props.add("valueSerializer");
         props.add("autoOffsetReset");
         props.add("seekTo");
         props.add("requestTimeoutMs");
-        props.add("kafkaHeaderDeserializer");
         props.add("fetchMaxBytes");
         props.add("checkCrcs");
         props.add("partitionAssignor");
diff --git a/components/camel-kafka/src/generated/resources/org/apache/camel/component/kafka/kafka.json b/components/camel-kafka/src/generated/resources/org/apache/camel/component/kafka/kafka.json
index 319e694..2cbc60d 100644
--- a/components/camel-kafka/src/generated/resources/org/apache/camel/component/kafka/kafka.json
+++ b/components/camel-kafka/src/generated/resources/org/apache/camel/component/kafka/kafka.json
@@ -44,8 +44,8 @@
     "fetchMinBytes": { "kind": "property", "displayName": "Fetch Min Bytes", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "1", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The minimum amount of data the server should return for a fetch request. If insufficient data is available the  [...]
     "fetchWaitMaxMs": { "kind": "property", "displayName": "Fetch Wait Max Ms", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "500", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum amount of time the server will block before answering the fetch request if there isn't suffici [...]
     "groupId": { "kind": "property", "displayName": "Group Id", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "A string that uniquely identifies the group of consumer processes to which this consumer belongs. By setting the same group id multiple processes i [...]
+    "headerDeserializer": { "kind": "property", "displayName": "Header Deserializer", "group": "consumer", "label": "consumer", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom KafkaHeaderDeserializer to deserialize kafka headers values" },
     "heartbeatIntervalMs": { "kind": "property", "displayName": "Heartbeat Interval Ms", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "3000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The expected time between heartbeats to the consumer coordinator when using Kafka's group manage [...]
-    "kafkaHeaderDeserializer": { "kind": "property", "displayName": "Kafka Header Deserializer", "group": "consumer", "label": "consumer", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom KafkaHeaderDeserializer to deserialize kafka headers [...]
     "keyDeserializer": { "kind": "property", "displayName": "Key Deserializer", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringDeserializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Deserializer class for key that implements the Deserial [...]
     "maxPartitionFetchBytes": { "kind": "property", "displayName": "Max Partition Fetch Bytes", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "1048576", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum amount of data per-partition the server will return. The maximum total mem [...]
     "maxPollIntervalMs": { "kind": "property", "displayName": "Max Poll Interval Ms", "group": "consumer", "label": "consumer", "required": false, "type": "duration", "javaType": "java.lang.Long", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum delay between invocations of poll() when using consumer group management. This places an upper bound on the amoun [...]
@@ -143,8 +143,8 @@
     "fetchMinBytes": { "kind": "parameter", "displayName": "Fetch Min Bytes", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "1", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The minimum amount of data the server should return for a fetch request. If insufficient data is available the [...]
     "fetchWaitMaxMs": { "kind": "parameter", "displayName": "Fetch Wait Max Ms", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "500", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum amount of time the server will block before answering the fetch request if there isn't suffic [...]
     "groupId": { "kind": "parameter", "displayName": "Group Id", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "A string that uniquely identifies the group of consumer processes to which this consumer belongs. By setting the same group id multiple processes  [...]
+    "headerDeserializer": { "kind": "parameter", "displayName": "Header Deserializer", "group": "consumer", "label": "consumer", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom KafkaHeaderDeserializer to deserialize kafka headers values" },
     "heartbeatIntervalMs": { "kind": "parameter", "displayName": "Heartbeat Interval Ms", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "3000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The expected time between heartbeats to the consumer coordinator when using Kafka's group manag [...]
-    "kafkaHeaderDeserializer": { "kind": "parameter", "displayName": "Kafka Header Deserializer", "group": "consumer", "label": "consumer", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom KafkaHeaderDeserializer to deserialize kafka header [...]
     "keyDeserializer": { "kind": "parameter", "displayName": "Key Deserializer", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringDeserializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Deserializer class for key that implements the Deseria [...]
     "maxPartitionFetchBytes": { "kind": "parameter", "displayName": "Max Partition Fetch Bytes", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "1048576", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum amount of data per-partition the server will return. The maximum total me [...]
     "maxPollIntervalMs": { "kind": "parameter", "displayName": "Max Poll Interval Ms", "group": "consumer", "label": "consumer", "required": false, "type": "duration", "javaType": "java.lang.Long", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum delay between invocations of poll() when using consumer group management. This places an upper bound on the amou [...]
diff --git a/components/camel-kafka/src/main/docs/kafka-component.adoc b/components/camel-kafka/src/main/docs/kafka-component.adoc
index 26f7fef..0e1408b 100644
--- a/components/camel-kafka/src/main/docs/kafka-component.adoc
+++ b/components/camel-kafka/src/main/docs/kafka-component.adoc
@@ -70,8 +70,8 @@ The Kafka component supports 97 options, which are listed below.
 | *fetchMinBytes* (consumer) | The minimum amount of data the server should return for a fetch request. If insufficient data is available the request will wait for that much data to accumulate before answering the request. | 1 | Integer
 | *fetchWaitMaxMs* (consumer) | The maximum amount of time the server will block before answering the fetch request if there isn't sufficient data to immediately satisfy fetch.min.bytes | 500 | Integer
 | *groupId* (consumer) | A string that uniquely identifies the group of consumer processes to which this consumer belongs. By setting the same group id multiple processes indicate that they are all part of the same consumer group. This option is required for consumers. |  | String
+| *headerDeserializer* (consumer) | To use a custom KafkaHeaderDeserializer to deserialize kafka headers values |  | KafkaHeaderDeserializer
 | *heartbeatIntervalMs* (consumer) | The expected time between heartbeats to the consumer coordinator when using Kafka's group management facilities. Heartbeats are used to ensure that the consumer's session stays active and to facilitate rebalancing when new consumers join or leave the group. The value must be set lower than session.timeout.ms, but typically should be set no higher than 1/3 of that value. It can be adjusted even lower to control the expected time for normal rebalances.  [...]
-| *kafkaHeaderDeserializer* (consumer) | To use a custom KafkaHeaderDeserializer to deserialize kafka headers values |  | KafkaHeaderDeserializer
 | *keyDeserializer* (consumer) | Deserializer class for key that implements the Deserializer interface. | org.apache.kafka.common.serialization.StringDeserializer | String
 | *maxPartitionFetchBytes* (consumer) | The maximum amount of data per-partition the server will return. The maximum total memory used for a request will be #partitions max.partition.fetch.bytes. This size must be at least as large as the maximum message size the server allows or else it is possible for the producer to send messages larger than the consumer can fetch. If that happens, the consumer can get stuck trying to fetch a large message on a certain partition. | 1048576 | Integer
 | *maxPollIntervalMs* (consumer) | The maximum delay between invocations of poll() when using consumer group management. This places an upper bound on the amount of time that the consumer can be idle before fetching more records. If poll() is not called before expiration of this timeout, then the consumer is considered failed and the group will rebalance in order to reassign the partitions to another member. |  | Long
@@ -196,8 +196,8 @@ with the following path and query parameters:
 | *fetchMinBytes* (consumer) | The minimum amount of data the server should return for a fetch request. If insufficient data is available the request will wait for that much data to accumulate before answering the request. | 1 | Integer
 | *fetchWaitMaxMs* (consumer) | The maximum amount of time the server will block before answering the fetch request if there isn't sufficient data to immediately satisfy fetch.min.bytes | 500 | Integer
 | *groupId* (consumer) | A string that uniquely identifies the group of consumer processes to which this consumer belongs. By setting the same group id multiple processes indicate that they are all part of the same consumer group. This option is required for consumers. |  | String
+| *headerDeserializer* (consumer) | To use a custom KafkaHeaderDeserializer to deserialize kafka headers values |  | KafkaHeaderDeserializer
 | *heartbeatIntervalMs* (consumer) | The expected time between heartbeats to the consumer coordinator when using Kafka's group management facilities. Heartbeats are used to ensure that the consumer's session stays active and to facilitate rebalancing when new consumers join or leave the group. The value must be set lower than session.timeout.ms, but typically should be set no higher than 1/3 of that value. It can be adjusted even lower to control the expected time for normal rebalances.  [...]
-| *kafkaHeaderDeserializer* (consumer) | To use a custom KafkaHeaderDeserializer to deserialize kafka headers values |  | KafkaHeaderDeserializer
 | *keyDeserializer* (consumer) | Deserializer class for key that implements the Deserializer interface. | org.apache.kafka.common.serialization.StringDeserializer | String
 | *maxPartitionFetchBytes* (consumer) | The maximum amount of data per-partition the server will return. The maximum total memory used for a request will be #partitions max.partition.fetch.bytes. This size must be at least as large as the maximum message size the server allows or else it is possible for the producer to send messages larger than the consumer can fetch. If that happens, the consumer can get stuck trying to fetch a large message on a certain partition. | 1048576 | Integer
 | *maxPollIntervalMs* (consumer) | The maximum delay between invocations of poll() when using consumer group management. This places an upper bound on the amount of time that the consumer can be idle before fetching more records. If poll() is not called before expiration of this timeout, then the consumer is considered failed and the group will rebalance in order to reassign the partitions to another member. |  | Long
diff --git a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerFullTest.java b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerFullTest.java
index 850cefa..8e1cad7 100644
--- a/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerFullTest.java
+++ b/components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaConsumerFullTest.java
@@ -195,7 +195,7 @@ public class KafkaConsumerFullTest extends BaseEmbeddedKafkaTest {
     public void headerDeserializerCouldBeOverridden() {
         KafkaEndpoint kafkaEndpoint
                 = context.getEndpoint("kafka:random_topic?kafkaHeaderDeserializer=#myHeaderDeserializer", KafkaEndpoint.class);
-        assertIsInstanceOf(MyKafkaHeaderDeserializer.class, kafkaEndpoint.getConfiguration().getKafkaHeaderDeserializer());
+        assertIsInstanceOf(MyKafkaHeaderDeserializer.class, kafkaEndpoint.getConfiguration().getHeaderDeserializer());
     }
 
     private static class MyKafkaHeaderDeserializer extends DefaultKafkaHeaderDeserializer {
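
The test context above relies on a custom implementation extending DefaultKafkaHeaderDeserializer. A minimal sketch of such an implementation, assuming the KafkaHeaderDeserializer interface exposes a single deserialize(String key, byte[] value) method (the class name is illustrative):

    import java.nio.charset.StandardCharsets;

    import org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer;

    // Illustrative implementation that turns every Kafka header value into a String;
    // assumes the single deserialize(String key, byte[] value) method on the interface.
    public class StringHeaderDeserializer implements KafkaHeaderDeserializer {

        @Override
        public Object deserialize(String key, byte[] value) {
            return value == null ? null : new String(value, StandardCharsets.UTF_8);
        }
    }

Bound in the registry (for example as myHeaderDeserializer), it can then be referenced from the endpoint URI via headerDeserializer=#myHeaderDeserializer after this rename.
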
diff --git a/core/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/KafkaComponentBuilderFactory.java b/core/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/KafkaComponentBuilderFactory.java
index ee66c62..6a526bf 100644
--- a/core/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/KafkaComponentBuilderFactory.java
+++ b/core/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/KafkaComponentBuilderFactory.java
@@ -388,6 +388,20 @@ public interface KafkaComponentBuilderFactory {
             return this;
         }
         /**
+         * To use a custom KafkaHeaderDeserializer to deserialize kafka headers
+         * values.
+         * 
+         * The option is a:
+         * <code>org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer</code> type.
+         * 
+         * Group: consumer
+         */
+        default KafkaComponentBuilder headerDeserializer(
+                org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer headerDeserializer) {
+            doSetProperty("headerDeserializer", headerDeserializer);
+            return this;
+        }
+        /**
          * The expected time between heartbeats to the consumer coordinator when
          * using Kafka's group management facilities. Heartbeats are used to
          * ensure that the consumer's session stays active and to facilitate
@@ -407,20 +421,6 @@ public interface KafkaComponentBuilderFactory {
             return this;
         }
         /**
-         * To use a custom KafkaHeaderDeserializer to deserialize kafka headers
-         * values.
-         * 
-         * The option is a:
-         * <code>org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer</code> type.
-         * 
-         * Group: consumer
-         */
-        default KafkaComponentBuilder kafkaHeaderDeserializer(
-                org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer kafkaHeaderDeserializer) {
-            doSetProperty("kafkaHeaderDeserializer", kafkaHeaderDeserializer);
-            return this;
-        }
-        /**
          * Deserializer class for key that implements the Deserializer
          * interface.
          * 
@@ -1587,8 +1587,8 @@ public interface KafkaComponentBuilderFactory {
             case "fetchMinBytes": getOrCreateConfiguration((KafkaComponent) component).setFetchMinBytes((java.lang.Integer) value); return true;
             case "fetchWaitMaxMs": getOrCreateConfiguration((KafkaComponent) component).setFetchWaitMaxMs((java.lang.Integer) value); return true;
             case "groupId": getOrCreateConfiguration((KafkaComponent) component).setGroupId((java.lang.String) value); return true;
+            case "headerDeserializer": getOrCreateConfiguration((KafkaComponent) component).setHeaderDeserializer((org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer) value); return true;
             case "heartbeatIntervalMs": getOrCreateConfiguration((KafkaComponent) component).setHeartbeatIntervalMs((java.lang.Integer) value); return true;
-            case "kafkaHeaderDeserializer": getOrCreateConfiguration((KafkaComponent) component).setKafkaHeaderDeserializer((org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer) value); return true;
             case "keyDeserializer": getOrCreateConfiguration((KafkaComponent) component).setKeyDeserializer((java.lang.String) value); return true;
             case "maxPartitionFetchBytes": getOrCreateConfiguration((KafkaComponent) component).setMaxPartitionFetchBytes((java.lang.Integer) value); return true;
             case "maxPollIntervalMs": getOrCreateConfiguration((KafkaComponent) component).setMaxPollIntervalMs((java.lang.Long) value); return true;
diff --git a/core/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KafkaEndpointBuilderFactory.java b/core/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KafkaEndpointBuilderFactory.java
index 823068b..9921b04 100644
--- a/core/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KafkaEndpointBuilderFactory.java
+++ b/core/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KafkaEndpointBuilderFactory.java
@@ -638,6 +638,34 @@ public interface KafkaEndpointBuilderFactory {
             return this;
         }
         /**
+         * To use a custom KafkaHeaderDeserializer to deserialize kafka headers
+         * values.
+         * 
+         * The option is a:
+         * <code>org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer</code> type.
+         * 
+         * Group: consumer
+         */
+        default KafkaEndpointConsumerBuilder headerDeserializer(
+                Object headerDeserializer) {
+            doSetProperty("headerDeserializer", headerDeserializer);
+            return this;
+        }
+        /**
+         * To use a custom KafkaHeaderDeserializer to deserialize kafka headers
+         * values.
+         * 
+         * The option will be converted to a
+         * <code>org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer</code> type.
+         * 
+         * Group: consumer
+         */
+        default KafkaEndpointConsumerBuilder headerDeserializer(
+                String headerDeserializer) {
+            doSetProperty("headerDeserializer", headerDeserializer);
+            return this;
+        }
+        /**
          * The expected time between heartbeats to the consumer coordinator when
          * using Kafka's group management facilities. Heartbeats are used to
          * ensure that the consumer's session stays active and to facilitate
@@ -677,34 +705,6 @@ public interface KafkaEndpointBuilderFactory {
             return this;
         }
         /**
-         * To use a custom KafkaHeaderDeserializer to deserialize kafka headers
-         * values.
-         * 
-         * The option is a:
-         * <code>org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer</code> type.
-         * 
-         * Group: consumer
-         */
-        default KafkaEndpointConsumerBuilder kafkaHeaderDeserializer(
-                Object kafkaHeaderDeserializer) {
-            doSetProperty("kafkaHeaderDeserializer", kafkaHeaderDeserializer);
-            return this;
-        }
-        /**
-         * To use a custom KafkaHeaderDeserializer to deserialize kafka headers
-         * values.
-         * 
-         * The option will be converted to a
-         * <code>org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer</code> type.
-         * 
-         * Group: consumer
-         */
-        default KafkaEndpointConsumerBuilder kafkaHeaderDeserializer(
-                String kafkaHeaderDeserializer) {
-            doSetProperty("kafkaHeaderDeserializer", kafkaHeaderDeserializer);
-            return this;
-        }
-        /**
          * Deserializer class for key that implements the Deserializer
          * interface.
          * 
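
The same rename applies to the endpoint DSL consumer builder above; the String overload keeps registry references working. A minimal sketch using EndpointRouteBuilder, assuming the usual generated kafka(...) entry point and brokers option (topic, bean name and broker address are illustrative):

    import org.apache.camel.builder.endpoint.EndpointRouteBuilder;

    public class KafkaEndpointDslRoute extends EndpointRouteBuilder {
        @Override
        public void configure() {
            // the String overload resolves the bean from the registry by name
            from(kafka("my_topic")
                    .brokers("localhost:9092")
                    .headerDeserializer("#myHeaderDeserializer"))
                .log("received ${body}");
        }
    }
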
diff --git a/docs/components/modules/ROOT/pages/kafka-component.adoc b/docs/components/modules/ROOT/pages/kafka-component.adoc
index 9280343..c39b2da 100644
--- a/docs/components/modules/ROOT/pages/kafka-component.adoc
+++ b/docs/components/modules/ROOT/pages/kafka-component.adoc
@@ -72,8 +72,8 @@ The Kafka component supports 97 options, which are listed below.
 | *fetchMinBytes* (consumer) | The minimum amount of data the server should return for a fetch request. If insufficient data is available the request will wait for that much data to accumulate before answering the request. | 1 | Integer
 | *fetchWaitMaxMs* (consumer) | The maximum amount of time the server will block before answering the fetch request if there isn't sufficient data to immediately satisfy fetch.min.bytes | 500 | Integer
 | *groupId* (consumer) | A string that uniquely identifies the group of consumer processes to which this consumer belongs. By setting the same group id multiple processes indicate that they are all part of the same consumer group. This option is required for consumers. |  | String
+| *headerDeserializer* (consumer) | To use a custom KafkaHeaderDeserializer to deserialize kafka headers values |  | KafkaHeaderDeserializer
 | *heartbeatIntervalMs* (consumer) | The expected time between heartbeats to the consumer coordinator when using Kafka's group management facilities. Heartbeats are used to ensure that the consumer's session stays active and to facilitate rebalancing when new consumers join or leave the group. The value must be set lower than session.timeout.ms, but typically should be set no higher than 1/3 of that value. It can be adjusted even lower to control the expected time for normal rebalances.  [...]
-| *kafkaHeaderDeserializer* (consumer) | To use a custom KafkaHeaderDeserializer to deserialize kafka headers values |  | KafkaHeaderDeserializer
 | *keyDeserializer* (consumer) | Deserializer class for key that implements the Deserializer interface. | org.apache.kafka.common.serialization.StringDeserializer | String
 | *maxPartitionFetchBytes* (consumer) | The maximum amount of data per-partition the server will return. The maximum total memory used for a request will be #partitions max.partition.fetch.bytes. This size must be at least as large as the maximum message size the server allows or else it is possible for the producer to send messages larger than the consumer can fetch. If that happens, the consumer can get stuck trying to fetch a large message on a certain partition. | 1048576 | Integer
 | *maxPollIntervalMs* (consumer) | The maximum delay between invocations of poll() when using consumer group management. This places an upper bound on the amount of time that the consumer can be idle before fetching more records. If poll() is not called before expiration of this timeout, then the consumer is considered failed and the group will rebalance in order to reassign the partitions to another member. |  | Long
@@ -198,8 +198,8 @@ with the following path and query parameters:
 | *fetchMinBytes* (consumer) | The minimum amount of data the server should return for a fetch request. If insufficient data is available the request will wait for that much data to accumulate before answering the request. | 1 | Integer
 | *fetchWaitMaxMs* (consumer) | The maximum amount of time the server will block before answering the fetch request if there isn't sufficient data to immediately satisfy fetch.min.bytes | 500 | Integer
 | *groupId* (consumer) | A string that uniquely identifies the group of consumer processes to which this consumer belongs. By setting the same group id multiple processes indicate that they are all part of the same consumer group. This option is required for consumers. |  | String
+| *headerDeserializer* (consumer) | To use a custom KafkaHeaderDeserializer to deserialize kafka headers values |  | KafkaHeaderDeserializer
 | *heartbeatIntervalMs* (consumer) | The expected time between heartbeats to the consumer coordinator when using Kafka's group management facilities. Heartbeats are used to ensure that the consumer's session stays active and to facilitate rebalancing when new consumers join or leave the group. The value must be set lower than session.timeout.ms, but typically should be set no higher than 1/3 of that value. It can be adjusted even lower to control the expected time for normal rebalances.  [...]
-| *kafkaHeaderDeserializer* (consumer) | To use a custom KafkaHeaderDeserializer to deserialize kafka headers values |  | KafkaHeaderDeserializer
 | *keyDeserializer* (consumer) | Deserializer class for key that implements the Deserializer interface. | org.apache.kafka.common.serialization.StringDeserializer | String
 | *maxPartitionFetchBytes* (consumer) | The maximum amount of data per-partition the server will return. The maximum total memory used for a request will be #partitions max.partition.fetch.bytes. This size must be at least as large as the maximum message size the server allows or else it is possible for the producer to send messages larger than the consumer can fetch. If that happens, the consumer can get stuck trying to fetch a large message on a certain partition. | 1048576 | Integer
 | *maxPollIntervalMs* (consumer) | The maximum delay between invocations of poll() when using consumer group management. This places an upper bound on the amount of time that the consumer can be idle before fetching more records. If poll() is not called before expiration of this timeout, then the consumer is considered failed and the group will rebalance in order to reassign the partitions to another member. |  | Long


[camel] 09/13: CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - kafkaHeaderSerializer

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/camel.git

commit 79c2c87cd003863953f3ca43839f6ab84f5301c4
Author: Andrea Cosentino <an...@gmail.com>
AuthorDate: Thu Oct 29 19:24:52 2020 +0100

    CAMEL-15770 - Kafka serialize/deserialize properties are inconsistently named - kafkaHeaderSerializer
---
 .../org/apache/camel/catalog/docs/kafka-component.adoc       |  4 ++--
 .../camel/component/kafka/KafkaComponentConfigurer.java      | 10 +++++-----
 .../camel/component/kafka/KafkaEndpointConfigurer.java       | 10 +++++-----
 .../camel/component/kafka/KafkaEndpointUriFactory.java       |  2 +-
 .../resources/org/apache/camel/component/kafka/kafka.json    |  4 ++--
 components/camel-kafka/src/main/docs/kafka-component.adoc    |  4 ++--
 .../org/apache/camel/component/kafka/KafkaConfiguration.java |  2 +-
 .../builder/component/dsl/KafkaComponentBuilderFactory.java  |  8 ++++----
 .../builder/endpoint/dsl/KafkaEndpointBuilderFactory.java    | 12 ++++++------
 docs/components/modules/ROOT/pages/kafka-component.adoc      |  4 ++--
 10 files changed, 30 insertions(+), 30 deletions(-)
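
The diffs below rename the producer-side option from kafkaHeaderSerializer to headerSerializer. A minimal sketch of referencing a custom serializer after the rename, assuming a KafkaHeaderSerializer bean bound in the registry as myHeaderSerializer (endpoint names, bean name and broker address are illustrative):

    import org.apache.camel.builder.RouteBuilder;

    public class KafkaHeaderSerializerRoute extends RouteBuilder {
        @Override
        public void configure() {
            // query parameter is now "headerSerializer" (was "kafkaHeaderSerializer");
            // #myHeaderSerializer refers to a KafkaHeaderSerializer bean in the registry
            from("direct:start")
                .to("kafka:my_topic?brokers=localhost:9092&headerSerializer=#myHeaderSerializer");
        }
    }
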

diff --git a/catalog/camel-catalog/src/generated/resources/org/apache/camel/catalog/docs/kafka-component.adoc b/catalog/camel-catalog/src/generated/resources/org/apache/camel/catalog/docs/kafka-component.adoc
index 9cabbeb..26f7fef 100644
--- a/catalog/camel-catalog/src/generated/resources/org/apache/camel/catalog/docs/kafka-component.adoc
+++ b/catalog/camel-catalog/src/generated/resources/org/apache/camel/catalog/docs/kafka-component.adoc
@@ -89,7 +89,7 @@ The Kafka component supports 97 options, which are listed below.
 | *compressionCodec* (producer) | This parameter allows you to specify the compression codec for all data generated by this producer. Valid values are none, gzip and snappy. There are 4 enums and the value can be one of: none, gzip, snappy, lz4 | none | String
 | *connectionMaxIdleMs* (producer) | Close idle connections after the number of milliseconds specified by this config. | 540000 | Integer
 | *enableIdempotence* (producer) | If set to 'true' the producer will ensure that exactly one copy of each message is written in the stream. If 'false', producer retries may write duplicates of the retried message in the stream. If set to true this option will require max.in.flight.requests.per.connection to be set to 1 and retries cannot be zero and additionally acks must be set to 'all'. | false | boolean
-| *kafkaHeaderSerializer* (producer) | To use a custom KafkaHeaderSerializer to serialize kafka headers values |  | KafkaHeaderSerializer
+| *headerSerializer* (producer) | To use a custom KafkaHeaderSerializer to serialize kafka headers values |  | KafkaHeaderSerializer
 | *key* (producer) | The record key (or null if no key is specified). If this option has been configured then it take precedence over header KafkaConstants#KEY |  | String
 | *keySerializer* (producer) | The serializer class for keys (defaults to the same as for messages if nothing is given). | org.apache.kafka.common.serialization.StringSerializer | String
 | *lazyStartProducer* (producer) | Whether the producer should be started lazy (on the first message). By starting lazy you can use this to allow CamelContext and routes to startup in situations where a producer may otherwise fail during starting and cause the route to fail being started. By deferring this startup to be lazy then the startup failure can be handled during routing messages via Camel's routing error handlers. Beware that when the first message is processed then creating and [...]
@@ -216,7 +216,7 @@ with the following path and query parameters:
 | *compressionCodec* (producer) | This parameter allows you to specify the compression codec for all data generated by this producer. Valid values are none, gzip and snappy. There are 4 enums and the value can be one of: none, gzip, snappy, lz4 | none | String
 | *connectionMaxIdleMs* (producer) | Close idle connections after the number of milliseconds specified by this config. | 540000 | Integer
 | *enableIdempotence* (producer) | If set to 'true' the producer will ensure that exactly one copy of each message is written in the stream. If 'false', producer retries may write duplicates of the retried message in the stream. If set to true this option will require max.in.flight.requests.per.connection to be set to 1 and retries cannot be zero and additionally acks must be set to 'all'. | false | boolean
-| *kafkaHeaderSerializer* (producer) | To use a custom KafkaHeaderSerializer to serialize kafka headers values |  | KafkaHeaderSerializer
+| *headerSerializer* (producer) | To use a custom KafkaHeaderSerializer to serialize kafka headers values |  | KafkaHeaderSerializer
 | *key* (producer) | The record key (or null if no key is specified). If this option has been configured then it take precedence over header KafkaConstants#KEY |  | String
 | *keySerializer* (producer) | The serializer class for keys (defaults to the same as for messages if nothing is given). | org.apache.kafka.common.serialization.StringSerializer | String
 | *lazyStartProducer* (producer) | Whether the producer should be started lazy (on the first message). By starting lazy you can use this to allow CamelContext and routes to startup in situations where a producer may otherwise fail during starting and cause the route to fail being started. By deferring this startup to be lazy then the startup failure can be handled during routing messages via Camel's routing error handlers. Beware that when the first message is processed then creating and [...]
diff --git a/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaComponentConfigurer.java b/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaComponentConfigurer.java
index 6f9d5f6..c5b3a26 100644
--- a/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaComponentConfigurer.java
+++ b/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaComponentConfigurer.java
@@ -59,7 +59,7 @@ public class KafkaComponentConfigurer extends PropertyConfigurerSupport implemen
         map.put("compressionCodec", java.lang.String.class);
         map.put("connectionMaxIdleMs", java.lang.Integer.class);
         map.put("enableIdempotence", boolean.class);
-        map.put("kafkaHeaderSerializer", org.apache.camel.component.kafka.serde.KafkaHeaderSerializer.class);
+        map.put("headerSerializer", org.apache.camel.component.kafka.serde.KafkaHeaderSerializer.class);
         map.put("key", java.lang.String.class);
         map.put("keySerializer", java.lang.String.class);
         map.put("lazyStartProducer", boolean.class);
@@ -177,14 +177,14 @@ public class KafkaComponentConfigurer extends PropertyConfigurerSupport implemen
         case "groupId": getOrCreateConfiguration(target).setGroupId(property(camelContext, java.lang.String.class, value)); return true;
         case "headerfilterstrategy":
         case "headerFilterStrategy": getOrCreateConfiguration(target).setHeaderFilterStrategy(property(camelContext, org.apache.camel.spi.HeaderFilterStrategy.class, value)); return true;
+        case "headerserializer":
+        case "headerSerializer": getOrCreateConfiguration(target).setHeaderSerializer(property(camelContext, org.apache.camel.component.kafka.serde.KafkaHeaderSerializer.class, value)); return true;
         case "heartbeatintervalms":
         case "heartbeatIntervalMs": getOrCreateConfiguration(target).setHeartbeatIntervalMs(property(camelContext, java.lang.Integer.class, value)); return true;
         case "interceptorclasses":
         case "interceptorClasses": getOrCreateConfiguration(target).setInterceptorClasses(property(camelContext, java.lang.String.class, value)); return true;
         case "kafkaheaderdeserializer":
         case "kafkaHeaderDeserializer": getOrCreateConfiguration(target).setKafkaHeaderDeserializer(property(camelContext, org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer.class, value)); return true;
-        case "kafkaheaderserializer":
-        case "kafkaHeaderSerializer": getOrCreateConfiguration(target).setKafkaHeaderSerializer(property(camelContext, org.apache.camel.component.kafka.serde.KafkaHeaderSerializer.class, value)); return true;
         case "kafkamanualcommitfactory":
         case "kafkaManualCommitFactory": target.setKafkaManualCommitFactory(property(camelContext, org.apache.camel.component.kafka.KafkaManualCommitFactory.class, value)); return true;
         case "kerberosbeforereloginmintime":
@@ -379,14 +379,14 @@ public class KafkaComponentConfigurer extends PropertyConfigurerSupport implemen
         case "groupId": return getOrCreateConfiguration(target).getGroupId();
         case "headerfilterstrategy":
         case "headerFilterStrategy": return getOrCreateConfiguration(target).getHeaderFilterStrategy();
+        case "headerserializer":
+        case "headerSerializer": return getOrCreateConfiguration(target).getHeaderSerializer();
         case "heartbeatintervalms":
         case "heartbeatIntervalMs": return getOrCreateConfiguration(target).getHeartbeatIntervalMs();
         case "interceptorclasses":
         case "interceptorClasses": return getOrCreateConfiguration(target).getInterceptorClasses();
         case "kafkaheaderdeserializer":
         case "kafkaHeaderDeserializer": return getOrCreateConfiguration(target).getKafkaHeaderDeserializer();
-        case "kafkaheaderserializer":
-        case "kafkaHeaderSerializer": return getOrCreateConfiguration(target).getKafkaHeaderSerializer();
         case "kafkamanualcommitfactory":
         case "kafkaManualCommitFactory": return target.getKafkaManualCommitFactory();
         case "kerberosbeforereloginmintime":
diff --git a/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointConfigurer.java b/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointConfigurer.java
index e2dfa37..733a607 100644
--- a/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointConfigurer.java
+++ b/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointConfigurer.java
@@ -60,7 +60,7 @@ public class KafkaEndpointConfigurer extends PropertyConfigurerSupport implement
         map.put("compressionCodec", java.lang.String.class);
         map.put("connectionMaxIdleMs", java.lang.Integer.class);
         map.put("enableIdempotence", boolean.class);
-        map.put("kafkaHeaderSerializer", org.apache.camel.component.kafka.serde.KafkaHeaderSerializer.class);
+        map.put("headerSerializer", org.apache.camel.component.kafka.serde.KafkaHeaderSerializer.class);
         map.put("key", java.lang.String.class);
         map.put("keySerializer", java.lang.String.class);
         map.put("lazyStartProducer", boolean.class);
@@ -174,14 +174,14 @@ public class KafkaEndpointConfigurer extends PropertyConfigurerSupport implement
         case "groupId": target.getConfiguration().setGroupId(property(camelContext, java.lang.String.class, value)); return true;
         case "headerfilterstrategy":
         case "headerFilterStrategy": target.getConfiguration().setHeaderFilterStrategy(property(camelContext, org.apache.camel.spi.HeaderFilterStrategy.class, value)); return true;
+        case "headerserializer":
+        case "headerSerializer": target.getConfiguration().setHeaderSerializer(property(camelContext, org.apache.camel.component.kafka.serde.KafkaHeaderSerializer.class, value)); return true;
         case "heartbeatintervalms":
         case "heartbeatIntervalMs": target.getConfiguration().setHeartbeatIntervalMs(property(camelContext, java.lang.Integer.class, value)); return true;
         case "interceptorclasses":
         case "interceptorClasses": target.getConfiguration().setInterceptorClasses(property(camelContext, java.lang.String.class, value)); return true;
         case "kafkaheaderdeserializer":
         case "kafkaHeaderDeserializer": target.getConfiguration().setKafkaHeaderDeserializer(property(camelContext, org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer.class, value)); return true;
-        case "kafkaheaderserializer":
-        case "kafkaHeaderSerializer": target.getConfiguration().setKafkaHeaderSerializer(property(camelContext, org.apache.camel.component.kafka.serde.KafkaHeaderSerializer.class, value)); return true;
         case "kerberosbeforereloginmintime":
         case "kerberosBeforeReloginMinTime": target.getConfiguration().setKerberosBeforeReloginMinTime(property(camelContext, java.lang.Integer.class, value)); return true;
         case "kerberosinitcmd":
@@ -376,14 +376,14 @@ public class KafkaEndpointConfigurer extends PropertyConfigurerSupport implement
         case "groupId": return target.getConfiguration().getGroupId();
         case "headerfilterstrategy":
         case "headerFilterStrategy": return target.getConfiguration().getHeaderFilterStrategy();
+        case "headerserializer":
+        case "headerSerializer": return target.getConfiguration().getHeaderSerializer();
         case "heartbeatintervalms":
         case "heartbeatIntervalMs": return target.getConfiguration().getHeartbeatIntervalMs();
         case "interceptorclasses":
         case "interceptorClasses": return target.getConfiguration().getInterceptorClasses();
         case "kafkaheaderdeserializer":
         case "kafkaHeaderDeserializer": return target.getConfiguration().getKafkaHeaderDeserializer();
-        case "kafkaheaderserializer":
-        case "kafkaHeaderSerializer": return target.getConfiguration().getKafkaHeaderSerializer();
         case "kerberosbeforereloginmintime":
         case "kerberosBeforeReloginMinTime": return target.getConfiguration().getKerberosBeforeReloginMinTime();
         case "kerberosinitcmd":
diff --git a/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointUriFactory.java b/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointUriFactory.java
index e0d88d3..e815bdc 100644
--- a/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointUriFactory.java
+++ b/components/camel-kafka/src/generated/java/org/apache/camel/component/kafka/KafkaEndpointUriFactory.java
@@ -33,7 +33,6 @@ public class KafkaEndpointUriFactory extends org.apache.camel.support.component.
         props.add("sendBufferBytes");
         props.add("heartbeatIntervalMs");
         props.add("consumerStreams");
-        props.add("kafkaHeaderSerializer");
         props.add("interceptorClasses");
         props.add("sslKeystoreType");
         props.add("breakOnFirstError");
@@ -106,6 +105,7 @@ public class KafkaEndpointUriFactory extends org.apache.camel.support.component.
         props.add("autoCommitIntervalMs");
         props.add("partitioner");
         props.add("kerberosPrincipalToLocalRules");
+        props.add("headerSerializer");
         props.add("sslEnabledProtocols");
         props.add("sslKeystoreLocation");
         props.add("schemaRegistryURL");
diff --git a/components/camel-kafka/src/generated/resources/org/apache/camel/component/kafka/kafka.json b/components/camel-kafka/src/generated/resources/org/apache/camel/component/kafka/kafka.json
index 82d0b38..319e694 100644
--- a/components/camel-kafka/src/generated/resources/org/apache/camel/component/kafka/kafka.json
+++ b/components/camel-kafka/src/generated/resources/org/apache/camel/component/kafka/kafka.json
@@ -63,7 +63,7 @@
     "compressionCodec": { "kind": "property", "displayName": "Compression Codec", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "enum": [ "none", "gzip", "snappy", "lz4" ], "deprecated": false, "secret": false, "defaultValue": "none", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "This parameter allows you to specify the compression codec f [...]
     "connectionMaxIdleMs": { "kind": "property", "displayName": "Connection Max Idle Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "540000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Close idle connections after the number of milliseconds specified by this config." },
     "enableIdempotence": { "kind": "property", "displayName": "Enable Idempotence", "group": "producer", "label": "producer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "If set to 'true' the producer will ensure that exactly one copy of each message is written in the stream. If 'fa [...]
-    "kafkaHeaderSerializer": { "kind": "property", "displayName": "Kafka Header Serializer", "group": "producer", "label": "producer", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.serde.KafkaHeaderSerializer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom KafkaHeaderSerializer to serialize kafka headers values" },
+    "headerSerializer": { "kind": "property", "displayName": "Header Serializer", "group": "producer", "label": "producer", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.serde.KafkaHeaderSerializer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom KafkaHeaderSerializer to serialize kafka headers values" },
     "key": { "kind": "property", "displayName": "Key", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The record key (or null if no key is specified). If this option has been configured then it take precedence over header KafkaConstants#KEY" },
     "keySerializer": { "kind": "property", "displayName": "Key Serializer", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringSerializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The serializer class for keys (defaults to the same as for me [...]
     "lazyStartProducer": { "kind": "property", "displayName": "Lazy Start Producer", "group": "producer", "label": "producer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "description": "Whether the producer should be started lazy (on the first message). By starting lazy you can use this to allow CamelContext and routes to startup in situations where a producer may otherwise fail during starting and cause the r [...]
@@ -163,7 +163,7 @@
     "compressionCodec": { "kind": "parameter", "displayName": "Compression Codec", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "enum": [ "none", "gzip", "snappy", "lz4" ], "deprecated": false, "secret": false, "defaultValue": "none", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "This parameter allows you to specify the compression codec  [...]
     "connectionMaxIdleMs": { "kind": "parameter", "displayName": "Connection Max Idle Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "540000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Close idle connections after the number of milliseconds specified by this config." },
     "enableIdempotence": { "kind": "parameter", "displayName": "Enable Idempotence", "group": "producer", "label": "producer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "If set to 'true' the producer will ensure that exactly one copy of each message is written in the stream. If 'f [...]
-    "kafkaHeaderSerializer": { "kind": "parameter", "displayName": "Kafka Header Serializer", "group": "producer", "label": "producer", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.serde.KafkaHeaderSerializer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom KafkaHeaderSerializer to serialize kafka headers values" },
+    "headerSerializer": { "kind": "parameter", "displayName": "Header Serializer", "group": "producer", "label": "producer", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.serde.KafkaHeaderSerializer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom KafkaHeaderSerializer to serialize kafka headers values" },
     "key": { "kind": "parameter", "displayName": "Key", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The record key (or null if no key is specified). If this option has been configured then it take precedence over header KafkaConstants#KEY" },
     "keySerializer": { "kind": "parameter", "displayName": "Key Serializer", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringSerializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The serializer class for keys (defaults to the same as for m [...]
     "lazyStartProducer": { "kind": "parameter", "displayName": "Lazy Start Producer", "group": "producer", "label": "producer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "description": "Whether the producer should be started lazy (on the first message). By starting lazy you can use this to allow CamelContext and routes to startup in situations where a producer may otherwise fail during starting and cause the  [...]
diff --git a/components/camel-kafka/src/main/docs/kafka-component.adoc b/components/camel-kafka/src/main/docs/kafka-component.adoc
index 9cabbeb..26f7fef 100644
--- a/components/camel-kafka/src/main/docs/kafka-component.adoc
+++ b/components/camel-kafka/src/main/docs/kafka-component.adoc
@@ -89,7 +89,7 @@ The Kafka component supports 97 options, which are listed below.
 | *compressionCodec* (producer) | This parameter allows you to specify the compression codec for all data generated by this producer. Valid values are none, gzip and snappy. There are 4 enums and the value can be one of: none, gzip, snappy, lz4 | none | String
 | *connectionMaxIdleMs* (producer) | Close idle connections after the number of milliseconds specified by this config. | 540000 | Integer
 | *enableIdempotence* (producer) | If set to 'true' the producer will ensure that exactly one copy of each message is written in the stream. If 'false', producer retries may write duplicates of the retried message in the stream. If set to true this option will require max.in.flight.requests.per.connection to be set to 1 and retries cannot be zero and additionally acks must be set to 'all'. | false | boolean
-| *kafkaHeaderSerializer* (producer) | To use a custom KafkaHeaderSerializer to serialize kafka headers values |  | KafkaHeaderSerializer
+| *headerSerializer* (producer) | To use a custom KafkaHeaderSerializer to serialize kafka headers values |  | KafkaHeaderSerializer
 | *key* (producer) | The record key (or null if no key is specified). If this option has been configured then it take precedence over header KafkaConstants#KEY |  | String
 | *keySerializer* (producer) | The serializer class for keys (defaults to the same as for messages if nothing is given). | org.apache.kafka.common.serialization.StringSerializer | String
 | *lazyStartProducer* (producer) | Whether the producer should be started lazy (on the first message). By starting lazy you can use this to allow CamelContext and routes to startup in situations where a producer may otherwise fail during starting and cause the route to fail being started. By deferring this startup to be lazy then the startup failure can be handled during routing messages via Camel's routing error handlers. Beware that when the first message is processed then creating and [...]
@@ -216,7 +216,7 @@ with the following path and query parameters:
 | *compressionCodec* (producer) | This parameter allows you to specify the compression codec for all data generated by this producer. Valid values are none, gzip and snappy. There are 4 enums and the value can be one of: none, gzip, snappy, lz4 | none | String
 | *connectionMaxIdleMs* (producer) | Close idle connections after the number of milliseconds specified by this config. | 540000 | Integer
 | *enableIdempotence* (producer) | If set to 'true' the producer will ensure that exactly one copy of each message is written in the stream. If 'false', producer retries may write duplicates of the retried message in the stream. If set to true this option will require max.in.flight.requests.per.connection to be set to 1 and retries cannot be zero and additionally acks must be set to 'all'. | false | boolean
-| *kafkaHeaderSerializer* (producer) | To use a custom KafkaHeaderSerializer to serialize kafka headers values |  | KafkaHeaderSerializer
+| *headerSerializer* (producer) | To use a custom KafkaHeaderSerializer to serialize kafka headers values |  | KafkaHeaderSerializer
 | *key* (producer) | The record key (or null if no key is specified). If this option has been configured then it take precedence over header KafkaConstants#KEY |  | String
 | *keySerializer* (producer) | The serializer class for keys (defaults to the same as for messages if nothing is given). | org.apache.kafka.common.serialization.StringSerializer | String
 | *lazyStartProducer* (producer) | Whether the producer should be started lazy (on the first message). By starting lazy you can use this to allow CamelContext and routes to startup in situations where a producer may otherwise fail during starting and cause the route to fail being started. By deferring this startup to be lazy then the startup failure can be handled during routing messages via Camel's routing error handlers. Beware that when the first message is processed then creating and [...]
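For illustration, on the endpoint URI the renamed producer option is used exactly like the old one, only under its new key. A minimal sketch of a producer route, assuming a hypothetical registry bean named myHeaderSerializer that implements org.apache.camel.component.kafka.serde.KafkaHeaderSerializer:

    // Minimal sketch; #myHeaderSerializer is a hypothetical registry bean implementing
    // org.apache.camel.component.kafka.serde.KafkaHeaderSerializer.
    public class KafkaHeaderSerializerRoute extends org.apache.camel.builder.RouteBuilder {
        @Override
        public void configure() {
            from("direct:out")
                // formerly ...&kafkaHeaderSerializer=#myHeaderSerializer
                .to("kafka:mytopic?brokers=localhost:9092&headerSerializer=#myHeaderSerializer");
        }
    }
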
diff --git a/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConfiguration.java b/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConfiguration.java
index 266012c..ae251a4 100644
--- a/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConfiguration.java
+++ b/components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaConfiguration.java
@@ -1696,7 +1696,7 @@ public class KafkaConfiguration implements Cloneable, HeaderFilterStrategyAware
      *
      * @param kafkaHeaderSerializer custom kafka header serializer to be used
      */
-    public void setKafkaHeaderSerializer(final KafkaHeaderSerializer headerSerializer) {
+    public void setHeaderSerializer(final KafkaHeaderSerializer headerSerializer) {
         this.headerSerializer = headerSerializer;
     }
 
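For plain Java configuration the rename means calling setHeaderSerializer instead of setKafkaHeaderSerializer. A minimal code fragment, assuming a hypothetical MyHeaderSerializer class that implements org.apache.camel.component.kafka.serde.KafkaHeaderSerializer and an existing CamelContext named camelContext:

    // Minimal sketch; MyHeaderSerializer is a hypothetical KafkaHeaderSerializer
    // implementation and camelContext an existing org.apache.camel.CamelContext.
    KafkaConfiguration configuration = new KafkaConfiguration();
    configuration.setBrokers("localhost:9092");
    configuration.setHeaderSerializer(new MyHeaderSerializer()); // formerly setKafkaHeaderSerializer(...)

    KafkaComponent kafka = new KafkaComponent();
    kafka.setConfiguration(configuration);
    camelContext.addComponent("kafka", kafka);
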
diff --git a/core/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/KafkaComponentBuilderFactory.java b/core/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/KafkaComponentBuilderFactory.java
index 0f38264..ee66c62 100644
--- a/core/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/KafkaComponentBuilderFactory.java
+++ b/core/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/KafkaComponentBuilderFactory.java
@@ -690,9 +690,9 @@ public interface KafkaComponentBuilderFactory {
          * 
          * Group: producer
          */
-        default KafkaComponentBuilder kafkaHeaderSerializer(
-                org.apache.camel.component.kafka.serde.KafkaHeaderSerializer kafkaHeaderSerializer) {
-            doSetProperty("kafkaHeaderSerializer", kafkaHeaderSerializer);
+        default KafkaComponentBuilder headerSerializer(
+                org.apache.camel.component.kafka.serde.KafkaHeaderSerializer headerSerializer) {
+            doSetProperty("headerSerializer", headerSerializer);
             return this;
         }
         /**
@@ -1606,7 +1606,7 @@ public interface KafkaComponentBuilderFactory {
             case "compressionCodec": getOrCreateConfiguration((KafkaComponent) component).setCompressionCodec((java.lang.String) value); return true;
             case "connectionMaxIdleMs": getOrCreateConfiguration((KafkaComponent) component).setConnectionMaxIdleMs((java.lang.Integer) value); return true;
             case "enableIdempotence": getOrCreateConfiguration((KafkaComponent) component).setEnableIdempotence((boolean) value); return true;
-            case "kafkaHeaderSerializer": getOrCreateConfiguration((KafkaComponent) component).setKafkaHeaderSerializer((org.apache.camel.component.kafka.serde.KafkaHeaderSerializer) value); return true;
+            case "headerSerializer": getOrCreateConfiguration((KafkaComponent) component).setHeaderSerializer((org.apache.camel.component.kafka.serde.KafkaHeaderSerializer) value); return true;
             case "key": getOrCreateConfiguration((KafkaComponent) component).setKey((java.lang.String) value); return true;
             case "keySerializer": getOrCreateConfiguration((KafkaComponent) component).setKeySerializer((java.lang.String) value); return true;
             case "lazyStartProducer": ((KafkaComponent) component).setLazyStartProducer((boolean) value); return true;
diff --git a/core/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KafkaEndpointBuilderFactory.java b/core/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KafkaEndpointBuilderFactory.java
index 67be3f5..823068b 100644
--- a/core/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KafkaEndpointBuilderFactory.java
+++ b/core/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KafkaEndpointBuilderFactory.java
@@ -1797,9 +1797,9 @@ public interface KafkaEndpointBuilderFactory {
          * 
          * Group: producer
          */
-        default KafkaEndpointProducerBuilder kafkaHeaderSerializer(
-                Object kafkaHeaderSerializer) {
-            doSetProperty("kafkaHeaderSerializer", kafkaHeaderSerializer);
+        default KafkaEndpointProducerBuilder headerSerializer(
+                Object headerSerializer) {
+            doSetProperty("headerSerializer", headerSerializer);
             return this;
         }
         /**
@@ -1811,9 +1811,9 @@ public interface KafkaEndpointBuilderFactory {
          * 
          * Group: producer
          */
-        default KafkaEndpointProducerBuilder kafkaHeaderSerializer(
-                String kafkaHeaderSerializer) {
-            doSetProperty("kafkaHeaderSerializer", kafkaHeaderSerializer);
+        default KafkaEndpointProducerBuilder headerSerializer(
+                String headerSerializer) {
+            doSetProperty("headerSerializer", headerSerializer);
             return this;
         }
         /**
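The endpoint DSL exposes the renamed method in both the typed and the String overload. A minimal sketch of a route written with an EndpointRouteBuilder, where myHeaderSerializer is again a hypothetical KafkaHeaderSerializer instance supplied from outside:

    // Minimal sketch; myHeaderSerializer is a hypothetical KafkaHeaderSerializer instance.
    public class KafkaEndpointDslRoute extends org.apache.camel.builder.endpoint.EndpointRouteBuilder {
        private final org.apache.camel.component.kafka.serde.KafkaHeaderSerializer myHeaderSerializer;

        public KafkaEndpointDslRoute(org.apache.camel.component.kafka.serde.KafkaHeaderSerializer myHeaderSerializer) {
            this.myHeaderSerializer = myHeaderSerializer;
        }

        @Override
        public void configure() {
            from(direct("out"))
                .to(kafka("mytopic")
                        .brokers("localhost:9092")
                        .headerSerializer(myHeaderSerializer)); // formerly kafkaHeaderSerializer(...)
        }
    }
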
diff --git a/docs/components/modules/ROOT/pages/kafka-component.adoc b/docs/components/modules/ROOT/pages/kafka-component.adoc
index e307248..9280343 100644
--- a/docs/components/modules/ROOT/pages/kafka-component.adoc
+++ b/docs/components/modules/ROOT/pages/kafka-component.adoc
@@ -91,7 +91,7 @@ The Kafka component supports 97 options, which are listed below.
 | *compressionCodec* (producer) | This parameter allows you to specify the compression codec for all data generated by this producer. Valid values are none, gzip and snappy. There are 4 enums and the value can be one of: none, gzip, snappy, lz4 | none | String
 | *connectionMaxIdleMs* (producer) | Close idle connections after the number of milliseconds specified by this config. | 540000 | Integer
 | *enableIdempotence* (producer) | If set to 'true' the producer will ensure that exactly one copy of each message is written in the stream. If 'false', producer retries may write duplicates of the retried message in the stream. If set to true this option will require max.in.flight.requests.per.connection to be set to 1 and retries cannot be zero and additionally acks must be set to 'all'. | false | boolean
-| *kafkaHeaderSerializer* (producer) | To use a custom KafkaHeaderSerializer to serialize kafka headers values |  | KafkaHeaderSerializer
+| *headerSerializer* (producer) | To use a custom KafkaHeaderSerializer to serialize kafka headers values |  | KafkaHeaderSerializer
 | *key* (producer) | The record key (or null if no key is specified). If this option has been configured then it take precedence over header KafkaConstants#KEY |  | String
 | *keySerializer* (producer) | The serializer class for keys (defaults to the same as for messages if nothing is given). | org.apache.kafka.common.serialization.StringSerializer | String
 | *lazyStartProducer* (producer) | Whether the producer should be started lazy (on the first message). By starting lazy you can use this to allow CamelContext and routes to startup in situations where a producer may otherwise fail during starting and cause the route to fail being started. By deferring this startup to be lazy then the startup failure can be handled during routing messages via Camel's routing error handlers. Beware that when the first message is processed then creating and [...]
@@ -218,7 +218,7 @@ with the following path and query parameters:
 | *compressionCodec* (producer) | This parameter allows you to specify the compression codec for all data generated by this producer. Valid values are none, gzip and snappy. There are 4 enums and the value can be one of: none, gzip, snappy, lz4 | none | String
 | *connectionMaxIdleMs* (producer) | Close idle connections after the number of milliseconds specified by this config. | 540000 | Integer
 | *enableIdempotence* (producer) | If set to 'true' the producer will ensure that exactly one copy of each message is written in the stream. If 'false', producer retries may write duplicates of the retried message in the stream. If set to true this option will require max.in.flight.requests.per.connection to be set to 1 and retries cannot be zero and additionally acks must be set to 'all'. | false | boolean
-| *kafkaHeaderSerializer* (producer) | To use a custom KafkaHeaderSerializer to serialize kafka headers values |  | KafkaHeaderSerializer
+| *headerSerializer* (producer) | To use a custom KafkaHeaderSerializer to serialize kafka headers values |  | KafkaHeaderSerializer
 | *key* (producer) | The record key (or null if no key is specified). If this option has been configured then it take precedence over header KafkaConstants#KEY |  | String
 | *keySerializer* (producer) | The serializer class for keys (defaults to the same as for messages if nothing is given). | org.apache.kafka.common.serialization.StringSerializer | String
 | *lazyStartProducer* (producer) | Whether the producer should be started lazy (on the first message). By starting lazy you can use this to allow CamelContext and routes to startup in situations where a producer may otherwise fail during starting and cause the route to fail being started. By deferring this startup to be lazy then the startup failure can be handled during routing messages via Camel's routing error handlers. Beware that when the first message is processed then creating and [...]


[camel] 13/13: CAMEL-15770 - Regen catalog

Posted by ac...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

acosentino pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/camel.git

commit 96d3c42c266e0fdd4dcd90e9ce2ceaa3c8c76b2a
Author: Andrea Cosentino <an...@gmail.com>
AuthorDate: Thu Oct 29 19:56:20 2020 +0100

    CAMEL-15770 - Regen catalog
---
 .../org/apache/camel/catalog/components/kafka.json       | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/catalog/camel-catalog/src/generated/resources/org/apache/camel/catalog/components/kafka.json b/catalog/camel-catalog/src/generated/resources/org/apache/camel/catalog/components/kafka.json
index 91ab79c..2cbc60d 100644
--- a/catalog/camel-catalog/src/generated/resources/org/apache/camel/catalog/components/kafka.json
+++ b/catalog/camel-catalog/src/generated/resources/org/apache/camel/catalog/components/kafka.json
@@ -44,8 +44,8 @@
     "fetchMinBytes": { "kind": "property", "displayName": "Fetch Min Bytes", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "1", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The minimum amount of data the server should return for a fetch request. If insufficient data is available the  [...]
     "fetchWaitMaxMs": { "kind": "property", "displayName": "Fetch Wait Max Ms", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "500", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum amount of time the server will block before answering the fetch request if there isn't suffici [...]
     "groupId": { "kind": "property", "displayName": "Group Id", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "A string that uniquely identifies the group of consumer processes to which this consumer belongs. By setting the same group id multiple processes i [...]
+    "headerDeserializer": { "kind": "property", "displayName": "Header Deserializer", "group": "consumer", "label": "consumer", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom KafkaHeaderDeserializer to deserialize kafka headers values" },
     "heartbeatIntervalMs": { "kind": "property", "displayName": "Heartbeat Interval Ms", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "3000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The expected time between heartbeats to the consumer coordinator when using Kafka's group manage [...]
-    "kafkaHeaderDeserializer": { "kind": "property", "displayName": "Kafka Header Deserializer", "group": "consumer", "label": "consumer", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom KafkaHeaderDeserializer to deserialize kafka headers [...]
     "keyDeserializer": { "kind": "property", "displayName": "Key Deserializer", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringDeserializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Deserializer class for key that implements the Deserial [...]
     "maxPartitionFetchBytes": { "kind": "property", "displayName": "Max Partition Fetch Bytes", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "1048576", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum amount of data per-partition the server will return. The maximum total mem [...]
     "maxPollIntervalMs": { "kind": "property", "displayName": "Max Poll Interval Ms", "group": "consumer", "label": "consumer", "required": false, "type": "duration", "javaType": "java.lang.Long", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum delay between invocations of poll() when using consumer group management. This places an upper bound on the amoun [...]
@@ -63,9 +63,9 @@
     "compressionCodec": { "kind": "property", "displayName": "Compression Codec", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "enum": [ "none", "gzip", "snappy", "lz4" ], "deprecated": false, "secret": false, "defaultValue": "none", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "This parameter allows you to specify the compression codec f [...]
     "connectionMaxIdleMs": { "kind": "property", "displayName": "Connection Max Idle Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "540000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Close idle connections after the number of milliseconds specified by this config." },
     "enableIdempotence": { "kind": "property", "displayName": "Enable Idempotence", "group": "producer", "label": "producer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "If set to 'true' the producer will ensure that exactly one copy of each message is written in the stream. If 'fa [...]
-    "kafkaHeaderSerializer": { "kind": "property", "displayName": "Kafka Header Serializer", "group": "producer", "label": "producer", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.serde.KafkaHeaderSerializer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom KafkaHeaderSerializer to serialize kafka headers values" },
+    "headerSerializer": { "kind": "property", "displayName": "Header Serializer", "group": "producer", "label": "producer", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.serde.KafkaHeaderSerializer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom KafkaHeaderSerializer to serialize kafka headers values" },
     "key": { "kind": "property", "displayName": "Key", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The record key (or null if no key is specified). If this option has been configured then it take precedence over header KafkaConstants#KEY" },
-    "keySerializerClass": { "kind": "property", "displayName": "Key Serializer Class", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringSerializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The serializer class for keys (defaults to the sam [...]
+    "keySerializer": { "kind": "property", "displayName": "Key Serializer", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringSerializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The serializer class for keys (defaults to the same as for me [...]
     "lazyStartProducer": { "kind": "property", "displayName": "Lazy Start Producer", "group": "producer", "label": "producer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "description": "Whether the producer should be started lazy (on the first message). By starting lazy you can use this to allow CamelContext and routes to startup in situations where a producer may otherwise fail during starting and cause the r [...]
     "lingerMs": { "kind": "property", "displayName": "Linger Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "0", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The producer groups together any records that arrive in between request transmissions into a single batched request. Norma [...]
     "maxBlockMs": { "kind": "property", "displayName": "Max Block Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "60000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The configuration controls how long sending to kafka will block. These methods can be blocked for multiple reason [...]
@@ -87,7 +87,7 @@
     "retries": { "kind": "property", "displayName": "Retries", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "0", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Setting a value greater than zero will cause the client to resend any record whose send fails with a potentially transient er [...]
     "retryBackoffMs": { "kind": "property", "displayName": "Retry Backoff Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "100", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Before each retry, the producer refreshes the metadata of relevant topics to see if a new leader has been e [...]
     "sendBufferBytes": { "kind": "property", "displayName": "Send Buffer Bytes", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "131072", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Socket write buffer size" },
-    "serializerClass": { "kind": "property", "displayName": "Serializer Class", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringSerializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The serializer class for messages." },
+    "valueSerializer": { "kind": "property", "displayName": "Value Serializer", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringSerializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The serializer class for messages." },
     "workerPool": { "kind": "property", "displayName": "Worker Pool", "group": "producer", "label": "producer", "required": false, "type": "object", "javaType": "java.util.concurrent.ExecutorService", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom worker pool for continue routing Exchange after kafka server has acknowledge the message that was sent to [...]
     "workerPoolCoreSize": { "kind": "property", "displayName": "Worker Pool Core Size", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "10", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Number of core threads for the worker pool for continue routing Exchange after kafka server has ack [...]
     "workerPoolMaxSize": { "kind": "property", "displayName": "Worker Pool Max Size", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "20", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Maximum number of threads for the worker pool for continue routing Exchange after kafka server has ac [...]
@@ -143,8 +143,8 @@
     "fetchMinBytes": { "kind": "parameter", "displayName": "Fetch Min Bytes", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "1", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The minimum amount of data the server should return for a fetch request. If insufficient data is available the [...]
     "fetchWaitMaxMs": { "kind": "parameter", "displayName": "Fetch Wait Max Ms", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "500", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum amount of time the server will block before answering the fetch request if there isn't suffic [...]
     "groupId": { "kind": "parameter", "displayName": "Group Id", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "A string that uniquely identifies the group of consumer processes to which this consumer belongs. By setting the same group id multiple processes  [...]
+    "headerDeserializer": { "kind": "parameter", "displayName": "Header Deserializer", "group": "consumer", "label": "consumer", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom KafkaHeaderDeserializer to deserialize kafka headers values" },
     "heartbeatIntervalMs": { "kind": "parameter", "displayName": "Heartbeat Interval Ms", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "3000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The expected time between heartbeats to the consumer coordinator when using Kafka's group manag [...]
-    "kafkaHeaderDeserializer": { "kind": "parameter", "displayName": "Kafka Header Deserializer", "group": "consumer", "label": "consumer", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom KafkaHeaderDeserializer to deserialize kafka header [...]
     "keyDeserializer": { "kind": "parameter", "displayName": "Key Deserializer", "group": "consumer", "label": "consumer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringDeserializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Deserializer class for key that implements the Deseria [...]
     "maxPartitionFetchBytes": { "kind": "parameter", "displayName": "Max Partition Fetch Bytes", "group": "consumer", "label": "consumer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "1048576", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum amount of data per-partition the server will return. The maximum total me [...]
     "maxPollIntervalMs": { "kind": "parameter", "displayName": "Max Poll Interval Ms", "group": "consumer", "label": "consumer", "required": false, "type": "duration", "javaType": "java.lang.Long", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The maximum delay between invocations of poll() when using consumer group management. This places an upper bound on the amou [...]
@@ -163,9 +163,9 @@
     "compressionCodec": { "kind": "parameter", "displayName": "Compression Codec", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "enum": [ "none", "gzip", "snappy", "lz4" ], "deprecated": false, "secret": false, "defaultValue": "none", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "This parameter allows you to specify the compression codec  [...]
     "connectionMaxIdleMs": { "kind": "parameter", "displayName": "Connection Max Idle Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "540000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Close idle connections after the number of milliseconds specified by this config." },
     "enableIdempotence": { "kind": "parameter", "displayName": "Enable Idempotence", "group": "producer", "label": "producer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "If set to 'true' the producer will ensure that exactly one copy of each message is written in the stream. If 'f [...]
-    "kafkaHeaderSerializer": { "kind": "parameter", "displayName": "Kafka Header Serializer", "group": "producer", "label": "producer", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.serde.KafkaHeaderSerializer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom KafkaHeaderSerializer to serialize kafka headers values" },
+    "headerSerializer": { "kind": "parameter", "displayName": "Header Serializer", "group": "producer", "label": "producer", "required": false, "type": "object", "javaType": "org.apache.camel.component.kafka.serde.KafkaHeaderSerializer", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom KafkaHeaderSerializer to serialize kafka headers values" },
     "key": { "kind": "parameter", "displayName": "Key", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The record key (or null if no key is specified). If this option has been configured then it take precedence over header KafkaConstants#KEY" },
-    "keySerializerClass": { "kind": "parameter", "displayName": "Key Serializer Class", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringSerializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The serializer class for keys (defaults to the sa [...]
+    "keySerializer": { "kind": "parameter", "displayName": "Key Serializer", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringSerializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The serializer class for keys (defaults to the same as for m [...]
     "lazyStartProducer": { "kind": "parameter", "displayName": "Lazy Start Producer", "group": "producer", "label": "producer", "required": false, "type": "boolean", "javaType": "boolean", "deprecated": false, "secret": false, "defaultValue": false, "description": "Whether the producer should be started lazy (on the first message). By starting lazy you can use this to allow CamelContext and routes to startup in situations where a producer may otherwise fail during starting and cause the  [...]
     "lingerMs": { "kind": "parameter", "displayName": "Linger Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "0", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The producer groups together any records that arrive in between request transmissions into a single batched request. Norm [...]
     "maxBlockMs": { "kind": "parameter", "displayName": "Max Block Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "60000", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The configuration controls how long sending to kafka will block. These methods can be blocked for multiple reaso [...]
@@ -187,7 +187,7 @@
     "retries": { "kind": "parameter", "displayName": "Retries", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "0", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Setting a value greater than zero will cause the client to resend any record whose send fails with a potentially transient e [...]
     "retryBackoffMs": { "kind": "parameter", "displayName": "Retry Backoff Ms", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "100", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Before each retry, the producer refreshes the metadata of relevant topics to see if a new leader has been  [...]
     "sendBufferBytes": { "kind": "parameter", "displayName": "Send Buffer Bytes", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "131072", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Socket write buffer size" },
-    "serializerClass": { "kind": "parameter", "displayName": "Serializer Class", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringSerializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The serializer class for messages." },
+    "valueSerializer": { "kind": "parameter", "displayName": "Value Serializer", "group": "producer", "label": "producer", "required": false, "type": "string", "javaType": "java.lang.String", "deprecated": false, "secret": false, "defaultValue": "org.apache.kafka.common.serialization.StringSerializer", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "The serializer class for messages." },
     "workerPool": { "kind": "parameter", "displayName": "Worker Pool", "group": "producer", "label": "producer", "required": false, "type": "object", "javaType": "java.util.concurrent.ExecutorService", "deprecated": false, "secret": false, "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "To use a custom worker pool for continue routing Exchange after kafka server has acknowledge the message that was sent t [...]
     "workerPoolCoreSize": { "kind": "parameter", "displayName": "Worker Pool Core Size", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "10", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Number of core threads for the worker pool for continue routing Exchange after kafka server has ac [...]
     "workerPoolMaxSize": { "kind": "parameter", "displayName": "Worker Pool Max Size", "group": "producer", "label": "producer", "required": false, "type": "integer", "javaType": "java.lang.Integer", "deprecated": false, "secret": false, "defaultValue": "20", "configurationClass": "org.apache.camel.component.kafka.KafkaConfiguration", "configurationField": "configuration", "description": "Maximum number of threads for the worker pool for continue routing Exchange after kafka server has a [...]